text
stringlengths
11
4.05M
// Copyright 2020 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// Package motioninput provides a representation of Android's MotionEvent, and allows communication
// with the test application ArcMotionInputTest.apk via a Tester. It also provides a test framework
// that allows tests to verify MotionEvents across various windowing modes, such as clamshell,
// tablet, fullscreen, maximized, etc.
package motioninput

import (
	"context"
	"encoding/json"
	"math"
	"time"

	"chromiumos/tast/common/android/ui"
	"chromiumos/tast/errors"
	"chromiumos/tast/local/arc"
	"chromiumos/tast/local/chrome"
	"chromiumos/tast/local/coords"
	"chromiumos/tast/testing"
)

// Action represents a MotionEvent's action key.
// The values are from Android's MotionEvent.java.
// See: https://cs.android.com/android/platform/superproject/+/HEAD:frameworks/base/core/java/android/view/MotionEvent.java
type Action string

// Axis represents a MotionEvent's axis key.
// The values are from Android's MotionEvent.java.
// See: https://cs.android.com/android/platform/superproject/+/HEAD:frameworks/base/core/java/android/view/MotionEvent.java
type Axis string

// Source represents an input device's source.
// The values should be kept in sync with ArcMotionInputTest.apk.
type Source string

// Constant definitions for MotionEvent that should be kept in sync with its respective sources.
const (
	// Action values mirror Android's MotionEvent action constant names.
	ActionDown          Action = "ACTION_DOWN"
	ActionUp            Action = "ACTION_UP"
	ActionMove          Action = "ACTION_MOVE"
	ActionHoverMove     Action = "ACTION_HOVER_MOVE"
	ActionHoverEnter    Action = "ACTION_HOVER_ENTER"
	ActionHoverExit     Action = "ACTION_HOVER_EXIT"
	ActionButtonPress   Action = "ACTION_BUTTON_PRESS"
	ActionButtonRelease Action = "ACTION_BUTTON_RELEASE"

	// Axis values mirror Android's MotionEvent axis constant names.
	AxisX        Axis = "AXIS_X"
	AxisY        Axis = "AXIS_Y"
	AxisPressure Axis = "AXIS_PRESSURE"

	// Source values must match the strings reported by ArcMotionInputTest.apk.
	SourceTouchscreen   Source = "touchscreen"
	SourceMouse         Source = "mouse"
	SourceMouseRelative Source = "mouse_relative"
)

// MotionEvent represents a MotionEvent that was received by the Android application.
// For all Axis values that represent an absolute location, the values are in the
// coordinate space of the Android window (i.e. 0,0 is the top left corner of the application
// window in Android).
type MotionEvent struct {
	Action      Action               `json:"action"`
	DeviceID    int                  `json:"device_id"`
	Sources     []Source             `json:"sources"`
	PointerAxes []map[Axis]float64   `json:"pointer_axes"`
	// Batched is true if this event was included in the history of another MotionEvent in Android,
	// and false otherwise. See more information about batching at:
	// https://cs.android.com/android/platform/superproject/+/HEAD:frameworks/base/core/java/android/view/MotionEvent.java;l=93
	Batched bool `json:"batched"`
}

// Constants for the test application ArcMotionInputTest.apk.
const (
	APK                        = "ArcMotionInputTest.apk"
	Package                    = "org.chromium.arc.testapp.motioninput"
	EventReportingActivity     = ".MotionEventReportingActivity"
	AutoPointerCaptureActivity = ".AutoPointerCaptureActivity"

	// intentActionClearEvents is broadcast to the app to reset its reported event list.
	intentActionClearEvents = Package + ".ACTION_CLEAR_EVENTS"
)

// Tester holds resources associated with ArcMotionInputTest activity.
type Tester struct {
	tconn *chrome.TestConn
	d     *ui.Device
	act   *arc.Activity
}

// defaultPollOptions bounds how long event-assertion helpers poll before giving up.
var defaultPollOptions = &testing.PollOptions{Timeout: 30 * time.Second}

// NewTester creates a new instance of a Tester.
// The provided activity should be started before any of the Tester's methods are called.
// All provided arguments must outlive the Tester.
func NewTester(tconn *chrome.TestConn, d *ui.Device, act *arc.Activity) *Tester {
	return &Tester{
		tconn: tconn,
		d:     d,
		act:   act,
	}
}

// Matcher represents a matcher for motionEvent.
type Matcher func(*MotionEvent) error

// ExpectMotionEvents polls readMotionEvents repeatedly until it receives motionEvents that
// successfully match all of the provided motionEventMatchers in order, or until it times out.
func (t *Tester) ExpectMotionEvents(ctx context.Context, matchers ...Matcher) error {
	return testing.Poll(ctx, func(ctx context.Context) error {
		events, err := t.readMotionEvents(ctx)
		if err != nil {
			return errors.Wrap(err, "failed to read motion event")
		}
		// TODO(b/156655077): Remove filtering of batched events after the bug is fixed.
		// We filter out batched events because in some cases, we observe extraneous events that
		// are automatically generated in the input pipeline. Since the extraneous events are
		// generated immediately after real events, they are batched, so we skip batched events.
		// The index is only advanced when nothing was removed, since removal shifts the
		// remaining elements left by one.
		for i := 0; i < len(events); {
			if events[i].Batched {
				events = append(events[:i], events[i+1:]...)
				continue
			}
			i++
		}
		if len(events) != len(matchers) {
			return errors.Errorf("did not receive the exact number of events as expected; got: %d, want: %d", len(events), len(matchers))
		}
		// A mismatch here is final (the counts already matched), so break out of the poll.
		for i := 0; i < len(matchers); i++ {
			if err := matchers[i](&events[i]); err != nil {
				return testing.PollBreak(err)
			}
		}
		return nil
	}, defaultPollOptions)
}

// WaitUntilEvent polls readMotionEvents repeatedly until it receives any motionEvent that
// matches the provided matcher, while ignoring any event that does not match.
func (t *Tester) WaitUntilEvent(ctx context.Context, matcher Matcher) error {
	return testing.Poll(ctx, func(ctx context.Context) error {
		events, err := t.readMotionEvents(ctx)
		if err != nil {
			return errors.Wrap(err, "failed to read motion event")
		}
		// Unlike ExpectMotionEvents, a non-matching event is not an error here;
		// keep scanning until any event satisfies the matcher.
		for i := 0; i < len(events); i++ {
			if err := matcher(&events[i]); err == nil {
				return nil
			}
		}
		return errors.New("no matching event received")
	}, defaultPollOptions)
}

// readMotionEvents unmarshalls the JSON string in the TextView representing the MotionEvents
// received by ArcMotionInputTest.apk, and returns it as a slice of motionEvent.
func (t *Tester) readMotionEvents(ctx context.Context) ([]MotionEvent, error) {
	// The test app publishes its received events as JSON in a TextView with this resource ID.
	view := t.d.Object(ui.ID(Package + ":id/motion_event"))
	text, err := view.GetText(ctx)
	if err != nil {
		return nil, err
	}
	var events []MotionEvent
	if err := json.Unmarshal([]byte(text), &events); err != nil {
		return nil, err
	}
	return events, nil
}

// ClearMotionEvents tells the test application to clear the events that it is currently reporting,
// and verifies that no events are reported. This is done by sending an intent with the appropriate
// action to Android, which is subsequently picked up by the MotionInputTest application and handled
// appropriately.
func (t *Tester) ClearMotionEvents(ctx context.Context) error {
	if err := t.act.Start(ctx, t.tconn, arc.WithIntentAction(intentActionClearEvents)); err != nil {
		return errors.Wrap(err, "failed to send the clear events intent")
	}
	// ExpectMotionEvents with no matchers asserts that exactly zero events are reported.
	if err := t.ExpectMotionEvents(ctx); err != nil {
		return errors.Wrap(err, "failed to verify that the reported MotionEvents were cleared")
	}
	return nil
}

// ExpectEventsAndClear is a convenience function that verifies expected events and clears the
// events to be ready for the next assertions.
func (t *Tester) ExpectEventsAndClear(ctx context.Context, matchers ...Matcher) error {
	if err := t.ExpectMotionEvents(ctx, matchers...); err != nil {
		return errors.Wrap(err, "failed to verify expected events")
	}
	if err := t.ClearMotionEvents(ctx); err != nil {
		return errors.Wrap(err, "failed to clear events")
	}
	return nil
}

// ActionSourceMatcher returns a motionEventMatcher that matches a motionEvent with the provided
// action and source.
func ActionSourceMatcher(a Action, s Source) Matcher {
	return func(event *MotionEvent) error {
		sourceMatches := false
		for _, v := range event.Sources {
			if v == s {
				sourceMatches = true
				break
			}
		}
		// NOTE(review): this wraps a possibly-nil err; relies on tast's errors.Wrapf
		// producing a usable error from a nil cause — confirm against the errors package.
		var err error
		if !sourceMatches {
			err = errors.Wrapf(err, "source does not match: got %v; want %s", event.Sources, s)
		}
		if event.Action != a {
			err = errors.Wrapf(err, "action does not match: got %s; want: %s", event.Action, a)
		}
		return err
	}
}

// SinglePointerMatcher returns a motionEventMatcher that matches a motionEvent with a single
// pointer that has the following axes: axisX, axisY, and axisPressure.
func SinglePointerMatcher(a Action, s Source, p coords.Point, pressure float64) Matcher {
	return func(event *MotionEvent) error {
		if err := ActionSourceMatcher(a, s)(event); err != nil {
			return err
		}
		if pointerCount := len(event.PointerAxes); pointerCount != 1 {
			return errors.Errorf("pointer count does not match: got: %d; want: %d", pointerCount, 1)
		}
		// axisMatcher compares one axis of the single pointer against an expected
		// value, within the given tolerance.
		axisMatcher := func(axis Axis, expected, epsilon float64) error {
			v := event.PointerAxes[0][axis]
			if math.Abs(v-expected) > epsilon {
				return errors.Errorf("value of axis %s did not match: got %.5f; want %.5f; epsilon %.5f", axis, v, expected, epsilon)
			}
			return nil
		}
		const (
			// coordinateAxisEpsilon is the epsilon value to be used when comparing axis values that
			// represent absolute display coordinates. Scaling and conversions from Chrome to Android's
			// display spaces means absolute coordinates can be off by up to two pixels.
			coordinateAxisEpsilon = 2e0
			// defaultAxisEpsilon is the epsilon value to be used when comparing axis values that do
			// not need to be scaled or converted, like pressure (which is in the range [0,1]). We
			// expect these values to be more precise.
			defaultAxisEpsilon = 1e-5
		)
		// NOTE(review): like ActionSourceMatcher, mismatches are accumulated by
		// wrapping a possibly-nil err via tast's errors.Wrap.
		var err error
		if e := axisMatcher(AxisX, float64(p.X), coordinateAxisEpsilon); e != nil {
			err = errors.Wrap(err, e.Error())
		}
		if e := axisMatcher(AxisY, float64(p.Y), coordinateAxisEpsilon); e != nil {
			err = errors.Wrap(err, e.Error())
		}
		if e := axisMatcher(AxisPressure, pressure, defaultAxisEpsilon); e != nil {
			err = errors.Wrap(err, e.Error())
		}
		return err
	}
}

// WaitForTestAppFocused polls the test app until its window reaches the wanted focused state.
func (t *Tester) WaitForTestAppFocused(ctx context.Context, wantFocused bool) error {
	return testing.Poll(ctx, func(ctx context.Context) error {
		focused, err := t.act.Focused(ctx)
		if err != nil {
			return err
		}
		if focused != wantFocused {
			return errors.Errorf("the focused state did not match: got %t; want %t", focused, wantFocused)
		}
		return nil
	}, defaultPollOptions)
}

// MatcherOr produces a Matcher that matches any of the provided matchers.
func MatcherOr(matchers ...Matcher) Matcher {
	return func(event *MotionEvent) error {
		for i := 0; i < len(matchers); i++ {
			if err := matchers[i](event); err == nil {
				return nil
			}
		}
		return errors.New("did not match any of the expected matchers")
	}
}
package api

import (
	"github.com/google/uuid"
	"github.com/jiangmitiao/cali/app/models"
	"github.com/jiangmitiao/cali/app/rcali"
	"github.com/revel/revel"
	"strconv"
	"time"
)

// User is the revel controller serving the /user JSONP endpoints.
type User struct {
	*revel.Controller
}

// /user
// Index returns a plain OK response wrapped for JSONP.
func (c User) Index() revel.Result {
	return c.RenderJSONP(c.Request.FormValue("callback"), models.NewOKApi())
}

// Login authenticates loginName/loginPassword and, on success, returns a fresh
// login session token to the caller.
func (c User) Login() revel.Result {
	callback := c.Request.FormValue("callback")
	var loginName string = c.Request.FormValue("loginName")
	var loginPassword string = c.Request.FormValue("loginPassword")
	// Reject empty credentials outright (StatusCode 401 in the API payload).
	if loginName == "" || loginPassword == "" {
		errStatus := models.NewErrorApiWithMessageAndInfo(c.Message("loginNameOrLoginPasswordError"), nil)
		errStatus.StatusCode = 401
		return c.RenderJSONP(callback, errStatus)
	}
	// Reject oversized credentials (StatusCode 402 in the API payload).
	if len(loginName) > 64 || len(loginPassword) > 64 {
		errStatus := models.NewErrorApiWithMessageAndInfo(c.Message("loginNameOrLoginPasswordError"), nil)
		errStatus.StatusCode = 402
		return c.RenderJSONP(callback, errStatus)
	}
	// Passwords are stored salted: compare SHA3-256(password + salt).
	if userInfo, exist := userService.GetUserByLoginName(loginName); exist && userInfo.LoginPassword == rcali.Sha3_256(loginPassword+userInfo.Salt) {
		//if exist and password correct
		// Session token derived from the stored password hash plus the current Unix
		// time. NOTE(review): not cryptographically random — consider a random token.
		loginSession := rcali.Sha3_256(userInfo.LoginPassword + strconv.FormatInt(time.Now().Unix(), 10))
		userService.FreshLoginSession(loginSession, userInfo.Id)
		return c.RenderJSONP(callback, models.NewOKApiWithMessageAndInfo(c.Message("loginSuccess"), loginSession))
	} else {
		errStatus := models.NewErrorApiWithMessageAndInfo(c.Message("loginNameOrLoginPasswordError"), nil)
		errStatus.StatusCode = 402
		return c.RenderJSONP(callback, errStatus)
	}
}

//get userinfo by session
// Info resolves the session to a user and returns it with the secret fields
// (salt, password hash) blanked out.
func (c User) Info() revel.Result {
	callback := c.Request.FormValue("callback")
	session := c.Request.FormValue("session")
	user, has := userService.GetLoginUser(session)
	if has {
		// Never leak credential material to the client.
		user.Salt = ""
		user.LoginPassword = ""
		return c.RenderJSONP(callback, models.NewOKApiWithInfo(user))
	} else {
		return c.RenderJSONP(callback,
			models.NewErrorApiWithMessageAndInfo(c.Message("loginNameOrLoginPasswordError"), nil))
	}
}

//find a session is or not login
// IsLogin reports (as an OK/Error API payload) whether the session maps to a user id.
func (c User) IsLogin() revel.Result {
	callback := c.Request.FormValue("callback")
	session := c.Request.FormValue("session")
	id, _ := rcali.GetUserIdByLoginSession(session)
	if id == "" {
		return c.RenderJSONP(callback, models.NewErrorApi())
	} else {
		return c.RenderJSONP(callback, models.NewOKApi())
	}
}

//delete the server's login cache
// Logout drops the server-side session entry; always reports OK.
func (c User) Logout() revel.Result {
	callback := c.Request.FormValue("callback")
	session := c.Request.FormValue("session")
	rcali.DeleteLoginSession(session)
	return c.RenderJSONP(callback, models.NewOKApi())
}

//regist a user ,if delete watcherUserRegist in role action ,then not allow to regist
// Regist creates a new user with a random per-user salt and a salted SHA3-256
// password hash.
func (c User) Regist() revel.Result {
	callback := c.Request.FormValue("callback")
	loginName := c.Request.FormValue("loginName")
	loginPassword := c.Request.FormValue("loginPassword")
	if loginName == "" || loginPassword == "" || len(loginName) > 64 || len(loginPassword) > 64 {
		return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo(c.Message("signupfail")+c.Message("loginNameOrLoginPasswordError"), nil))
	} else {
		salt := uuid.New().String()
		safePassword := rcali.Sha3_256(loginPassword + salt)
		newUser := models.UserInfo{
			Id:            uuid.New().String(),
			LoginName:     loginName,
			LoginPassword: safePassword,
			Salt:          salt,
			UserName:      loginName,
			Email:         "",
		}
		if userService.Regist(newUser) {
			return c.RenderJSONP(callback, models.NewOKApiWithMessageAndInfo(c.Message("signupsuccess"), nil))
		} else {
			return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo(c.Message("signupfail")+c.Message("loginNameOrLoginPasswordError"), nil))
		}
	}
}

// update userName and email by this method
// Update changes the logged-in user's display name and email (not credentials).
func (c User) Update() revel.Result {
	callback := c.Request.FormValue("callback")
	session := c.Request.FormValue("session")
	userName := c.Request.FormValue("userName")
	email := c.Request.FormValue("email")
	if len(userName) > 64 || len(email) > 128 {
		return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo(c.Message("emailOrUsernameIsTooLong"), nil))
	}
	if user, isLogin := userService.GetLoginUser(session); isLogin {
		user.UserName = userName
		user.Email = email
		// Img is intentionally cleared on update; avatar handling appears to live elsewhere.
		user.Img = ""
		if updateOK := userService.UpdateInfo(user); updateOK {
			return c.RenderJSONP(callback, models.NewOKApi())
		} else {
			return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo(c.Message("uncatchedError"), nil))
		}
	} else {
		return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo(c.Message("needLogin"), nil))
	}
}

// change the password ,need oldpassword and newpassword
// ChangePassword verifies the old password against the stored salted hash, then
// re-salts and re-hashes the new password.
func (c User) ChangePassword() revel.Result {
	callback := c.Request.FormValue("callback")
	session := c.Request.FormValue("session")
	oldLoginPassword := c.Request.FormValue("oldLoginPassword")
	loginPassword := c.Request.FormValue("loginPassword")
	if user, isLogin := userService.GetLoginUser(session); isLogin {
		if user.LoginPassword == rcali.Sha3_256(oldLoginPassword+user.Salt) {
			//oldpassword is ok
			// Fresh salt on every password change.
			user.Salt = uuid.New().String()
			user.LoginPassword = rcali.Sha3_256(loginPassword + user.Salt)
			if changed := userService.UpdatePassword(user); changed {
				return c.RenderJSONP(callback, models.NewOKApi())
			} else {
				return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo("uncatched error", nil))
			}
		} else {
			return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo("old password error", nil))
		}
	} else {
		return c.RenderJSONP(callback, models.NewErrorApiWithMessageAndInfo("no login", nil))
	}
}

// QueryUserCount returns the total user count; admin-only.
func (c User) QueryUserCount() revel.Result {
	callback := c.Request.FormValue("callback")
	session := c.Request.FormValue("session")
	if user, isLogin := userService.GetLoginUser(session); isLogin {
		role := userRoleService.GetRoleByUser(user.Id)
		if role.Name == "admin" {
			return c.RenderJSONP(callback, models.NewOKApiWithInfo(userService.QueryUserCount("")))
		}
	}
	return c.RenderJSONP(callback, models.NewErrorApi())
}

// QueryUser returns a paginated user list; admin-only.
func (c User) QueryUser() revel.Result {
callback := c.Request.FormValue("callback") session := c.Request.FormValue("session") limit, _ := strconv.Atoi(rcali.ValueOrDefault(c.Request.FormValue("limit"), rcali.UserListNumsStr)) start, _ := strconv.Atoi(rcali.ValueOrDefault(c.Request.FormValue("start"), "0")) if user, isLogin := userService.GetLoginUser(session); isLogin { role := userRoleService.GetRoleByUser(user.Id) if role.Name == "admin" { return c.RenderJSONP(callback, models.NewOKApiWithInfo(userService.QueryUser("", limit, start))) } } return c.RenderJSONP(callback, models.NewErrorApi()) } func (c User) Delete() revel.Result { callback := c.Request.FormValue("callback") session := c.Request.FormValue("session") userId := c.Request.FormValue("userId") if user, isLogin := userService.GetLoginUser(session); isLogin { role := userRoleService.GetRoleByUser(user.Id) if role.Name != "admin" { //delete login user go rcali.DeleteLoginUserId(userId) return c.RenderJSONP(callback, models.NewOKApiWithInfo(userService.DeleteUser(userId))) } } return c.RenderJSONP(callback, models.NewErrorApi()) }
package service

import (
	"shopping-cart/types"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/suite"
)

// SetupTest recreates the mocks before every test so expectations set in one
// test cannot leak into another.
func (suite *ServiceTestSuite) SetupTest() {
	suite.clientMock = new(MockApiClient)
	suite.serviceMock = new(MockApiService)
}

// ServiceTestSuite bundles the mocked dependencies shared by all service tests.
type ServiceTestSuite struct {
	suite.Suite
	clientMock  *MockApiClient
	serviceMock *MockApiService
}

// TestServiceTestSuite is the gotest entry point that runs the whole suite.
func TestServiceTestSuite(t *testing.T) {
	suite.Run(t, new(ServiceTestSuite))
}

func (suite *ServiceTestSuite) Test_Validate_Success() {
	suite.clientMock.On("GetUserByEmail").Return(nil)
	suite.clientMock.On("GetUserByName").Return(nil)
	service := UserService{dbsrv: suite.clientMock}
	// Act
	response := service.Validate(&types.User{Name: "nik", Email: "nik@gmail.com", Password: "nik", UserName: "nik"})
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

func (suite *ServiceTestSuite) Test_RegisterUser_Success() {
	suite.clientMock.On("InsertCart").Return(nil)
	suite.clientMock.On("InsertUser").Return(nil)
	service := UserService{dbsrv: suite.clientMock}
	// Act
	response := service.RegisterUser(&types.User{Name: "nik", Email: "nik@gmail.com", Password: "nik", UserName: "nik"})
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

func (suite *ServiceTestSuite) Test_ValidateCart_Success() {
	suite.clientMock.On("GetCartByID").Return(nil)
	suite.clientMock.On("GetItemByID").Return(nil)
	service := CartService{dbsrv: suite.clientMock}
	cart := types.Cart{ID: "5fc966a74d278b000141901f"}
	item := types.Item{Name: "shoe", Price: 2000, Quantity: 1}
	// Act
	response := service.Validate(&item, &cart)
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

func (suite *ServiceTestSuite) Test_AddToCart_Success() {
	suite.clientMock.On("GetCartByID").Return(nil)
	suite.clientMock.On("GetItemByID").Return(nil)
	suite.clientMock.On("UpdateCart").Return(nil)
	service := CartService{dbsrv: suite.clientMock}
	cart := types.Cart{ID: "5fc966a74d278b000141901f"}
	item := types.Item{Name: "shoe", Price: 2000, Quantity: 1}
	// Validate first so AddToCart operates on a checked cart, as in production flow.
	response := service.Validate(&item, &cart)
	// Act
	response = service.AddToCart(&cart)
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

func (suite *ServiceTestSuite) Test_ViewCart_Success() {
	suite.clientMock.On("GetCartByID").Return(nil)
	service := CartService{dbsrv: suite.clientMock}
	// Act
	_, err := service.ViewAllCarts("5fc966a74d278b000141901d")
	// Assert
	assert.Nil(suite.T(), err, "Error should be nil")
}

func (suite *ServiceTestSuite) Test_RemoveItem_Success() {
	suite.clientMock.On("GetCartByID").Return(nil)
	suite.clientMock.On("UpdateCart").Return(nil)
	service := CartService{dbsrv: suite.clientMock}
	cart := types.Cart{ID: "5fc966a74d278b000141901f"}
	// Act
	response := service.RemoveItem(&cart, "5fc966eb182897aed9b4bfa7")
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

func (suite *ServiceTestSuite) Test_ClearCart_Success() {
	suite.clientMock.On("GetCartByID").Return(nil)
	suite.clientMock.On("DeleteCart").Return(nil)
	service := CartService{dbsrv: suite.clientMock}
	// Act
	response := service.DeleteCart("5fc966a74d278b000141901f")
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

func (suite *ServiceTestSuite) Test_AddToInventory_Success() {
	suite.clientMock.On("GetItemByName").Return(nil)
	suite.clientMock.On("UpdateItemByID").Return(nil)
	suite.clientMock.On("InsertItem").Return(nil)
	service := InventoryService{dbsrv: suite.clientMock}
	item := types.Item{ID: "5fc966a74d278b000141901f"}
	// Act
	response := service.AddToInventory(&item)
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

// ViewInvetory
func (suite *ServiceTestSuite) Test_ViewInvetory_Success() {
	suite.clientMock.On("GetAllItems").Return(nil, nil)
	service := InventoryService{dbsrv: suite.clientMock}
	// Act
	items := types.ItemList{}
	response := service.ViewInvetory(&items)
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}

// RemoveItem
func (suite *ServiceTestSuite) Test_Inventory_RemoveItem_Success() {
	suite.clientMock.On("RemoveItem").Return(nil)
	suite.clientMock.On("RemoveAllItem").Return(nil)
	service := InventoryService{dbsrv: suite.clientMock}
	item := types.Item{}
	// Act
	response := service.RemoveItem(&item, "5fc966a74d278b000141901f")
	// Assert
	assert.Nil(suite.T(), response, "Response should be nil")
}
package main

import (
	"fmt"
)

// main demonstrates mutating an int through a pointer: the value starts at 70
// and is overwritten with 7 by method before being printed.
func main() {
	// The original assigned an int literal to a *int and then passed &i
	// (a **int) to method(*int); neither compiles. Use a concrete int and
	// take its address once.
	v := 70
	i := &v
	i = method(i)
	fmt.Println(*i) // prints 7
}

// method overwrites the value i points to with 7 and returns the same pointer.
func method(i *int) *int {
	// Was `&i = 7`: the address-of expression is not assignable. Dereference
	// instead to write through the pointer.
	*i = 7
	return i
}
package exchange

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"math/rand"

	"github.com/buger/jsonparser"
	"github.com/prebid/go-gdpr/vendorconsent"
	gpplib "github.com/prebid/go-gpp"
	gppConstants "github.com/prebid/go-gpp/constants"
	"github.com/prebid/openrtb/v19/openrtb2"

	"github.com/prebid/prebid-server/config"
	"github.com/prebid/prebid-server/errortypes"
	"github.com/prebid/prebid-server/firstpartydata"
	"github.com/prebid/prebid-server/gdpr"
	"github.com/prebid/prebid-server/metrics"
	"github.com/prebid/prebid-server/openrtb_ext"
	"github.com/prebid/prebid-server/privacy"
	"github.com/prebid/prebid-server/privacy/ccpa"
	"github.com/prebid/prebid-server/privacy/lmt"
	"github.com/prebid/prebid-server/schain"
	"github.com/prebid/prebid-server/stored_responses"
	"github.com/prebid/prebid-server/util/ptrutil"
)

// channelTypeMap translates a metrics request type into the config channel used
// for per-channel privacy settings lookups.
var channelTypeMap = map[metrics.RequestType]config.ChannelType{
	metrics.ReqTypeAMP:      config.ChannelAMP,
	metrics.ReqTypeORTB2App: config.ChannelApp,
	metrics.ReqTypeVideo:    config.ChannelVideo,
	metrics.ReqTypeORTB2Web: config.ChannelWeb,
}

// unknownBidder is passed to enforcers when the check is request-level rather
// than bidder-specific.
const unknownBidder string = ""

// requestSplitter carries the per-host dependencies needed to split an auction
// request into sanitized per-bidder requests.
type requestSplitter struct {
	bidderToSyncerKey map[string]string
	me                metrics.MetricsEngine
	privacyConfig     config.Privacy
	gdprPermsBuilder  gdpr.PermissionsBuilder
	hostSChainNode    *openrtb2.SupplyChainNode
	bidderInfo        config.BidderInfos
}

// cleanOpenRTBRequests splits the input request into requests which are sanitized for each bidder. Intended behavior is:
//
//  1. BidRequest.Imp[].Ext will only contain the "prebid" field and a "bidder" field which has the params for the intended Bidder.
//  2. Every BidRequest.Imp[] requested Bids from the Bidder who keys it.
//  3. BidRequest.User.BuyerUID will be set to that Bidder's ID.
func (rs *requestSplitter) cleanOpenRTBRequests(ctx context.Context,
	auctionReq AuctionRequest,
	requestExt *openrtb_ext.ExtRequest,
	gdprDefaultValue gdpr.Signal,
) (allowedBidderRequests []BidderRequest, privacyLabels metrics.PrivacyLabels, errs []error) {
	req := auctionReq.BidRequestWrapper
	aliases, errs := parseAliases(req.BidRequest)
	if len(errs) > 0 {
		return
	}

	allowedBidderRequests = make([]BidderRequest, 0)

	bidderImpWithBidResp := stored_responses.InitStoredBidResponses(req.BidRequest, auctionReq.StoredBidResponses)

	impsByBidder, err := splitImps(req.BidRequest.Imp)
	if err != nil {
		errs = []error{err}
		return
	}

	aliasesGVLIDs, errs := parseAliasesGVLIDs(req.BidRequest)
	if len(errs) > 0 {
		return
	}

	var allBidderRequests []BidderRequest
	allBidderRequests, errs = getAuctionBidderRequests(auctionReq, requestExt, rs.bidderToSyncerKey, impsByBidder, aliases, rs.hostSChainNode)

	bidderNameToBidderReq := buildBidResponseRequest(req.BidRequest, bidderImpWithBidResp, aliases, auctionReq.BidderImpReplaceImpID)
	//this function should be executed after getAuctionBidderRequests
	allBidderRequests = mergeBidderRequests(allBidderRequests, bidderNameToBidderReq)

	// A GPP parse failure is recorded but does not abort the auction.
	var gpp gpplib.GppContainer
	if req.BidRequest.Regs != nil && len(req.BidRequest.Regs.GPP) > 0 {
		gpp, err = gpplib.Parse(req.BidRequest.Regs.GPP)
		if err != nil {
			errs = append(errs, err)
		}
	}

	gdprSignal, err := getGDPR(req)
	if err != nil {
		errs = append(errs, err)
	}
	consent, err := getConsent(req, gpp)
	if err != nil {
		errs = append(errs, err)
	}
	// An ambiguous GDPR signal falls back to the host-configured default.
	gdprApplies := gdprSignal == gdpr.SignalYes || (gdprSignal == gdpr.SignalAmbiguous && gdprDefaultValue == gdpr.SignalYes)

	ccpaEnforcer, err := extractCCPA(req.BidRequest, rs.privacyConfig, &auctionReq.Account, aliases, channelTypeMap[auctionReq.LegacyLabels.RType], gpp)
	if err != nil {
		errs = append(errs, err)
	}

	lmtEnforcer := extractLMT(req.BidRequest, rs.privacyConfig)

	// request level privacy policies
	coppa := req.BidRequest.Regs != nil && req.BidRequest.Regs.COPPA == 1
	lmt := lmtEnforcer.ShouldEnforce(unknownBidder)

	privacyLabels.CCPAProvided = ccpaEnforcer.CanEnforce()
	privacyLabels.CCPAEnforced = ccpaEnforcer.ShouldEnforce(unknownBidder)
	privacyLabels.COPPAEnforced = coppa
	privacyLabels.LMTEnforced = lmt

	var gdprEnforced bool
	var gdprPerms gdpr.Permissions = &gdpr.AlwaysAllow{}

	if gdprApplies {
		gdprEnforced = auctionReq.TCF2Config.ChannelEnabled(channelTypeMap[auctionReq.LegacyLabels.RType])
	}

	if gdprEnforced {
		privacyLabels.GDPREnforced = true
		// TCF version is only labeled when the consent string parses cleanly.
		parsedConsent, err := vendorconsent.ParseString(consent)
		if err == nil {
			version := int(parsedConsent.Version())
			privacyLabels.GDPRTCFVersion = metrics.TCFVersionToValue(version)
		}

		gdprRequestInfo := gdpr.RequestInfo{
			AliasGVLIDs: aliasesGVLIDs,
			Consent:     consent,
			GDPRSignal:  gdprSignal,
			PublisherID: auctionReq.LegacyLabels.PubID,
		}
		gdprPerms = rs.gdprPermsBuilder(auctionReq.TCF2Config, gdprRequestInfo)
	}

	// bidder level privacy policies
	for _, bidderRequest := range allBidderRequests {
		privacyEnforcement := privacy.Enforcement{
			COPPA: coppa,
			LMT:   lmt,
		}

		// fetchBids activity
		scopedName := privacy.Component{Type: privacy.ComponentTypeBidder, Name: bidderRequest.BidderName.String()}
		fetchBidsActivityAllowed := auctionReq.Activities.Allow(privacy.ActivityFetchBids, scopedName)
		if !fetchBidsActivityAllowed {
			// skip the call to a bidder if fetchBids activity is not allowed
			// do not add this bidder to allowedBidderRequests
			continue
		}

		var auctionPermissions gdpr.AuctionPermissions
		var gdprErr error
		if gdprEnforced {
			auctionPermissions, gdprErr = gdprPerms.AuctionActivitiesAllowed(ctx, bidderRequest.BidderCoreName, bidderRequest.BidderName)
			if !auctionPermissions.AllowBidRequest {
				// auction request is not permitted by GDPR
				// do not add this bidder to allowedBidderRequests
				rs.me.RecordAdapterGDPRRequestBlocked(bidderRequest.BidderCoreName)
				continue
			}
		}

		passIDActivityAllowed := auctionReq.Activities.Allow(privacy.ActivityTransmitUserFPD, scopedName)
		if !passIDActivityAllowed {
			privacyEnforcement.UFPD = true
		} else {
			// run existing policies (GDPR, CCPA, COPPA, LMT)
			// potentially block passing IDs based on GDPR
			if gdprEnforced {
				if gdprErr == nil {
					privacyEnforcement.GDPRID = !auctionPermissions.PassID
				} else {
					// On a GDPR evaluation error, fail closed: block IDs.
					privacyEnforcement.GDPRID = true
				}
			}
			// potentially block passing IDs based on CCPA
			privacyEnforcement.CCPA = ccpaEnforcer.ShouldEnforce(bidderRequest.BidderName.String())
		}

		passGeoActivityAllowed := auctionReq.Activities.Allow(privacy.ActivityTransmitPreciseGeo, scopedName)
		if !passGeoActivityAllowed {
			privacyEnforcement.PreciseGeo = true
		} else {
			// run existing policies (GDPR, CCPA, COPPA, LMT)
			// potentially block passing geo based on GDPR
			if gdprEnforced {
				if gdprErr == nil {
					privacyEnforcement.GDPRGeo = !auctionPermissions.PassGeo
				} else {
					// On a GDPR evaluation error, fail closed: block precise geo.
					privacyEnforcement.GDPRGeo = true
				}
			}
			// potentially block passing geo based on CCPA
			privacyEnforcement.CCPA = ccpaEnforcer.ShouldEnforce(bidderRequest.BidderName.String())
		}

		if auctionReq.FirstPartyData != nil && auctionReq.FirstPartyData[bidderRequest.BidderName] != nil {
			applyFPD(auctionReq.FirstPartyData[bidderRequest.BidderName], bidderRequest.BidRequest)
		}

		privacyEnforcement.TID = !auctionReq.Activities.Allow(privacy.ActivityTransmitTids, scopedName)

		privacyEnforcement.Apply(bidderRequest.BidRequest)
		allowedBidderRequests = append(allowedBidderRequests, bidderRequest)

		// GPP downgrade: always downgrade unless we can confirm GPP is supported
		if shouldSetLegacyPrivacy(rs.bidderInfo, string(bidderRequest.BidderCoreName)) {
			setLegacyGDPRFromGPP(bidderRequest.BidRequest, gpp)
			setLegacyUSPFromGPP(bidderRequest.BidRequest, gpp)
		}
	}

	return
}

// shouldSetLegacyPrivacy reports whether legacy GDPR/USP fields must be written
// for a bidder: true unless the bidder's config explicitly declares GPP support.
func shouldSetLegacyPrivacy(bidderInfo config.BidderInfos, bidder string) bool {
	binfo, defined := bidderInfo[bidder]
	if !defined || binfo.OpenRTB == nil {
		return true
	}
	return !binfo.OpenRTB.GPPSupported
}

// ccpaEnabled resolves CCPA enforcement for the request's channel, preferring
// the account-level override and falling back to the host-level setting.
func ccpaEnabled(account *config.Account, privacyConfig config.Privacy, requestType config.ChannelType) bool {
	if accountEnabled :=
		account.CCPA.EnabledForChannelType(requestType); accountEnabled != nil {
		return *accountEnabled
	}
	return privacyConfig.CCPA.Enforce
}

// extractCCPA builds the CCPA policy enforcer for this request, honoring
// account/host enablement resolved by ccpaEnabled.
func extractCCPA(orig *openrtb2.BidRequest, privacyConfig config.Privacy, account *config.Account, aliases map[string]string, requestType config.ChannelType, gpp gpplib.GppContainer) (privacy.PolicyEnforcer, error) {
	// Quick extra wrapper until RequestWrapper makes its way into CleanRequests
	ccpaPolicy, err := ccpa.ReadFromRequestWrapper(&openrtb_ext.RequestWrapper{BidRequest: orig}, gpp)
	if err != nil {
		return privacy.NilPolicyEnforcer{}, err
	}

	validBidders := GetValidBidders(aliases)
	ccpaParsedPolicy, err := ccpaPolicy.Parse(validBidders)
	if err != nil {
		return privacy.NilPolicyEnforcer{}, err
	}

	ccpaEnforcer := privacy.EnabledPolicyEnforcer{
		Enabled:        ccpaEnabled(account, privacyConfig, requestType),
		PolicyEnforcer: ccpaParsedPolicy,
	}
	return ccpaEnforcer, nil
}

// extractLMT builds the Limit-Ad-Tracking enforcer from the request, gated by
// the host-level LMT enforcement setting.
func extractLMT(orig *openrtb2.BidRequest, privacyConfig config.Privacy) privacy.PolicyEnforcer {
	return privacy.EnabledPolicyEnforcer{
		Enabled:        privacyConfig.LMT.Enforce,
		PolicyEnforcer: lmt.ReadFromRequest(orig),
	}
}

// ExtractReqExtBidderParamsMap returns request.ext.prebid.bidderparams as a map
// keyed by bidder name; (nil, nil) when the field is absent.
func ExtractReqExtBidderParamsMap(bidRequest *openrtb2.BidRequest) (map[string]json.RawMessage, error) {
	if bidRequest == nil {
		return nil, errors.New("error bidRequest should not be nil")
	}

	reqExt := &openrtb_ext.ExtRequest{}
	if len(bidRequest.Ext) > 0 {
		err := json.Unmarshal(bidRequest.Ext, &reqExt)
		if err != nil {
			return nil, fmt.Errorf("error decoding Request.ext : %s", err.Error())
		}
	}

	if reqExt.Prebid.BidderParams == nil {
		return nil, nil
	}

	var bidderParams map[string]json.RawMessage
	err := json.Unmarshal(reqExt.Prebid.BidderParams, &bidderParams)
	if err != nil {
		return nil, err
	}
	return bidderParams, nil
}

// getAuctionBidderRequests builds one BidderRequest per bidder from the
// pre-split imps, writing per-bidder ext, schain and cookie-sync labels.
func getAuctionBidderRequests(auctionRequest AuctionRequest,
	requestExt *openrtb_ext.ExtRequest,
	bidderToSyncerKey map[string]string,
	impsByBidder map[string][]openrtb2.Imp,
	aliases map[string]string,
	hostSChainNode *openrtb2.SupplyChainNode) ([]BidderRequest, []error) {
	bidderRequests := make([]BidderRequest, 0, len(impsByBidder))

	req := auctionRequest.BidRequestWrapper
	explicitBuyerUIDs, err := extractBuyerUIDs(req.BidRequest.User)
	if err != nil {
		return nil, []error{err}
	}

	bidderParamsInReqExt, err := ExtractReqExtBidderParamsMap(req.BidRequest)
	if err != nil {
		return nil, []error{err}
	}

	sChainWriter, err := schain.NewSChainWriter(requestExt, hostSChainNode)
	if err != nil {
		return nil, []error{err}
	}

	var errs []error
	for bidder, imps := range impsByBidder {
		coreBidder := resolveBidder(bidder, aliases)

		// Shallow copy of the request; only Imp and Ext are rewritten per bidder.
		reqCopy := *req.BidRequest
		reqCopy.Imp = imps

		sChainWriter.Write(&reqCopy, bidder)

		reqCopy.Ext, err = buildRequestExtForBidder(bidder, req.BidRequest.Ext, requestExt, bidderParamsInReqExt, auctionRequest.Account.AlternateBidderCodes)
		if err != nil {
			return nil, []error{err}
		}

		// An eid-permissions failure skips only this bidder; others proceed.
		if err := removeUnpermissionedEids(&reqCopy, bidder, requestExt); err != nil {
			errs = append(errs, fmt.Errorf("unable to enforce request.ext.prebid.data.eidpermissions because %v", err))
			continue
		}

		bidderRequest := BidderRequest{
			BidderName:     openrtb_ext.BidderName(bidder),
			BidderCoreName: coreBidder,
			BidRequest:     &reqCopy,
			BidderLabels: metrics.AdapterLabels{
				Source:      auctionRequest.LegacyLabels.Source,
				RType:       auctionRequest.LegacyLabels.RType,
				Adapter:     coreBidder,
				PubID:       auctionRequest.LegacyLabels.PubID,
				CookieFlag:  auctionRequest.LegacyLabels.CookieFlag,
				AdapterBids: metrics.AdapterBidPresent,
			},
		}

		// App requests have no cookies, so the no-sync label only applies to web.
		syncerKey := bidderToSyncerKey[string(coreBidder)]
		if hadSync := prepareUser(&reqCopy, bidder, syncerKey, explicitBuyerUIDs, auctionRequest.UserSyncs); !hadSync && req.BidRequest.App == nil {
			bidderRequest.BidderLabels.CookieFlag = metrics.CookieFlagNo
		} else {
			bidderRequest.BidderLabels.CookieFlag = metrics.CookieFlagYes
		}
		bidderRequests = append(bidderRequests, bidderRequest)
	}
	return bidderRequests, errs
}

// buildRequestExtForBidder rebuilds request.ext for a single bidder, keeping
// only the allowed prebid fields plus that bidder's params and alternate codes.
func buildRequestExtForBidder(bidder string, requestExt json.RawMessage, requestExtParsed *openrtb_ext.ExtRequest,
bidderParamsInReqExt map[string]json.RawMessage, cfgABC *openrtb_ext.ExtAlternateBidderCodes) (json.RawMessage, error) {
	// Resolve alternatebiddercode for current bidder
	var reqABC *openrtb_ext.ExtAlternateBidderCodes
	if len(requestExt) != 0 && requestExtParsed != nil && requestExtParsed.Prebid.AlternateBidderCodes != nil {
		reqABC = requestExtParsed.Prebid.AlternateBidderCodes
	}
	alternateBidderCodes := buildRequestExtAlternateBidderCodes(bidder, cfgABC, reqABC)

	// Nothing to write: no incoming ext and no alternate bidder codes.
	if (len(requestExt) == 0 || requestExtParsed == nil) && alternateBidderCodes == nil {
		return nil, nil
	}

	// Resolve Bidder Params
	var bidderParams json.RawMessage
	if bidderParamsInReqExt != nil {
		bidderParams = bidderParamsInReqExt[bidder]
	}

	// Copy Allowed Fields
	// Per: https://docs.prebid.org/prebid-server/endpoints/openrtb2/pbs-endpoint-auction.html#prebid-server-ortb2-extension-summary
	prebid := openrtb_ext.ExtRequestPrebid{
		BidderParams:         bidderParams,
		AlternateBidderCodes: alternateBidderCodes,
	}

	if requestExtParsed != nil {
		prebid.Channel = requestExtParsed.Prebid.Channel
		prebid.CurrencyConversions = requestExtParsed.Prebid.CurrencyConversions
		prebid.Debug = requestExtParsed.Prebid.Debug
		prebid.Integration = requestExtParsed.Prebid.Integration
		prebid.MultiBid = buildRequestExtMultiBid(bidder, requestExtParsed.Prebid.MultiBid, alternateBidderCodes)
		prebid.Sdk = requestExtParsed.Prebid.Sdk
		prebid.Server = requestExtParsed.Prebid.Server
	}

	// Marshal New Prebid Object
	prebidJson, err := json.Marshal(prebid)
	if err != nil {
		return nil, err
	}

	// Parse Existing Ext
	extMap := make(map[string]json.RawMessage)
	if len(requestExt) != 0 {
		if err := json.Unmarshal(requestExt, &extMap); err != nil {
			return nil, err
		}
	}

	// Update Ext With Prebid Json
	// An empty prebid object is removed entirely rather than sent as "{}".
	if bytes.Equal(prebidJson, []byte(`{}`)) {
		delete(extMap, "prebid")
	} else {
		extMap["prebid"] = prebidJson
	}

	if len(extMap) > 0 {
		return json.Marshal(extMap)
	} else {
		return nil, nil
	}
}

// buildRequestExtAlternateBidderCodes narrows the alternate-bidder-codes config
// down to the single bidder being processed; the request-level setting wins
// over the account-level one (signature continues in the next chunk).
func buildRequestExtAlternateBidderCodes(bidder string, accABC
*openrtb_ext.ExtAlternateBidderCodes, reqABC *openrtb_ext.ExtAlternateBidderCodes) *openrtb_ext.ExtAlternateBidderCodes {
	// Request-level config takes precedence over account-level config.
	if reqABC != nil {
		alternateBidderCodes := &openrtb_ext.ExtAlternateBidderCodes{
			Enabled: reqABC.Enabled,
		}
		if bidderCodes, ok := reqABC.Bidders[bidder]; ok {
			alternateBidderCodes.Bidders = map[string]openrtb_ext.ExtAdapterAlternateBidderCodes{
				bidder: bidderCodes,
			}
		}
		return alternateBidderCodes
	}

	if accABC != nil {
		alternateBidderCodes := &openrtb_ext.ExtAlternateBidderCodes{
			Enabled: accABC.Enabled,
		}
		if bidderCodes, ok := accABC.Bidders[bidder]; ok {
			alternateBidderCodes.Bidders = map[string]openrtb_ext.ExtAdapterAlternateBidderCodes{
				bidder: bidderCodes,
			}
		}
		return alternateBidderCodes
	}

	return nil
}

// buildRequestExtMultiBid filters request-level multibid config down to the
// entries relevant to this adapter (directly, or via its allowed alternate
// bidder codes). Returns nil when nothing applies.
func buildRequestExtMultiBid(adapter string, reqMultiBid []*openrtb_ext.ExtMultiBid, adapterABC *openrtb_ext.ExtAlternateBidderCodes) []*openrtb_ext.ExtMultiBid {
	adapterMultiBid := make([]*openrtb_ext.ExtMultiBid, 0)
	for _, multiBid := range reqMultiBid {
		if multiBid.Bidder != "" {
			if multiBid.Bidder == adapter || isBidderInExtAlternateBidderCodes(adapter, multiBid.Bidder, adapterABC) {
				adapterMultiBid = append(adapterMultiBid, multiBid)
			}
		} else {
			// Bidders-list form: keep each matching name as its own entry.
			for _, bidder := range multiBid.Bidders {
				if bidder == adapter || isBidderInExtAlternateBidderCodes(adapter, bidder, adapterABC) {
					adapterMultiBid = append(adapterMultiBid, &openrtb_ext.ExtMultiBid{
						Bidders: []string{bidder},
						MaxBids: multiBid.MaxBids,
					})
				}
			}
		}
	}

	if len(adapterMultiBid) > 0 {
		return adapterMultiBid
	}

	return nil
}

// isBidderInExtAlternateBidderCodes reports whether currentMultiBidBidder is an
// allowed alternate bidder code for adapter ("*" allows everything).
func isBidderInExtAlternateBidderCodes(adapter, currentMultiBidBidder string, adapterABC *openrtb_ext.ExtAlternateBidderCodes) bool {
	if adapterABC != nil {
		if abc, ok := adapterABC.Bidders[adapter]; ok {
			for _, bidder := range abc.AllowedBidderCodes {
				if bidder == "*" || bidder == currentMultiBidBidder {
					return true
				}
			}
		}
	}
	return false
}

// extractBuyerUIDs parses the values from user.ext.prebid.buyeruids, and then deletes those values from the ext.
// This prevents a Bidder from using these values to figure out who else is involved in the Auction.
// Note: this mutates user.Ext in place (removing the prebid object, or the
// whole ext when nothing else remains).
func extractBuyerUIDs(user *openrtb2.User) (map[string]string, error) {
	if user == nil {
		return nil, nil
	}
	if len(user.Ext) == 0 {
		return nil, nil
	}

	var userExt openrtb_ext.ExtUser
	if err := json.Unmarshal(user.Ext, &userExt); err != nil {
		return nil, err
	}
	if userExt.Prebid == nil {
		return nil, nil
	}
	// The API guarantees that user.ext.prebid.buyeruids exists and has at least one ID defined,
	// as long as user.ext.prebid exists.
	buyerUIDs := userExt.Prebid.BuyerUIDs
	userExt.Prebid = nil

	// Remarshal (instead of removing) if the ext has other known fields
	if userExt.Consent != "" || len(userExt.Eids) > 0 {
		if newUserExtBytes, err := json.Marshal(userExt); err != nil {
			return nil, err
		} else {
			user.Ext = newUserExtBytes
		}
	} else {
		user.Ext = nil
	}
	return buyerUIDs, nil
}

// splitImps takes a list of Imps and returns a map of imps which have been sanitized for each bidder.
//
// For example, suppose imps has two elements. One goes to rubicon, while the other goes to appnexus and index.
// The returned map will have three keys: rubicon, appnexus, and index--each with one Imp.
// The "imp.ext" value of the appnexus Imp will only contain the "prebid" values, and "appnexus" value at the "bidder" key.
// The "imp.ext" value of the rubicon Imp will only contain the "prebid" values, and "rubicon" value at the "bidder" key.
//
// The goal here is so that Bidders only get Imps and Imp.Ext values which are intended for them.
func splitImps(imps []openrtb2.Imp) (map[string][]openrtb2.Imp, error) {
	bidderImps := make(map[string][]openrtb2.Imp)

	for i, imp := range imps {
		var impExt map[string]json.RawMessage
		if err := json.Unmarshal(imp.Ext, &impExt); err != nil {
			return nil, fmt.Errorf("invalid json for imp[%d]: %v", i, err)
		}

		var impExtPrebid map[string]json.RawMessage
		if impExtPrebidJSON, exists := impExt[openrtb_ext.PrebidExtKey]; exists {
			// validation already performed by impExt unmarshal. no error is possible here, proven by tests.
			json.Unmarshal(impExtPrebidJSON, &impExtPrebid)
		}

		var impExtPrebidBidder map[string]json.RawMessage
		if impExtPrebidBidderJSON, exists := impExtPrebid[openrtb_ext.PrebidExtBidderKey]; exists {
			// validation already performed by impExt unmarshal. no error is possible here, proven by tests.
			json.Unmarshal(impExtPrebidBidderJSON, &impExtPrebidBidder)
		}

		sanitizedImpExt, err := createSanitizedImpExt(impExt, impExtPrebid)
		if err != nil {
			return nil, fmt.Errorf("unable to remove other bidder fields for imp[%d]: %v", i, err)
		}

		// Fan the imp out: one copy per bidder, each carrying only its own
		// params under the "bidder" key of the sanitized ext.
		for bidder, bidderExt := range impExtPrebidBidder {
			impCopy := imp

			sanitizedImpExt[openrtb_ext.PrebidExtBidderKey] = bidderExt

			impExtJSON, err := json.Marshal(sanitizedImpExt)
			if err != nil {
				return nil, fmt.Errorf("unable to remove other bidder fields for imp[%d]: cannot marshal ext: %v", i, err)
			}
			impCopy.Ext = impExtJSON

			bidderImps[bidder] = append(bidderImps[bidder], impCopy)
		}
	}

	return bidderImps, nil
}

// allowedImpExtFields lists the reserved imp[].ext keys that are forwarded to
// every bidder (everything else is assumed to be a bidder name and stripped).
var allowedImpExtFields = map[string]interface{}{
	openrtb_ext.AuctionEnvironmentKey:       struct{}{},
	openrtb_ext.FirstPartyDataExtKey:        struct{}{},
	openrtb_ext.FirstPartyDataContextExtKey: struct{}{},
	openrtb_ext.GPIDKey:                     struct{}{},
	openrtb_ext.SKAdNExtKey:                 struct{}{},
	openrtb_ext.TIDKey:                      struct{}{},
}

// allowedImpExtPrebidFields lists the imp[].ext.prebid keys forwarded to bidders.
var allowedImpExtPrebidFields = map[string]interface{}{
	openrtb_ext.IsRewardedInventoryKey: struct{}{},
	openrtb_ext.OptionsKey:             struct{}{},
}

// createSanitizedImpExt copies only the allowed imp[].ext and imp[].ext.prebid
// fields into a fresh map (return type continues in the next chunk).
func createSanitizedImpExt(impExt, impExtPrebid map[string]json.RawMessage)
(map[string]json.RawMessage, error) {
	sanitizedImpExt := make(map[string]json.RawMessage, 6)
	sanitizedImpPrebidExt := make(map[string]json.RawMessage, 2)

	// copy allowed imp[].ext.prebid fields
	for k := range allowedImpExtPrebidFields {
		if v, exists := impExtPrebid[k]; exists {
			sanitizedImpPrebidExt[k] = v
		}
	}

	// marshal sanitized imp[].ext.prebid
	if len(sanitizedImpPrebidExt) > 0 {
		if impExtPrebidJSON, err := json.Marshal(sanitizedImpPrebidExt); err == nil {
			sanitizedImpExt[openrtb_ext.PrebidExtKey] = impExtPrebidJSON
		} else {
			return nil, fmt.Errorf("cannot marshal ext.prebid: %v", err)
		}
	}

	// copy reserved imp[].ext fields known to not be bidder names
	for k := range allowedImpExtFields {
		if v, exists := impExt[k]; exists {
			sanitizedImpExt[k] = v
		}
	}

	return sanitizedImpExt, nil
}

// prepareUser changes req.User so that it's ready for the given bidder.
// This *will* mutate the request, but will *not* mutate any objects nested inside it.
//
// In this function, "givenBidder" may or may not be an alias. "coreBidder" must *not* be an alias.
// It returns true if a Cookie User Sync existed, and false otherwise.
func prepareUser(req *openrtb2.BidRequest, givenBidder, syncerKey string, explicitBuyerUIDs map[string]string, usersyncs IdFetcher) bool {
	cookieId, hadCookie, _ := usersyncs.GetUID(syncerKey)

	// An explicit buyeruid from user.ext.prebid.buyeruids wins over the cookie.
	if id, ok := explicitBuyerUIDs[givenBidder]; ok {
		req.User = copyWithBuyerUID(req.User, id)
	} else if hadCookie {
		req.User = copyWithBuyerUID(req.User, cookieId)
	}

	return hadCookie
}

// copyWithBuyerUID either overwrites the BuyerUID property on user with the argument, or returns
// a new (empty) User with the BuyerUID already set.
// copyWithBuyerUID only fills BuyerUID when it is currently empty; an existing
// BuyerUID is left untouched and the original pointer is returned unmodified.
func copyWithBuyerUID(user *openrtb2.User, buyerUID string) *openrtb2.User {
	if user == nil {
		return &openrtb2.User{
			BuyerUID: buyerUID,
		}
	}
	if user.BuyerUID == "" {
		clone := *user
		clone.BuyerUID = buyerUID
		return &clone
	}
	return user
}

// removeUnpermissionedEids modifies the request to remove any request.user.ext.eids not permissions for the specific bidder
func removeUnpermissionedEids(request *openrtb2.BidRequest, bidder string, requestExt *openrtb_ext.ExtRequest) error {
	// ensure request might have eids (as much as we can check before unmarshalling)
	if request.User == nil || len(request.User.Ext) == 0 {
		return nil
	}

	// ensure request has eid permissions to enforce
	if requestExt == nil || requestExt.Prebid.Data == nil || len(requestExt.Prebid.Data.EidPermissions) == 0 {
		return nil
	}

	// low level unmarshal to preserve other request.user.ext values. prebid server is non-destructive.
	var userExt map[string]json.RawMessage
	if err := json.Unmarshal(request.User.Ext, &userExt); err != nil {
		return err
	}

	eidsJSON, eidsSpecified := userExt["eids"]
	if !eidsSpecified {
		return nil
	}

	var eids []openrtb2.EID
	if err := json.Unmarshal(eidsJSON, &eids); err != nil {
		return err
	}

	// exit early if there are no eids (empty array)
	if len(eids) == 0 {
		return nil
	}

	// translate eid permissions to a map for quick lookup
	eidRules := make(map[string][]string)
	for _, p := range requestExt.Prebid.Data.EidPermissions {
		eidRules[p.Source] = p.Bidders
	}

	eidsAllowed := make([]openrtb2.EID, 0, len(eids))
	for _, eid := range eids {
		// A source with no rule is allowed for every bidder by default.
		allowed := false
		if rule, hasRule := eidRules[eid.Source]; hasRule {
			for _, ruleBidder := range rule {
				if ruleBidder == "*" || ruleBidder == bidder {
					allowed = true
					break
				}
			}
		} else {
			allowed = true
		}

		if allowed {
			eidsAllowed = append(eidsAllowed, eid)
		}
	}

	// exit early if all eids are allowed and nothing needs to be removed
	if len(eids) == len(eidsAllowed) {
		return nil
	}

	// marshal eidsAllowed back to userExt
	if len(eidsAllowed) == 0 {
		delete(userExt, "eids")
	}
else {
		eidsRaw, err := json.Marshal(eidsAllowed)
		if err != nil {
			return err
		}
		userExt["eids"] = eidsRaw
	}

	// exit early if userExt is empty
	if len(userExt) == 0 {
		setUserExtWithCopy(request, nil)
		return nil
	}

	userExtJSON, err := json.Marshal(userExt)
	if err != nil {
		return err
	}
	setUserExtWithCopy(request, userExtJSON)
	return nil
}

// setUserExtWithCopy replaces request.User with a shallow copy carrying the new
// Ext, so the caller's original User object is never mutated.
func setUserExtWithCopy(request *openrtb2.BidRequest, userExtJSON json.RawMessage) {
	userCopy := *request.User
	userCopy.Ext = userExtJSON
	request.User = &userCopy
}

// resolveBidder returns the known BidderName associated with bidder, if bidder is an alias. If it's not an alias, the bidder is returned.
func resolveBidder(bidder string, aliases map[string]string) openrtb_ext.BidderName {
	if coreBidder, ok := aliases[bidder]; ok {
		return openrtb_ext.BidderName(coreBidder)
	}
	return openrtb_ext.BidderName(bidder)
}

// parseAliases parses the aliases from the BidRequest
func parseAliases(orig *openrtb2.BidRequest) (map[string]string, []error) {
	var aliases map[string]string
	if value, dataType, _, err := jsonparser.Get(orig.Ext, openrtb_ext.PrebidExtKey, "aliases"); dataType == jsonparser.Object && err == nil {
		if err := json.Unmarshal(value, &aliases); err != nil {
			return nil, []error{err}
		}
	} else if dataType != jsonparser.NotExist && err != jsonparser.KeyPathNotFoundError {
		// A missing key is not an error; anything else is reported.
		return nil, []error{err}
	}
	return aliases, nil
}

// parseAliasesGVLIDs parses the Bidder Alias GVLIDs from the BidRequest
func parseAliasesGVLIDs(orig *openrtb2.BidRequest) (map[string]uint16, []error) {
	var aliasesGVLIDs map[string]uint16
	if value, dataType, _, err := jsonparser.Get(orig.Ext, openrtb_ext.PrebidExtKey, "aliasgvlids"); dataType == jsonparser.Object && err == nil {
		if err := json.Unmarshal(value, &aliasesGVLIDs); err != nil {
			return nil, []error{err}
		}
	} else if dataType != jsonparser.NotExist && err != jsonparser.KeyPathNotFoundError {
		return nil, []error{err}
	}
	return aliasesGVLIDs, nil
}

// GetValidBidders returns the set of core bidder names plus any request aliases
// (return type continues in the next chunk).
func GetValidBidders(aliases map[string]string)
map[string]struct{} {
	validBidders := openrtb_ext.BuildBidderNameHashSet()

	for k := range aliases {
		validBidders[k] = struct{}{}
	}

	return validBidders
}

// Quick little randomizer for a list of strings. Stuffing it in utils to keep other files clean
// Shuffles list in place using a random permutation of its indices.
func randomizeList(list []openrtb_ext.BidderName) {
	l := len(list)
	perm := rand.Perm(l)
	var j int
	for i := 0; i < l; i++ {
		j = perm[i]
		list[i], list[j] = list[j], list[i]
	}
}

// getExtCacheInstructions derives bid/VAST caching flags and the
// return-creative flag from request.ext.prebid.cache.
func getExtCacheInstructions(requestExtPrebid *openrtb_ext.ExtRequestPrebid) extCacheInstructions {
	//returnCreative defaults to true
	cacheInstructions := extCacheInstructions{returnCreative: true}
	foundBidsRC := false
	foundVastRC := false

	if requestExtPrebid != nil && requestExtPrebid.Cache != nil {
		if requestExtPrebid.Cache.Bids != nil {
			cacheInstructions.cacheBids = true
			if requestExtPrebid.Cache.Bids.ReturnCreative != nil {
				cacheInstructions.returnCreative = *requestExtPrebid.Cache.Bids.ReturnCreative
				foundBidsRC = true
			}
		}

		if requestExtPrebid.Cache.VastXML != nil {
			cacheInstructions.cacheVAST = true
			if requestExtPrebid.Cache.VastXML.ReturnCreative != nil {
				cacheInstructions.returnCreative = *requestExtPrebid.Cache.VastXML.ReturnCreative
				foundVastRC = true
			}
		}
	}

	// When both sections set the flag explicitly, either one being true wins.
	if foundBidsRC && foundVastRC {
		cacheInstructions.returnCreative = *requestExtPrebid.Cache.Bids.ReturnCreative || *requestExtPrebid.Cache.VastXML.ReturnCreative
	}

	return cacheInstructions
}

// getExtTargetData builds targeting parameters from request.ext.prebid.targeting.
// NOTE(review): dereferences IncludeWinners/IncludeBidderKeys/PriceGranularity
// without nil checks — presumably defaulted/validated upstream; confirm.
func getExtTargetData(requestExtPrebid *openrtb_ext.ExtRequestPrebid, cacheInstructions extCacheInstructions) *targetData {
	if requestExtPrebid != nil && requestExtPrebid.Targeting != nil {
		return &targetData{
			includeWinners:    *requestExtPrebid.Targeting.IncludeWinners,
			includeBidderKeys: *requestExtPrebid.Targeting.IncludeBidderKeys,
			includeCacheBids:  cacheInstructions.cacheBids,
			includeCacheVast:  cacheInstructions.cacheVAST,
			includeFormat:     requestExtPrebid.Targeting.IncludeFormat,
			priceGranularity:  *requestExtPrebid.Targeting.PriceGranularity,
			mediaTypePriceGranularity:
requestExtPrebid.Targeting.MediaTypePriceGranularity,
			preferDeals: requestExtPrebid.Targeting.PreferDeals,
		}
	}
	return nil
}

// getDebugInfo returns the boolean flags that allow for debug information in bidResponse.Ext, the SeatBid.httpcalls slice, and
// also sets the debugLog information
func getDebugInfo(test int8, requestExtPrebid *openrtb_ext.ExtRequestPrebid, accountDebugFlag bool, debugLog *DebugLog) (bool, bool, *DebugLog) {
	requestDebugAllow := parseRequestDebugValues(test, requestExtPrebid)
	debugLog = setDebugLogValues(accountDebugFlag, debugLog)

	// Response debug requires both the request asking and the account allowing,
	// unless a debug override is in force.
	responseDebugAllow := (requestDebugAllow && accountDebugFlag) || debugLog.DebugEnabledOrOverridden
	accountDebugAllow := (requestDebugAllow && accountDebugFlag) || (debugLog.DebugEnabledOrOverridden && accountDebugFlag)

	return responseDebugAllow, accountDebugAllow, debugLog
}

// setDebugLogValues initializes the DebugLog if nil. It also sets the value of the debugInfo flag
// used in HoldAuction
func setDebugLogValues(accountDebugFlag bool, debugLog *DebugLog) *DebugLog {
	if debugLog == nil {
		debugLog = &DebugLog{}
	}

	debugLog.Enabled = debugLog.DebugEnabledOrOverridden || accountDebugFlag
	return debugLog
}

// parseRequestDebugValues reports whether the request asked for debug output,
// via request.test == 1 or request.ext.prebid.debug.
func parseRequestDebugValues(test int8, requestExtPrebid *openrtb_ext.ExtRequestPrebid) bool {
	return test == 1 || (requestExtPrebid != nil && requestExtPrebid.Debug)
}

// getExtBidAdjustmentFactors returns request.ext.prebid.bidadjustmentfactors, if any.
func getExtBidAdjustmentFactors(requestExtPrebid *openrtb_ext.ExtRequestPrebid) map[string]float64 {
	if requestExtPrebid != nil {
		return requestExtPrebid.BidAdjustmentFactors
	}
	return nil
}

// applyFPD overlays resolved first-party data onto the bidder request.
func applyFPD(fpd *firstpartydata.ResolvedFirstPartyData, bidReq *openrtb2.BidRequest) {
	if fpd.Site != nil {
		bidReq.Site = fpd.Site
	}
	if fpd.App != nil {
		bidReq.App = fpd.App
	}
	if fpd.User != nil {
		//BuyerUID is a value obtained between fpd extraction and fpd application.
		//BuyerUID needs to be set back to fpd before applying this fpd to final bidder request
		if bidReq.User != nil && len(bidReq.User.BuyerUID) > 0 {
			fpd.User.BuyerUID = bidReq.User.BuyerUID
		}
		bidReq.User = fpd.User
	}
}

// buildBidResponseRequest creates one BidderRequest per bidder that has stored
// bid responses, so those bidders are "answered" without a live adapter call.
func buildBidResponseRequest(req *openrtb2.BidRequest, bidderImpResponses stored_responses.BidderImpsWithBidResponses, aliases map[string]string, bidderImpReplaceImpID stored_responses.BidderImpReplaceImpID) map[openrtb_ext.BidderName]BidderRequest {
	bidderToBidderResponse := make(map[openrtb_ext.BidderName]BidderRequest)
	for bidderName, impResps := range bidderImpResponses {
		resolvedBidder := resolveBidder(string(bidderName), aliases)
		bidderToBidderResponse[bidderName] = BidderRequest{
			BidRequest:            req,
			BidderCoreName:        resolvedBidder,
			BidderName:            bidderName,
			BidderStoredResponses: impResps,
			ImpReplaceImpId:       bidderImpReplaceImpID[string(resolvedBidder)],
			BidderLabels:          metrics.AdapterLabels{Adapter: resolvedBidder},
		}
	}
	return bidderToBidderResponse
}

// mergeBidderRequests folds the stored-response bidder requests into the live
// bidder requests, attaching stored responses to an existing entry when the
// bidder already has real imps (continues in the next chunk).
func mergeBidderRequests(allBidderRequests []BidderRequest, bidderNameToBidderReq map[openrtb_ext.BidderName]BidderRequest) []BidderRequest {
	if len(allBidderRequests) == 0 && len(bidderNameToBidderReq) == 0 {
		return allBidderRequests
	}
	if len(allBidderRequests) == 0 && len(bidderNameToBidderReq) > 0 {
		for _, v := range bidderNameToBidderReq {
			allBidderRequests = append(allBidderRequests, v)
		}
		return allBidderRequests
	} else if len(allBidderRequests) > 0 && len(bidderNameToBidderReq) > 0 {
		//merge bidder requests with real imps and imps with stored resp
		for bn, br := range bidderNameToBidderReq {
			found := false
			for i, ar := range allBidderRequests {
				if ar.BidderName == bn {
					//bidder req with real imps and imps with stored resp
					allBidderRequests[i].BidderStoredResponses = br.BidderStoredResponses
					found = true
					break
				}
			}
			if !found {
				//bidder req with stored bid responses only
				br.BidRequest.Imp = nil // to indicate this bidder request has bidder responses only
				allBidderRequests = append(allBidderRequests, br)
			}
		}
	}
	return allBidderRequests
}

// setLegacyGDPRFromGPP back-fills the legacy regs.gdpr flag and user.consent
// string from GPP data when the legacy fields are absent. Regs/User are copied
// before mutation (non-destructive to the caller's objects).
func setLegacyGDPRFromGPP(r *openrtb2.BidRequest, gpp gpplib.GppContainer) {
	if r.Regs != nil && r.Regs.GDPR == nil {
		if r.Regs.GPPSID != nil {
			// Set to 0 unless SID exists
			regs := *r.Regs
			regs.GDPR = ptrutil.ToPtr[int8](0)
			for _, id := range r.Regs.GPPSID {
				if id == int8(gppConstants.SectionTCFEU2) {
					regs.GDPR = ptrutil.ToPtr[int8](1)
				}
			}
			r.Regs = &regs
		}
	}

	if r.User == nil || len(r.User.Consent) == 0 {
		for _, sec := range gpp.Sections {
			if sec.GetID() == gppConstants.SectionTCFEU2 {
				var user openrtb2.User
				if r.User == nil {
					user = openrtb2.User{}
				} else {
					user = *r.User
				}
				user.Consent = sec.GetValue()
				r.User = &user
			}
		}
	}
}

// setLegacyUSPFromGPP back-fills the legacy regs.us_privacy string from the GPP
// USPV1 section when it is absent and the GPP SID list includes USPV1.
func setLegacyUSPFromGPP(r *openrtb2.BidRequest, gpp gpplib.GppContainer) {
	if r.Regs == nil {
		return
	}
	if len(r.Regs.USPrivacy) > 0 || r.Regs.GPPSID == nil {
		return
	}
	for _, sid := range r.Regs.GPPSID {
		if sid == int8(gppConstants.SectionUSPV1) {
			for _, sec := range gpp.Sections {
				if sec.GetID() == gppConstants.SectionUSPV1 {
					regs := *r.Regs
					regs.USPrivacy = sec.GetValue()
					r.Regs = &regs
				}
			}
		}
	}
}

// WrapJSONInData wraps raw JSON bytes as {"data":<data>} (continues in the
// next chunk).
func WrapJSONInData(data []byte) []byte {
	res := make([]byte, 0, len(data))
	res = append(res, []byte(`{"data":`)...)
	res = append(res, data...)
	res = append(res, []byte(`}`)...)
	return res
}

// getMediaTypeForBid determines the bid's media type from bid.MType (OpenRTB
// 2.6 markup type) when set, otherwise falls back to bid.ext.prebid.type.
func getMediaTypeForBid(bid openrtb2.Bid) (openrtb_ext.BidType, error) {
	mType := bid.MType
	var bidType openrtb_ext.BidType
	if mType > 0 {
		switch mType {
		case openrtb2.MarkupBanner:
			bidType = openrtb_ext.BidTypeBanner
		case openrtb2.MarkupVideo:
			bidType = openrtb_ext.BidTypeVideo
		case openrtb2.MarkupAudio:
			bidType = openrtb_ext.BidTypeAudio
		case openrtb2.MarkupNative:
			bidType = openrtb_ext.BidTypeNative
		default:
			return bidType, fmt.Errorf("Failed to parse bid mType for impression \"%s\"", bid.ImpID)
		}
	} else {
		var err error
		bidType, err = getPrebidMediaTypeForBid(bid)
		if err != nil {
			return bidType, err
		}
	}
	return bidType, nil
}

// getPrebidMediaTypeForBid reads the media type from bid.ext.prebid.type,
// returning a BadServerResponse error when it is missing or unparseable.
func getPrebidMediaTypeForBid(bid openrtb2.Bid) (openrtb_ext.BidType, error) {
	var err error
	var bidType openrtb_ext.BidType

	if bid.Ext != nil {
		var bidExt openrtb_ext.ExtBid
		err = json.Unmarshal(bid.Ext, &bidExt)
		if err == nil && bidExt.Prebid != nil {
			if bidType, err = openrtb_ext.ParseBidType(string(bidExt.Prebid.Type)); err == nil {
				return bidType, nil
			}
		}
	}

	errMsg := fmt.Sprintf("Failed to parse bid mediatype for impression \"%s\"", bid.ImpID)
	if err != nil {
		errMsg = fmt.Sprintf("%s, %s", errMsg, err.Error())
	}
	return bidType, &errortypes.BadServerResponse{
		Message: errMsg,
	}
}
package database

import (
	"encoding/json"
	"log"
	"os"

	"gopkg.in/mgo.v2"
)

// Package-level database handles, populated by InitDatabaseSession and
// InitCollections.
var DatabaseName string
var DatabaseSession *mgo.Session
var AdminCollection *mgo.Collection
var CourseCollection *mgo.Collection
var SubjectCollection *mgo.Collection
var FacultyCollection *mgo.Collection
var SectionCollection *mgo.Collection
var FeedbackCollection *mgo.Collection
var QuestionCollection *mgo.Collection
var TextQuestionCollection *mgo.Collection

// DatabaseDetails models the faconfig.json configuration file.
type DatabaseDetails struct {
	Url    string `json:"url"`
	Dbname string `json:"dbname"`
}

// InitDatabaseError is an error code returned by InitDatabaseSession.
type InitDatabaseError int

func (err InitDatabaseError) Error() string {
	if err == PROBLEMOPENINGFILE {
		return "Could not open file faconfig.json"
	}
	return "Miscellaneous Issues in database"
}

const (
	PROBLEMOPENINGFILE = 0
	PROBLEMDECODING    = 1
)

// InitDatabaseSession reads feedbackadminres/faconfig.json, dials the
// configured MongoDB instance, and populates DatabaseSession and DatabaseName.
func InitDatabaseSession() error {
	myFile, err := os.Open("feedbackadminres/faconfig.json")
	if err != nil {
		return InitDatabaseError(PROBLEMOPENINGFILE)
	}
	// Defer the close only after a successful Open; the original deferred
	// before the error check, calling Close on a nil *os.File.
	defer myFile.Close()

	var myDBDetails DatabaseDetails
	if err := json.NewDecoder(myFile).Decode(&myDBDetails); err != nil {
		return InitDatabaseError(PROBLEMDECODING)
	}

	// Fix: the Dial error was previously discarded, which left DatabaseSession
	// nil while still reporting success to the caller.
	DatabaseSession, err = mgo.Dial(myDBDetails.Url)
	if err != nil {
		return err
	}
	DatabaseName = myDBDetails.Dbname
	log.Println("feedback-admin: Initialised new database session to url:", myDBDetails.Url, "and Dbname:", myDBDetails.Dbname)
	return nil
}

// InitCollections binds the package-level collection handles to the configured
// database. Must be called after a successful InitDatabaseSession.
func InitCollections() {
	//log.Println("**Initialising Essential Collections with Database",DatabaseName,"**")
	AdminCollection = DatabaseSession.DB(DatabaseName).C("admin")
	CourseCollection = DatabaseSession.DB(DatabaseName).C("course")
	SubjectCollection = DatabaseSession.DB(DatabaseName).C("subject")
	FacultyCollection = DatabaseSession.DB(DatabaseName).C("faculty")
	SectionCollection = DatabaseSession.DB(DatabaseName).C("section")
	FeedbackCollection = DatabaseSession.DB(DatabaseName).C("feedback")
	QuestionCollection = DatabaseSession.DB(DatabaseName).C("question")
	TextQuestionCollection = DatabaseSession.DB(DatabaseName).C("textquestion")
}
package main

import (
	"LeetCodeGo/base"
	"LeetCodeGo/utils"
	"log"
)

/*
Given a binary tree, return its inorder traversal.

Example:

Input: [1,null,2,3]
   1
    \
     2
    /
   3

Output: [1,3,2]

Follow-up: the recursive algorithm is trivial — can you do it iteratively?

Source: LeetCode, "Binary Tree Inorder Traversal"
https://leetcode-cn.com/problems/binary-tree-inorder-traversal
Copyright belongs to LeetCode; please credit the source for non-commercial reuse.
*/

// traversal appends the inorder sequence (left, node, right) of node's subtree
// to nums.
func traversal(node *base.TreeNode, nums *[]int) {
	if node == nil {
		return
	}
	if node.Left != nil {
		traversal(node.Left, nums)
	}
	*nums = append(*nums, node.Val)
	if node.Right != nil {
		traversal(node.Right, nums)
	}
}

// Recursive implementation.
func inorderTraversal(root *base.TreeNode) []int {
	nums := make([]int, 0)
	traversal(root, &nums)
	return nums
}

// Iterative implementation using an explicit stack: push the whole left spine,
// then visit the top and switch to its right subtree.
func inorderTraversal2(root *base.TreeNode) []int {
	var nums []int
	if root == nil {
		return nums
	}
	nodes := make([]*base.TreeNode, 0)
	cur := root
	for cur != nil || len(nodes) > 0 {
		for cur != nil {
			nodes = append(nodes, cur)
			cur = cur.Left
		}
		if len(nodes) > 0 {
			cur = nodes[len(nodes)-1]
			// Careful: do not use := here, or Go would create a new local
			// variable instead of assigning to cur.
			nums = append(nums, cur.Val)
			nodes = nodes[:len(nodes)-1]
			cur = cur.Right
		}
	}
	return nums
}

func main() {
	values := []string{"1", "nil", "2", "3"}
	root := base.GetBinaryTree(values)
	nums := inorderTraversal2(root)
	if len(nums) != 3 || !utils.CompareEqual(nums, []int{1, 3, 2}) {
		log.Fatal("error: ", nums)
	}
	log.Println("success")
}
/* Copyright 2020 Rafael Fernández López <ereslibre@ereslibre.es> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package reconciler import ( "k8s.io/klog" "github.com/oneinfra/oneinfra/internal/pkg/component" "github.com/oneinfra/oneinfra/internal/pkg/component/components" "github.com/oneinfra/oneinfra/internal/pkg/conditions" "github.com/oneinfra/oneinfra/internal/pkg/inquirer" ) // Reconcile reconciles the component func Reconcile(inquirer inquirer.ReconcilerInquirer) error { klog.V(1).Infof("reconciling component %q with role %q", inquirer.Component().Name, inquirer.Component().Role) var componentObj components.Component switch inquirer.Component().Role { case component.ControlPlaneRole: componentObj = &components.ControlPlane{} case component.ControlPlaneIngressRole: componentObj = &components.ControlPlaneIngress{} } inquirer.Component().Conditions.SetCondition( component.ReconcileStarted, conditions.ConditionTrue, ) res := componentObj.Reconcile(inquirer) if res == nil { inquirer.Component().Conditions.SetCondition( component.ReconcileSucceeded, conditions.ConditionTrue, ) } else { inquirer.Component().Conditions.SetCondition( component.ReconcileSucceeded, conditions.ConditionFalse, ) } return res }
// Copyright 2017 The EvAlgo Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package evhtml

var (
	// DEBUG toggles debugging output for this package; off by default.
	DEBUG = false
)
package ohdear

import (
	"fmt"
	"net/http"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/VictorAvelar/goh-dear/testdata"
)

// TestSitesSrv_Get exercises Sites.Get against a stubbed HTTP server,
// verifying auth header, HTTP method, and response decoding.
func TestSitesSrv_Get(t *testing.T) {
	setEnv()
	setup()
	defer func() {
		tearDown()
		unsetEnv()
	}()

	cases := []struct {
		id       uint
		name     string
		status   int
		respBody string
		wantErr  bool
		err      error
	}{
		{
			1,
			"successful request/response cycle",
			http.StatusOK,
			testdata.SingleSiteResponse,
			false,
			nil,
		},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			tMux.HandleFunc(fmt.Sprintf("/sites/%d", c.id), func(w http.ResponseWriter, r *http.Request) {
				testHeader(t, r, AuthHeader, fmt.Sprintf("%s %s", TokenType, testTkn))
				testMethod(t, r, http.MethodGet)
				// Fix: honor the per-case status instead of hardcoding
				// http.StatusOK, so the table's status column is actually used
				// and future error-path cases exercise their status codes.
				w.WriteHeader(c.status)
				_, _ = fmt.Fprint(w, c.respBody)
			})

			got, err := tClient.Sites.Get(c.id)
			if err != nil {
				if c.wantErr {
					assert.EqualError(t, err, c.err.Error())
				} else {
					t.Fatal(err)
				}
			}

			assert.Equal(t, c.id, got.ID)
		})
	}
}
// Copyright 2020 The Moov Authors // Use of this source code is governed by an Apache License // license that can be found in the LICENSE file. package camt_v07 import ( "reflect" "github.com/moov-io/iso20022/pkg/utils" ) // Must be at least 1 items long type ExternalAccountIdentification1Code string func (r ExternalAccountIdentification1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalAccountIdentification1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalCashAccountType1Code string func (r ExternalCashAccountType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalCashAccountType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalClearingSystemIdentification1Code string func (r ExternalClearingSystemIdentification1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 5 { return utils.NewErrTextLengthInvalid("ExternalClearingSystemIdentification1Code", 1, 5) } return nil } // Must be at least 1 items long type ExternalEnquiryRequestType1Code string func (r ExternalEnquiryRequestType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalEnquiryRequestType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalFinancialInstitutionIdentification1Code string func (r ExternalFinancialInstitutionIdentification1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalFinancialInstitutionIdentification1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalOrganisationIdentification1Code string func (r ExternalOrganisationIdentification1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalOrganisationIdentification1Code", 1, 4) } return nil } // Must be at 
least 1 items long type ExternalPaymentControlRequestType1Code string func (r ExternalPaymentControlRequestType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalPaymentControlRequestType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalPersonIdentification1Code string func (r ExternalPersonIdentification1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalPersonIdentification1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalSystemBalanceType1Code string func (r ExternalSystemBalanceType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalSystemBalanceType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalMarketInfrastructure1Code string func (r ExternalMarketInfrastructure1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 3 { return utils.NewErrTextLengthInvalid("ExternalMarketInfrastructure1Code", 1, 3) } return nil } // Must be at least 1 items long type ExternalSystemErrorHandling1Code string func (r ExternalSystemErrorHandling1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalSystemErrorHandling1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalSystemEventType1Code string func (r ExternalSystemEventType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalSystemEventType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalAgentInstruction1Code string func (r ExternalAgentInstruction1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalAgentInstruction1Code", 1, 4) } return nil } // Must be at least 1 items long type 
ExternalCashClearingSystem1Code string func (r ExternalCashClearingSystem1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 3 { return utils.NewErrTextLengthInvalid("ExternalCashClearingSystem1Code", 1, 3) } return nil } // Must be at least 1 items long type ExternalCategoryPurpose1Code string func (r ExternalCategoryPurpose1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalCategoryPurpose1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalCreditorAgentInstruction1Code string func (r ExternalCreditorAgentInstruction1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalCreditorAgentInstruction1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalDiscountAmountType1Code string func (r ExternalDiscountAmountType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalDiscountAmountType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalDocumentLineType1Code string func (r ExternalDocumentLineType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalDocumentLineType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalGarnishmentType1Code string func (r ExternalGarnishmentType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalGarnishmentType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalLocalInstrument1Code string func (r ExternalLocalInstrument1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 35 { return utils.NewErrTextLengthInvalid("ExternalLocalInstrument1Code", 1, 35) } return nil } // Must be at least 1 items long type ExternalMandateSetupReason1Code string func (r ExternalMandateSetupReason1Code) 
Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalMandateSetupReason1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalProxyAccountType1Code string func (r ExternalProxyAccountType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalProxyAccountType1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalPurpose1Code string func (r ExternalPurpose1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalPurpose1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalServiceLevel1Code string func (r ExternalServiceLevel1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalServiceLevel1Code", 1, 4) } return nil } // Must be at least 1 items long type ExternalTaxAmountType1Code string func (r ExternalTaxAmountType1Code) Validate() error { if len(string(r)) < 1 || len(string(r)) > 4 { return utils.NewErrTextLengthInvalid("ExternalTaxAmountType1Code", 1, 4) } return nil } // May be one of LETT, MAIL, PHON, FAXX, CELL type PreferredContactMethod1Code string func (r PreferredContactMethod1Code) Validate() error { for _, vv := range []string{ "LETT", "MAIL", "PHON", "FAXX", "CELL", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("PreferredContactMethod1Code") } // May be one of ALLL, CHNG, MODF, DELD type QueryType2Code string func (r QueryType2Code) Validate() error { for _, vv := range []string{ "ALLL", "CHNG", "MODF", "DELD", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("QueryType2Code") } // May be one of BILA, MULT type BalanceCounterparty1Code string func (r BalanceCounterparty1Code) Validate() error { for _, vv := range []string{ "BILA", "MULT", } { if 
reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("BalanceCounterparty1Code") } // May be one of MULT, BILI, MAND, DISC, NELI, INBI, GLBL, DIDB, SPLC, SPLF, TDLC, TDLF, UCDT, ACOL, EXGT type LimitType3Code string func (r LimitType3Code) Validate() error { for _, vv := range []string{ "MULT", "BILI", "MAND", "DISC", "NELI", "INBI", "GLBL", "DIDB", "SPLC", "SPLF", "TDLC", "TDLF", "UCDT", "ACOL", "EXGT", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("LimitType3Code") } // May be one of BHOL, SMTN, NOOP, RCVR, ADTW type SystemClosureReason1Code string func (r SystemClosureReason1Code) Validate() error { for _, vv := range []string{ "BHOL", "SMTN", "NOOP", "RCVR", "ADTW", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("SystemClosureReason1Code") } // May be one of SUSP, ACTV, CLSD, CLSG type SystemStatus2Code string func (r SystemStatus2Code) Validate() error { for _, vv := range []string{ "SUSP", "ACTV", "CLSD", "CLSG", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("SystemStatus2Code") } // May be one of CBS, BCK, BAL, CLS, CTR, CBH, CBP, DPG, DPN, EXP, TCH, LMT, LIQ, DPP, DPH, DPS, STF, TRP, TCS, LOA, LOR, TCP, OND, MGL type PaymentType3Code string func (r PaymentType3Code) Validate() error { for _, vv := range []string{ "CBS", "BCK", "BAL", "CLS", "CTR", "CBH", "CBP", "DPG", "DPN", "EXP", "TCH", "LMT", "LIQ", "DPP", "DPH", "DPS", "STF", "TRP", "TCS", "LOA", "LOR", "TCP", "OND", "MGL", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("PaymentType3Code") } // May be one of DEBT, CRED, SHAR, SLEV type ChargeBearerType1Code string func (r ChargeBearerType1Code) Validate() error { for _, vv := range []string{ "DEBT", "CRED", "SHAR", "SLEV", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("ChargeBearerType1Code") } // May be one of 
RTGS, RTNS, MPNS, BOOK type ClearingChannel2Code string func (r ClearingChannel2Code) Validate() error { for _, vv := range []string{ "RTGS", "RTNS", "MPNS", "BOOK", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("ClearingChannel2Code") } // May be one of RADM, RPIN, FXDR, DISP, PUOR, SCOR type DocumentType3Code string func (r DocumentType3Code) Validate() error { for _, vv := range []string{ "RADM", "RPIN", "FXDR", "DISP", "PUOR", "SCOR", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("DocumentType3Code") } // May be one of MSIN, CNFA, DNFA, CINV, CREN, DEBN, HIRI, SBIN, CMCN, SOAC, DISP, BOLD, VCHR, AROI, TSUT, PUOR type DocumentType6Code string func (r DocumentType6Code) Validate() error { for _, vv := range []string{ "MSIN", "CNFA", "DNFA", "CINV", "CREN", "DEBN", "HIRI", "SBIN", "CMCN", "SOAC", "DISP", "BOLD", "VCHR", "AROI", "TSUT", "PUOR", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("DocumentType6Code") } // May be one of YEAR, MNTH, QURT, MIAN, WEEK, DAIL, ADHO, INDA, FRTN type Frequency6Code string func (r Frequency6Code) Validate() error { for _, vv := range []string{ "YEAR", "MNTH", "QURT", "MIAN", "WEEK", "DAIL", "ADHO", "INDA", "FRTN", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("Frequency6Code") } // May be one of PHOA, TELA type Instruction4Code string func (r Instruction4Code) Validate() error { for _, vv := range []string{ "PHOA", "TELA", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("Instruction4Code") } // May be one of CHK, TRF, DD, TRA type PaymentMethod4Code string func (r PaymentMethod4Code) Validate() error { for _, vv := range []string{ "CHK", "TRF", "DD", "TRA", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("PaymentMethod4Code") } // May be one of HIGH, NORM type Priority2Code string 
func (r Priority2Code) Validate() error { for _, vv := range []string{ "HIGH", "NORM", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("Priority2Code") } // May be one of FRST, RCUR, FNAL, OOFF, RPRE type SequenceType3Code string func (r SequenceType3Code) Validate() error { for _, vv := range []string{ "FRST", "RCUR", "FNAL", "OOFF", "RPRE", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("SequenceType3Code") } // May be one of INDA, INGA, COVE, CLRG type SettlementMethod1Code string func (r SettlementMethod1Code) Validate() error { for _, vv := range []string{ "INDA", "INGA", "COVE", "CLRG", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("SettlementMethod1Code") } // May be one of MM01, MM02, MM03, MM04, MM05, MM06, MM07, MM08, MM09, MM10, MM11, MM12, QTR1, QTR2, QTR3, QTR4, HLF1, HLF2 type TaxRecordPeriod1Code string func (r TaxRecordPeriod1Code) Validate() error { for _, vv := range []string{ "MM01", "MM02", "MM03", "MM04", "MM05", "MM06", "MM07", "MM08", "MM09", "MM10", "MM11", "MM12", "QTR1", "QTR2", "QTR3", "QTR4", "HLF1", "HLF2", } { if reflect.DeepEqual(string(r), vv) { return nil } } return utils.NewErrValueInvalid("TaxRecordPeriod1Code") }
package main

import (
	"fmt"

	// The code refers to this package as hello_lib even though the import
	// path ends in "hellogo"; alias the import explicitly so the reference
	// resolves regardless of the package's declared name.
	hello_lib "github.com/gashjp/hellogo"
)

// main prints the library's World greeting to stdout.
func main() {
	fmt.Println(hello_lib.World())
}
package pem

import (
	"bytes"
	"crypto/ed25519"
	"crypto/x509"
	"encoding/pem"
	"errors"
	"fmt"
	"os"
	"path"

	"github.com/iotaledger/hive.go/runtime/ioutils"
)

// ReadEd25519PrivateKeyFromPEMFile reads an Ed25519 private key from a file
// with PEM format. The PEM block is expected to contain a PKCS#8-encoded
// key; an error is returned if decoding, parsing, or the type assertion to
// ed25519.PrivateKey fails.
func ReadEd25519PrivateKeyFromPEMFile(filepath string) (ed25519.PrivateKey, error) {
	pemPrivateBlockBytes, err := os.ReadFile(filepath)
	if err != nil {
		return nil, fmt.Errorf("unable to read private key: %w", err)
	}

	pemPrivateBlock, _ := pem.Decode(pemPrivateBlockBytes)
	if pemPrivateBlock == nil {
		return nil, errors.New("unable to decode private key")
	}

	cryptoPrivKey, err := x509.ParsePKCS8PrivateKey(pemPrivateBlock.Bytes)
	if err != nil {
		return nil, fmt.Errorf("unable to parse private key: %w", err)
	}

	privKey, ok := cryptoPrivKey.(ed25519.PrivateKey)
	if !ok {
		return nil, errors.New("unable to type assert private key")
	}

	return privKey, nil
}

// WriteEd25519PrivateKeyToPEMFile stores an Ed25519 private key to a file
// with PEM format (PKCS#8-encoded inside a "PRIVATE KEY" block). The parent
// directory is created with mode 0o700 if it does not exist.
func WriteEd25519PrivateKeyToPEMFile(filepath string, privateKey ed25519.PrivateKey) error {
	if err := ioutils.CreateDirectory(path.Dir(filepath), 0o700); err != nil {
		return fmt.Errorf("unable to store private key: %w", err)
	}

	pkcs8Bytes, err := x509.MarshalPKCS8PrivateKey(privateKey)
	if err != nil {
		return fmt.Errorf("unable to marshal private key: %w", err)
	}

	pemPrivateBlock := &pem.Block{
		Type:  "PRIVATE KEY",
		Bytes: pkcs8Bytes,
	}

	var pemBuffer bytes.Buffer
	if err := pem.Encode(&pemBuffer, pemPrivateBlock); err != nil {
		return fmt.Errorf("unable to encode private key: %w", err)
	}

	// 0o660 written in the same modern octal style as 0o700 above.
	if err := ioutils.WriteToFile(filepath, pemBuffer.Bytes(), 0o660); err != nil {
		return fmt.Errorf("unable to write private key: %w", err)
	}

	return nil
}
package operations

// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command

import (
	"time"

	"github.com/go-openapi/errors"
	"github.com/go-openapi/runtime"
	cr "github.com/go-openapi/runtime/client"
	"github.com/go-openapi/swag"

	strfmt "github.com/go-openapi/strfmt"
)

// newDefaultCreateHandleDirectUploadParams builds a params object with every
// boolean flag defaulted to false (each flag gets its own pointer) and the
// given request timeout. Shared by both public constructors.
func newDefaultCreateHandleDirectUploadParams(timeout time.Duration) *CreateHandleDirectUploadParams {
	newFalse := func() *bool {
		v := false
		return &v
	}
	return &CreateHandleDirectUploadParams{
		IncludeAccessEvents:     newFalse(),
		IncludeAuditEvents:      newFalse(),
		IncludeMonitoringEvents: newFalse(),
		IncludeServerSettings:   newFalse(),
		SkipUserUpdate:          newFalse(),
		Update:                  newFalse(),

		timeout: timeout,
	}
}

// NewCreateHandleDirectUploadParams creates a new CreateHandleDirectUploadParams object
// with the default values initialized.
func NewCreateHandleDirectUploadParams() *CreateHandleDirectUploadParams {
	return newDefaultCreateHandleDirectUploadParams(cr.DefaultTimeout)
}

// NewCreateHandleDirectUploadParamsWithTimeout creates a new CreateHandleDirectUploadParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewCreateHandleDirectUploadParamsWithTimeout(timeout time.Duration) *CreateHandleDirectUploadParams {
	return newDefaultCreateHandleDirectUploadParams(timeout)
}

/*CreateHandleDirectUploadParams contains all the parameters to send to the API endpoint
for the create handle direct upload operation typically these are written to a http.Request
*/
type CreateHandleDirectUploadParams struct {

	/*IncludeAccessEvents*/
	IncludeAccessEvents *bool
	/*IncludeAuditEvents*/
	IncludeAuditEvents *bool
	/*IncludeMonitoringEvents*/
	IncludeMonitoringEvents *bool
	/*IncludeServerSettings*/
	IncludeServerSettings *bool
	/*SkipUserUpdate*/
	SkipUserUpdate *bool
	/*Update*/
	Update *bool

	timeout time.Duration
}

// WithIncludeAccessEvents adds the includeAccessEvents to the create handle direct upload params
func (o *CreateHandleDirectUploadParams) WithIncludeAccessEvents(IncludeAccessEvents *bool) *CreateHandleDirectUploadParams {
	o.IncludeAccessEvents = IncludeAccessEvents
	return o
}

// WithIncludeAuditEvents adds the includeAuditEvents to the create handle direct upload params
func (o *CreateHandleDirectUploadParams) WithIncludeAuditEvents(IncludeAuditEvents *bool) *CreateHandleDirectUploadParams {
	o.IncludeAuditEvents = IncludeAuditEvents
	return o
}

// WithIncludeMonitoringEvents adds the includeMonitoringEvents to the create handle direct upload params
func (o *CreateHandleDirectUploadParams) WithIncludeMonitoringEvents(IncludeMonitoringEvents *bool) *CreateHandleDirectUploadParams {
	o.IncludeMonitoringEvents = IncludeMonitoringEvents
	return o
}

// WithIncludeServerSettings adds the includeServerSettings to the create handle direct upload params
func (o *CreateHandleDirectUploadParams) WithIncludeServerSettings(IncludeServerSettings *bool) *CreateHandleDirectUploadParams {
	o.IncludeServerSettings = IncludeServerSettings
	return o
}

// WithSkipUserUpdate adds the skipUserUpdate to the create handle direct upload params
func (o *CreateHandleDirectUploadParams) WithSkipUserUpdate(SkipUserUpdate *bool) *CreateHandleDirectUploadParams {
	o.SkipUserUpdate = SkipUserUpdate
	return o
}

// WithUpdate adds the update to the create handle direct upload params
func (o *CreateHandleDirectUploadParams) WithUpdate(Update *bool) *CreateHandleDirectUploadParams {
	o.Update = Update
	return o
}

// WriteToRequest writes these params to a swagger request
func (o *CreateHandleDirectUploadParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {

	r.SetTimeout(o.timeout)
	var res []error

	// All six flags are optional boolean query parameters; the previous
	// generated code repeated the same stanza per flag (with a redundant
	// inner nil-check). A single loop keeps behavior identical.
	boolParams := []struct {
		name  string
		value *bool
	}{
		{"includeAccessEvents", o.IncludeAccessEvents},
		{"includeAuditEvents", o.IncludeAuditEvents},
		{"includeMonitoringEvents", o.IncludeMonitoringEvents},
		{"includeServerSettings", o.IncludeServerSettings},
		{"skipUserUpdate", o.SkipUserUpdate},
		{"update", o.Update},
	}
	for _, p := range boolParams {
		if p.value == nil {
			continue
		}
		if q := swag.FormatBool(*p.value); q != "" {
			if err := r.SetQueryParam(p.name, q); err != nil {
				return err
			}
		}
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
package cryptdo

import (
	"crypto/rand"
	"errors"
	"fmt"
	"io"

	"github.com/golang/protobuf/proto"

	"code.xoeb.us/cryptdo/cryptdo/cryptdopb"
)

// currentVersion is the cryptography version used for all new encryptions;
// Decrypt still accepts messages produced by older versions.
const currentVersion = 1

// ErrEmptyMessage is returned when attempting to decrypt an empty message.
var ErrEmptyMessage = errors.New("cryptdo: empty message")

// Encrypt takes a plaintext blob and encrypts it with a key based on the
// passphrase provided. The byte slice returned is suitable for passing in to
// the Decrypt function with the same passphrase in order to retrieve the data.
//
// The output format and internal details of the cryptography performed is
// documented in the associated protocol buffers file.
//
// Due to the use of authenticated encryption we need to read the entire
// plaintext into memory. Therefore it is recommended to only use this for
// smaller plaintexts.
func Encrypt(plaintext []byte, passphrase string) ([]byte, error) {
	// Encryption always uses the current cryptography version. Which (unless
	// something has gone horrifically wrong) will always be found.
	v, _ := lookup(currentVersion)

	message, err := v.encrypt(plaintext, passphrase)
	if err != nil {
		return nil, err
	}

	return proto.Marshal(message)
}

// Decrypt decrypts a piece of ciphertext which was encrypted with the Encrypt
// function. The original plaintext is returned if no error occurred during the
// decryption. It supports passing in ciphertext which was made with previous
// versions of the library.
//
// The output format and internal details of the cryptography performed is
// documented in the associated protocol buffers file.
//
// Due to the use of authenticated encryption we need to read the entire
// ciphertext into memory. Therefore it is recommended to only use this for
// smaller plaintexts.
func Decrypt(ciphertext []byte, passphrase string) ([]byte, error) {
	if len(ciphertext) == 0 {
		return nil, ErrEmptyMessage
	}

	var message cryptdopb.Message
	if err := proto.Unmarshal(ciphertext, &message); err != nil {
		return nil, err
	}

	// The message records which cryptography version produced it; refuse
	// anything newer than this build understands.
	v, found := lookup(message.GetVersion())
	if !found {
		return nil, &UnknownVersionError{message.GetVersion()}
	}

	return v.decrypt(&message, passphrase)
}

// randomBytes returns a byte slice (of the requested length) filled with
// cryptographically secure random bytes.
func randomBytes(count int) ([]byte, error) {
	bs := make([]byte, count)
	if _, err := io.ReadFull(rand.Reader, bs); err != nil {
		return nil, err
	}

	return bs, nil
}

// InvalidNonceError is caused by a mismatch between the expected nonce length
// from the encryption algorithm and the actual nonce length provided in the
// message.
type InvalidNonceError struct {
	expected int
	actual   int
}

// Error implements the error interface.
func (i *InvalidNonceError) Error() string {
	return fmt.Sprintf("cryptdo: message nonce has incorrect size (expected %d, got: %d)", i.expected, i.actual)
}

// UnknownVersionError is caused when a message is too new for the executed
// version to understand. This will often be caused by a mismatch between two
// cryptography versions operating on the same data.
type UnknownVersionError struct {
	version int32
}

// Error implements the error interface.
func (u *UnknownVersionError) Error() string {
	return fmt.Sprintf("cryptdo: message has incompatible version (expected %d or below, got: %d)", currentVersion, u.version)
}
package schema // LoginParam 登录参数 type LoginParam struct { UserName string `json:"username" binding:"required" swaggo:"true,用户名"` Password string `json:"password" binding:"required" swaggo:"true,密码(md5加密)"` } // User 用户对象 type UserParam struct { UserId string `json:"user_id" swaggo:"false,记录ID"` UserName string `json:"username" binding:"required" swaggo:"true,用户名"` Nickname string `json:"nickname" binding:"required" swaggo:"true,真实姓名"` Password string `json:"password" swaggo:"false,密码"` Phone string `json:"phone" swaggo:"false,手机号"` Email string `json:"email" swaggo:"true,邮箱"` Status int64 `json:"status" swaggo:"true,用户状态(1:启用 2:停用)"` }
package app

import (
	"log"
	"path/filepath"
	"strings"

	"github.com/spf13/viper"
)

// InitConfig points viper at the config file f (e.g. "conf/config.toml"),
// splitting it into a directory search path and an extension-less config
// name. The process exits fatally if the file cannot be read.
func InitConfig(f string) {
	base := filepath.Base(f)
	cfgName := strings.TrimSuffix(base, filepath.Ext(base))

	viper.SetConfigName(cfgName)
	viper.AddConfigPath(filepath.Dir(f))

	if err := viper.ReadInConfig(); err != nil { // Handle errors reading the config file
		log.Fatalf("Fatal error config file: %s \n", err)
	}
}
package entity

// StateType enumerates the kinds of state a status can have.
type StateType string

const (
	Regular      StateType = "Regular"      // Regular (the default value)
	Successful   StateType = "Successful"   // Final, positive outcome
	Unsuccessful StateType = "Unsuccessful" // Final, negative outcome
)

// State represents a contract status.
type State struct {
	Meta       *Meta     `json:"meta,omitempty"`       // Status metadata (read-only)
	Id         string    `json:"id,omitempty"`         // Status ID (read-only)
	AccountId  string    `json:"accountId,omitempty"`  // Account ID (read-only)
	Name       string    `json:"name,omitempty"`       // Status name (required on creation)
	Color      int       `json:"color,omitempty"`      // Status color (required on creation)
	StateType  StateType `json:"stateType,omitempty"`  // Status type (required on creation); defaults to Regular
	EntityType string    `json:"entityType,omitempty"` // Entity type the status belongs to (JSON API keyword) (read-only)
}
// +build windows package ConsoleColor import ( "fmt" ) func Print(c int, a ...interface{}) (int, error) { if GetOS() == OS_WINDOWS { defer locker.Unlock() defer ResetColor() locker.Lock() fc := GetForegroundColor(OS_WINDOWS, c) Textbackground(fc) } return fmt.Print(a...) } func Printf(c int, format string, a ...interface{}) (int, error) { if GetOS() == OS_WINDOWS { defer locker.Unlock() defer ResetColor() locker.Lock() fc := GetForegroundColor(OS_WINDOWS, c) Textbackground(fc) } return fmt.Printf(format, a...) } func Println(c int, a ...interface{}) (int, error) { if GetOS() == OS_WINDOWS { defer locker.Unlock() defer ResetColor() locker.Lock() fc := GetForegroundColor(OS_WINDOWS, c) Textbackground(fc) } return fmt.Println(a...) } func PrintXY(c int, x, y int16, a ...interface{}) (int, error) { if GetOS() == OS_WINDOWS { defer locker.Unlock() defer ResetColor() locker.Lock() fc := GetForegroundColor(OS_WINDOWS, c) Textbackground(fc) Gotoxy(x, y) } return fmt.Print(a...) } func PrintfXY(c int, x, y int16, format string, a ...interface{}) (int, error) { if GetOS() == OS_WINDOWS { defer locker.Unlock() defer ResetColor() locker.Lock() fc := GetForegroundColor(OS_WINDOWS, c) Textbackground(fc) Gotoxy(x, y) } return fmt.Printf(format, a...) } func PrintlnXY(c int, x, y int16, a ...interface{}) (int, error) { if GetOS() == OS_WINDOWS { defer locker.Unlock() defer ResetColor() locker.Lock() fc := GetForegroundColor(OS_WINDOWS, c) Textbackground(fc) Gotoxy(x, y) } return fmt.Println(a...) }
package gio

import (
	exp "github.com/jholowczak/guacamole_client_go"
	"github.com/jholowczak/guacamole_client_go/gprotocol"
)

// GuacamoleWriter provides abstract and raw character write access
// to a stream of Guacamole instructions.
type GuacamoleWriter interface {
	// Write writes a portion of the given array of characters to the
	// Guacamole instruction stream. The portion must contain only complete
	// Guacamole instructions.
	//
	// chunk is an array of characters containing Guacamole instructions,
	// off the start offset of the portion to write, and len the length of
	// that portion. A GuacamoleException-style error is returned if writing
	// the portion fails.
	Write(chunk []byte, off, len int) (err exp.ExceptionInterface)

	// WriteAll writes the entire given array of characters to the Guacamole
	// instruction stream. The array must consist only of complete Guacamole
	// instructions. A GuacamoleException-style error is returned if writing
	// the array fails.
	WriteAll(chunk []byte) (err exp.ExceptionInterface)

	// WriteInstruction writes the given fully parsed instruction to the
	// Guacamole instruction stream. A GuacamoleException-style error is
	// returned if writing the instruction fails.
	WriteInstruction(instruction gprotocol.GuacamoleInstruction) (err exp.ExceptionInterface)
}
package raws // import "github.com/BenLubar/dfide/gui/raws"

import (
	"bytes"
	"log"

	"github.com/BenLubar/dfide/gui"
	"github.com/BenLubar/dfide/raws"
)

// visualEditor is the contract shared by all raw-object editors: each
// exposes a GUI control, accepts an object name, and notifies listeners
// when its content changes.
type visualEditor interface {
	control() gui.Control
	setName(string)
	OnChange(func([]byte))
}

// baseVisualEditor supplies the listener bookkeeping shared by concrete
// editors.
type baseVisualEditor struct {
	listeners []func([]byte)
	name      string
}

// OnChange registers f to receive the re-serialized raw bytes whenever the
// editor's content changes.
func (e *baseVisualEditor) OnChange(f func([]byte)) {
	e.listeners = append(e.listeners, f)
}

// setName records the raw object's name used when serializing.
func (e *baseVisualEditor) setName(name string) {
	e.name = name
}

// onChange serializes the current object via write and fans the resulting
// bytes out to every registered listener. Serialization failures are logged
// and suppress the notification.
func (e *baseVisualEditor) onChange(objectType string, write func(*raws.Writer) error) {
	if len(e.listeners) == 0 {
		return
	}

	// TODO: handle errors
	var buf bytes.Buffer
	serialize := func() error {
		w, err := raws.NewWriter(&buf, e.name, objectType)
		if err != nil {
			return err
		}
		if err := write(w); err != nil {
			return err
		}
		return w.Flush()
	}
	if err := serialize(); err != nil {
		log.Println(err)
		return
	}

	for _, listener := range e.listeners {
		listener(buf.Bytes())
	}
}

// errorEditor is a placeholder editor shown when an object cannot be edited.
type errorEditor struct {
	baseVisualEditor

	err error
}

// control renders the wrapped error as a label.
func (e *errorEditor) control() gui.Control {
	return gui.NewLabel("Error: " + e.err.Error())
}
package warning import ( "github.com/go-gorp/gorp" "github.com/ovh/cds/engine/api/application" "github.com/ovh/cds/engine/api/environment" "github.com/ovh/cds/engine/api/pipeline" "github.com/ovh/cds/engine/api/workflow" "github.com/ovh/cds/sdk/log" ) func variableIsUsed(db gorp.SqlExecutor, key string, varName string) ([]workflow.CountVarInWorkflowData, []string, []string, []string, []pipeline.CountInPipelineData) { ws, errWS := workflow.CountVariableInWorkflow(db, key, varName) if errWS != nil { log.Warning("manageAddVariableEvent> Unable to search variable in workflow: %v", errWS) } // Check if used in environment envsName, errE := environment.CountEnvironmentByVarValue(db, key, varName) if errE != nil { log.Warning("manageAddVariableEvent> Unable to search variable in environments: %v", errE) } // Check if used on application appsName, errA := application.CountInVarValue(db, key, varName) if errA != nil { log.Warning("manageAddVariableEvent> Unable to search variable in applications: %v", errA) } // Check if used on pipeline parameters pipsName, errP := pipeline.CountInParamValue(db, key, varName) if errP != nil { log.Warning("manageAddVariableEvent> Unable to search variable in pipeline parameters: %s", errP) } // Check if used on pipeline jobs pipsJob, errP2 := pipeline.CountInPipelines(db, key, varName) if errP2 != nil { log.Warning("manageAddVariableEvent> Unable to search variable in pipelines: %s", errP2) } return ws, envsName, appsName, pipsName, pipsJob }
package main

import (
	"freshdesk/server"
	"net/http"

	"github.com/joho/godotenv"
	"github.com/sirupsen/logrus"
)

// init loads values from .env into the process environment and aborts the
// program when the file is missing.
/*
	Expected env variables:
	API_URL=<https://domain.freshdesk.com/api/v2/>
	API_KEY=<key>
	API_LOC=<timezone, eg America/New_York>
	ALLOW_IP=127.0.0.1,A.B.C.D,W.X.Y.Z
*/
func init() {
	if err := godotenv.Load(); err != nil {
		logrus.Fatalln("No .env file found")
	}
}

// main constructs the application server and serves its router on :8080,
// logging fatally if the listener fails.
func main() {
	srv := server.New()
	logrus.Fatalln(http.ListenAndServe(":8080", srv.Router))
}
// Copyright 2020 The ChromiumOS Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package crostini

import (
	"context"
	"os"
	"time"

	"chromiumos/tast/common/testexec"
	"chromiumos/tast/errors"
	"chromiumos/tast/local/chrome/uiauto"
	"chromiumos/tast/local/chrome/uiauto/filesapp"
	"chromiumos/tast/local/crostini"
	"chromiumos/tast/local/cryptohome"
	"chromiumos/tast/local/input"
	"chromiumos/tast/local/vm"
	"chromiumos/tast/testing"
)

// init registers the test with the tast framework, declaring one parameterized
// variant per (Debian release, stability) combination. Each variant reuses a
// shared Crostini fixture so the VM is not rebuilt per test.
func init() {
	testing.AddTest(&testing.Test{
		Func:         HomeDirectoryRenameFile,
		LacrosStatus: testing.LacrosVariantUnneeded,
		Desc:         "Test renaming a file in Linux files and container using a pre-built crostini image",
		Contacts:     []string{"clumptini+oncall@google.com"},
		Attr:         []string{"group:mainline"},
		SoftwareDeps: []string{"chrome", "vm_host"},
		Params: []testing.Param{
			// Parameters generated by params_test.go. DO NOT EDIT.
			{
				Name:              "buster_stable",
				ExtraSoftwareDeps: []string{"dlc"},
				ExtraHardwareDeps: crostini.CrostiniStable,
				Fixture:           "crostiniBuster",
				Timeout:           7 * time.Minute,
			},
			{
				Name:              "buster_unstable",
				ExtraAttr:         []string{"informational"},
				ExtraSoftwareDeps: []string{"dlc"},
				ExtraHardwareDeps: crostini.CrostiniUnstable,
				Fixture:           "crostiniBuster",
				Timeout:           7 * time.Minute,
			},
			{
				Name:              "bullseye_stable",
				ExtraSoftwareDeps: []string{"dlc"},
				ExtraHardwareDeps: crostini.CrostiniStable,
				Fixture:           "crostiniBullseye",
				Timeout:           7 * time.Minute,
			},
			{
				Name:              "bullseye_unstable",
				ExtraAttr:         []string{"informational"},
				ExtraSoftwareDeps: []string{"dlc"},
				ExtraHardwareDeps: crostini.CrostiniUnstable,
				Fixture:           "crostiniBullseye",
				Timeout:           7 * time.Minute,
			},
		},
	})
}

// HomeDirectoryRenameFile verifies that a file rename performed in the Files
// app is reflected inside the Crostini container, and that a rename performed
// inside the container is reflected in the Linux files share, in that order
// (the second rename operates on the output of the first).
func HomeDirectoryRenameFile(ctx context.Context, s *testing.State) {
	// Pull the pre-initialized connections out of the shared fixture.
	tconn := s.FixtValue().(crostini.FixtureData).Tconn
	cont := s.FixtValue().(crostini.FixtureData).Cont
	cr := s.FixtValue().(crostini.FixtureData).Chrome
	kb := s.FixtValue().(crostini.FixtureData).KB

	// Open Files app.
	filesApp, err := filesapp.Launch(ctx, tconn)
	if err != nil {
		s.Fatal("Failed to open Files app: ", err)
	}

	const (
		fileName     = "testRename.txt"
		newFileName  = "someotherdgsjtey"
		lastFileName = "lastFileName.txt"
	)

	// Create a file in container.
	if err := cont.Command(ctx, "touch", fileName).Run(testexec.DumpLogOnError); err != nil {
		s.Fatal("Failed to create a file in the container: ", err)
	}

	// Phase 1: rename via the Files app UI, verify inside the container.
	if err := testRenameFileFromLinuxFiles(ctx, filesApp, cont, kb, fileName, newFileName); err != nil {
		s.Fatal("Failed to test Renaming files in Linux files: ", err)
	}

	// The user hash is needed to build the host-side path where the
	// container's home directory is mounted.
	ownerID, err := cryptohome.UserHash(ctx, cr.NormalizedUser())
	if err != nil {
		s.Fatal("Failed to get user hash: ", err)
	}
	folderPath := "/media/fuse/crostini_" + ownerID + "_termina_penguin/"

	// Phase 2: rename inside the container, verify via the host mount.
	if err := testRenameFileFromContainer(ctx, cont, folderPath, newFileName, lastFileName); err != nil {
		s.Fatal("Failed to test Renaming files in container: ", err)
	}
}

// testRenameFileFromLinuxFiles first renames a file in Linux files (via the
// Files app UI) then polls until the rename is visible in the container,
// since propagation to the container is asynchronous.
func testRenameFileFromLinuxFiles(ctx context.Context, filesApp *filesapp.FilesApp, cont *vm.Container, kb *input.KeyboardEventWriter, fileName, newFileName string) error {
	// Rename a file in Linux files.
	if err := uiauto.Combine("Rename file",
		filesApp.OpenLinuxFiles(),
		filesApp.RenameFile(kb, fileName, newFileName))(ctx); err != nil {
		return errors.Wrapf(err, "failed to rename file %s in Linux files", fileName)
	}

	// Check the old file does not exist in container and the new one does.
	return testing.Poll(ctx, func(ctx context.Context) error {
		if err := cont.CheckFileDoesNotExistInDir(ctx, ".", fileName); err != nil {
			return err
		}
		return cont.CheckFilesExistInDir(ctx, ".", newFileName)
	}, &testing.PollOptions{Timeout: 5 * time.Second})
}

// testRenameFileFromContainer first renames a file in container then checks it is also renamed in Linux files.
func testRenameFileFromContainer(ctx context.Context, cont *vm.Container, folderPath, fileName, newFileName string) error { // Rename a file in container. if err := cont.Command(ctx, "mv", fileName, newFileName).Run(testexec.DumpLogOnError); err != nil { return errors.Wrapf(err, "failed to rename file %s in container", fileName) } if err := testing.Poll(ctx, func(ctx context.Context) error { // The old file should not exist in Linux files. if _, err := os.Stat(folderPath + fileName); !os.IsNotExist(err) { return errors.Wrapf(err, "file %s still exists", folderPath+fileName) } // The new file should exist in Linux files. if _, err := os.Stat(folderPath + newFileName); err != nil { return errors.Wrapf(err, "file %s does not exist", folderPath+newFileName) } return nil }, &testing.PollOptions{Timeout: 5 * time.Second}); err != nil { return err } return nil }
// Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"). You may not
// use this file except in compliance with the License. A copy of the
// License is located at
//
// http://aws.amazon.com/apache2.0/
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.

// Package configurepackage implements the ConfigurePackage plugin.
package configurepackage

import (
	"testing"

	"github.com/aws/amazon-ssm-agent/agent/appconfig"
	"github.com/aws/amazon-ssm-agent/agent/log"
	"github.com/stretchr/testify/assert"
)

// TestGetS3Location checks the S3 package URL built for a standard AWS region
// (us-west-2).
func TestGetS3Location(t *testing.T) {
	pluginInformation := createStubPluginInputInstall()
	util := NewUtil(createStubInstanceContext(), "")
	packageLocation := "https://s3.us-west-2.amazonaws.com/amazon-ssm-packages-us-west-2/Packages/PVDriver/" + appconfig.PackagePlatform + "/amd64/1.0.0/PVDriver.zip"
	result := util.GetS3Location(pluginInformation.Name, pluginInformation.Version)
	assert.Equal(t, packageLocation, result)
}

// TestGetS3Location_Bjs checks the S3 package URL built for the Beijing
// (cn-north-1) partition, which uses the .com.cn domain suffix.
func TestGetS3Location_Bjs(t *testing.T) {
	pluginInformation := createStubPluginInputInstall()
	util := NewUtil(createStubInstanceContextBjs(), "")
	packageLocation := "https://s3.cn-north-1.amazonaws.com.cn/amazon-ssm-packages-cn-north-1/Packages/PVDriver/" + appconfig.PackagePlatform + "/amd64/1.0.0/PVDriver.zip"
	result := util.GetS3Location(pluginInformation.Name, pluginInformation.Version)
	assert.Equal(t, packageLocation, result)
}

// TestGetLatestVersion_NumericSort verifies versions are compared numerically
// rather than lexically (10.0.0 beats 2.0.0).
func TestGetLatestVersion_NumericSort(t *testing.T) {
	versions := [3]string{"1.0.0", "2.0.0", "10.0.0"}
	latest := getLatestVersion(versions[:], "")
	assert.Equal(t, "10.0.0", latest)
}

// TestGetLatestVersion_OnlyOneValid verifies that candidates not in the
// three-part x.y.z form ("1.0", "1.0.0.0") are ignored.
func TestGetLatestVersion_OnlyOneValid(t *testing.T) {
	versions := [3]string{"0.0.0", "1.0", "1.0.0.0"}
	latest := getLatestVersion(versions[:], "")
	assert.Equal(t, "0.0.0", latest)
}

// TestGetLatestVersion_NoneValid verifies the empty string is returned when no
// candidate parses as a valid version.
func TestGetLatestVersion_NoneValid(t *testing.T) {
	versions := [3]string{"Foo", "1.0", "1.0.0.0"}
	latest := getLatestVersion(versions[:], "")
	assert.Equal(t, "", latest)
}

// TestGetLatestVersion_None verifies the empty string is returned for an empty
// candidate list.
func TestGetLatestVersion_None(t *testing.T) {
	versions := make([]string, 0)
	latest := getLatestVersion(versions[:], "")
	assert.Equal(t, "", latest)
}

// createStubPluginInputInstall returns plugin input for installing
// PVDriver 1.0.0.
func createStubPluginInputInstall() *ConfigurePackagePluginInput {
	input := ConfigurePackagePluginInput{}
	input.Version = "1.0.0"
	input.Name = "PVDriver"
	input.Action = "Install"
	return &input
}

// createStubPluginInputInstallLatest returns install input with no version
// set, i.e. "install latest".
func createStubPluginInputInstallLatest() *ConfigurePackagePluginInput {
	input := ConfigurePackagePluginInput{}
	input.Name = "PVDriver"
	input.Action = "Install"
	return &input
}

// createStubPluginInputUninstall returns plugin input for uninstalling
// PVDriver 1.0.0.
func createStubPluginInputUninstall() *ConfigurePackagePluginInput {
	input := ConfigurePackagePluginInput{}
	input.Version = "1.0.0"
	input.Name = "PVDriver"
	input.Action = "Uninstall"
	return &input
}

// createStubPluginInputUninstallLatest returns uninstall input with no version
// set.
func createStubPluginInputUninstallLatest() *ConfigurePackagePluginInput {
	input := ConfigurePackagePluginInput{}
	input.Name = "PVDriver"
	input.Action = "Uninstall"
	return &input
}

// createStubInvalidPluginInput returns deliberately malformed input: empty
// name, two-part version and an unrecognized action.
func createStubInvalidPluginInput() *ConfigurePackagePluginInput {
	input := ConfigurePackagePluginInput{}
	input.Version = "7.2"
	input.Name = ""
	input.Action = "InvalidAction"
	return &input
}

// createStubPluginInputFoo returns otherwise-valid input with the unknown
// action "Foo".
func createStubPluginInputFoo() *ConfigurePackagePluginInput {
	input := ConfigurePackagePluginInput{}
	input.Version = "1.0.0"
	input.Name = "PVDriver"
	input.Action = "Foo"
	return &input
}

// mockConfigureUtility is a hand-rolled test double whose methods return the
// canned values stored in its fields.
type mockConfigureUtility struct {
	packageFolder             string
	createPackageFolderError  error
	latestVersion             string
	getLatestVersionError     error
	s3Location                string
}

// GetLatestVersion returns the canned latest version and error.
func (u *mockConfigureUtility) GetLatestVersion(log log.T, name string) (latestVersion string, err error) {
	return u.latestVersion, u.getLatestVersionError
}

// GetS3Location returns the canned S3 location.
func (u *mockConfigureUtility) GetS3Location(packageName string, version string) (s3Location string) {
	return u.s3Location
}
package mediators import ( "app/models" "github.com/gin-gonic/gin" . "app/helpers" ) type postsMediator struct { Posts *models.Posts Context *gin.Context Collection []*models.Post } func (self *postsMediator) Find() (*CR, error) { var err error self.Collection, err = self.Posts.Find() return &CR{self}, err } func (self *postsMediator) ToJSON( code int, obj interface{} ) { self.Context.JSON( code, obj ) } func (self *postsMediator) GetCollection() interface{} { return self.Collection } // Constructor func Posts(context *gin.Context) *postsMediator { return &postsMediator{Posts: &models.Posts{}, Context: context} }
package orb

import (
	"fmt"
)

// Equal reports whether the two geometries are equal.
func Equal(g1, g2 Geometry) bool {
	// Two nil geometries are equal; a nil and a non-nil are not.
	if g1 == nil || g2 == nil {
		return g1 == g2
	}

	if g1.GeoJSONType() != g2.GeoJSONType() {
		return false
	}

	// For most cases the GeoJSON type check above guarantees the concrete
	// type of g2, so unchecked assertions are safe. Ring, Polygon and
	// Bound use checked assertions instead — presumably because they
	// share a GeoJSON type with each other (TODO confirm against the
	// GeoJSONType implementations).
	switch g1 := g1.(type) {
	case Point:
		return g1.Equal(g2.(Point))
	case MultiPoint:
		return g1.Equal(g2.(MultiPoint))
	case LineString:
		return g1.Equal(g2.(LineString))
	case MultiLineString:
		return g1.Equal(g2.(MultiLineString))
	case Ring:
		g2, ok := g2.(Ring)
		if !ok {
			return false
		}

		return g1.Equal(g2)
	case Polygon:
		g2, ok := g2.(Polygon)
		if !ok {
			return false
		}

		return g1.Equal(g2)
	case MultiPolygon:
		return g1.Equal(g2.(MultiPolygon))
	case Collection:
		return g1.Equal(g2.(Collection))
	case Bound:
		g2, ok := g2.(Bound)
		if !ok {
			return false
		}

		return g1.Equal(g2)
	}

	// An unknown Geometry implementation is a programmer error.
	panic(fmt.Sprintf("geometry type not supported: %T", g1))
}
package matcher

import (
	"context"
	"fmt"
	"os/exec"
	"regexp"
	"strings"
	"time"

	log "github.com/sirupsen/logrus"

	"gitlab.com/yakshaving.art/chief-alert-executor/internal"
	"gitlab.com/yakshaving.art/chief-alert-executor/internal/metrics"
)

// New creates a new Matcher with the provided configuration.
//
// May return an error if we fail to load the configuration.
func New(cnf internal.Configuration) (Matcher, error) {
	am := make([]*oneAlertMatcher, 0, len(cnf.Matchers))
	for _, m := range cnf.Matchers {
		matcher, err := newAlertMatcher(m)
		if err != nil {
			return nil, err
		}
		am = append(am, matcher)
	}
	return matcherMap{
		matchers: am,
	}, nil
}

// Matcher maps an incoming alert group to the executor configured for it, or
// to nil when no configured matcher applies.
type Matcher interface {
	Match(internal.AlertGroup) Match
}

// oneAlertMatcher holds the compiled label/annotation regexes and the command
// definition of a single configured matcher.
type oneAlertMatcher struct {
	matcherName string
	labels      map[string]*regexp.Regexp
	annotations map[string]*regexp.Regexp
	template    *internal.MessageTemplate
	cmd         string
	args        []string
	timeout     int // command execution timeout, in seconds
}

// Match reports whether ag carries every configured annotation and label and
// each value matches its configured regex. Mismatches are logged at debug
// level.
func (m oneAlertMatcher) Match(ag internal.AlertGroup) bool {
	for name, regex := range m.annotations {
		value, ok := ag.CommonAnnotations[name]
		if !ok {
			log.WithFields(log.Fields{
				"alertgroup": ag,
				"annotation": name,
				"matcher":    m.matcherName,
			}).Debugf("alert does not contain expected annotation")
			return false
		}
		if !regex.MatchString(value) {
			log.WithField("alertgroup", ag).
				WithField("annotation", name).
				WithField("value", value).
				WithField("matcher", m.matcherName).
				Debugf("alert does not match expected regex for annotation")
			return false
		}
	}

	for name, regex := range m.labels {
		value, ok := ag.CommonLabels[name]
		if !ok {
			log.WithFields(log.Fields{
				"alertgroup": ag,
				"label":      name,
				"matcher":    m.matcherName,
			}).Debugf("alert does not contain expected label")
			return false
		}
		if !regex.MatchString(value) {
			log.WithField("alertgroup", ag).
				WithField("label", name).
				WithField("value", value).
				WithField("matcher", m.matcherName).
				Debugf("alert does not match expected regex for label")
			return false
		}
	}

	log.WithField("alertgroup", ag).
		WithField("matcher", m).
		Debugf("alert matched")

	return true
}

// matcherMap tries each configured matcher in order and builds the command
// executor for the first one that matches.
type matcherMap struct {
	matchers []*oneAlertMatcher
}

// Match returns a cmdExecutor for the first matcher accepting ag, recording
// the match in metrics. It returns nil (and bumps AlertsMissed) when no
// matcher applies.
func (m matcherMap) Match(ag internal.AlertGroup) Match {
	for _, matcher := range m.matchers {
		if matcher.Match(ag) {
			metrics.AlertsMatchedToCommand.
				WithLabelValues(matcher.matcherName).Inc()
			log.WithFields(log.Fields{
				"alertgroup": ag,
				"matcher":    matcher}).
				Debugf("matched alertgroup")
			return cmdExecutor{
				template:    matcher.template,
				matcherName: matcher.matcherName,
				cmd:         matcher.cmd,
				args:        matcher.args,
				timeout:     time.Duration(matcher.timeout) * time.Second,
			}
		}
	}
	metrics.AlertsMissed.Inc()
	return nil
}

// Match represents a unit of work.
type Match interface {
	Name() string
	Template() *internal.MessageTemplate
	Execute() (string, error)
}

// cmdExecutor executes the command configured for a matched alert group.
type cmdExecutor struct {
	template    *internal.MessageTemplate
	matcherName string
	cmd         string
	args        []string
	timeout     time.Duration
}

// Name returns the name of the matcher that produced this executor.
func (c cmdExecutor) Name() string {
	return c.matcherName
}

// Template returns the message template configured for this matcher, if any.
func (c cmdExecutor) Template() *internal.MessageTemplate {
	return c.template
}

// Execute runs the configured command bounded by the configured timeout and
// returns its combined stdout/stderr output together with the execution
// error, if any. Outcome and duration are recorded in metrics labeled by
// matcher name and success.
func (c cmdExecutor) Execute() (string, error) {
	ctx, cancel := context.WithTimeout(context.Background(), c.timeout)
	defer cancel()

	startTime := time.Now()

	cmd := exec.CommandContext(ctx, c.cmd, c.args...)
	b, err := cmd.CombinedOutput()

	executionTime := time.Since(startTime)

	output := string(b)
	logger := log.WithField("output", output).
		WithField("cmd", c.cmd).
		WithField("matcher", c.matcherName).
		WithField("args", strings.Join(c.args, ","))

	if err != nil {
		logger.WithField("error", err).
			Error("Command failed execution")
		metrics.CommandsExecuted.WithLabelValues(c.matcherName, "false").Inc()
		metrics.CommandExecutionSeconds.WithLabelValues(c.matcherName, "false").Observe(executionTime.Seconds())
		return output, err
	}

	logger.Debug("Command executed correctly")
	metrics.CommandsExecuted.WithLabelValues(c.matcherName, "true").Inc()
	metrics.CommandExecutionSeconds.WithLabelValues(c.matcherName, "true").Observe(executionTime.Seconds())
	return output, nil
}

// newAlertMatcher validates a matcher configuration and compiles its label
// and annotation regexes, applying the default timeout when none is set.
func newAlertMatcher(mc internal.MatcherConfiguration) (*oneAlertMatcher, error) {
	if strings.TrimSpace(mc.Name) == "" {
		return nil, fmt.Errorf("matcher name can't be empty in %#v", mc)
	}
	if strings.TrimSpace(mc.Command) == "" {
		return nil, fmt.Errorf("command can't be empty in %#v", mc)
	}

	labels := make(map[string]*regexp.Regexp)
	for l, r := range mc.Labels {
		reg, err := regexp.Compile(r)
		if err != nil {
			return nil, fmt.Errorf("failed to compile regex for label %s (%s): %v", l, r, err)
		}
		labels[l] = reg
	}

	annotations := make(map[string]*regexp.Regexp)
	for a, r := range mc.Annotations {
		reg, err := regexp.Compile(r)
		if err != nil {
			return nil, fmt.Errorf("failed to compile regex for annotation %s (%s): %v", a, r, err)
		}
		annotations[a] = reg
	}

	timeout := mc.Timeout
	if timeout == 0 {
		timeout = 30 // By default, 30 seconds of command execution timeout
	}

	return &oneAlertMatcher{
		labels:      labels,
		annotations: annotations,
		matcherName: strings.TrimSpace(mc.Name),
		template:    mc.Template,
		cmd:         mc.Command,
		args:        mc.Arguments,
		timeout:     timeout,
	}, nil
}
package test import ( "encoding/json" "net/http" "testing" "time" "github.com/HDIOES/su4na-API-main/integration" "github.com/HDIOES/su4na-API-main/models" "github.com/HDIOES/su4na-API-main/rest" "github.com/HDIOES/su4na-API-main/rest/util" "github.com/gorilla/mux" "github.com/pkg/errors" "github.com/stretchr/testify/assert" _ "github.com/lib/pq" ) func TestSearchAnimes_pagingSuccess(t *testing.T) { diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) { if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } prepareTestData(t, animeDao, genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?limit=2&offset=2&order=id", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 2) animeExternalID2 := "2" animeName2 := "One Punch Man" russianAnimeName2 := "Один Удар Человек" animeURL2 := "/url.jpg" animePosterURL2 := "/url.jpg" animePosterURLRO2 := configuration.ShikimoriURL + animePosterURL2 animeURLRO2 := configuration.ShikimoriURL + animeURL2 animeRO2 := rest.AnimeRO{ ShikiID: animeExternalID2, Name: &animeName2, RussuanName: &russianAnimeName2, URL: &animeURLRO2, PosterURL: &animePosterURLRO2, } expectedAnimesRos = append(expectedAnimesRos, animeRO2) animeExternalID3 := "3" animeName3 := "One Punch Man" russianAnimeName3 := "Один Удар Человек" animeURL3 := "/url.jpg" animePosterURL3 := "/url.jpg" animePosterURLRO3 := configuration.ShikimoriURL + animePosterURL3 animeURLRO3 := configuration.ShikimoriURL + animeURL3 animeRO3 := rest.AnimeRO{ ShikiID: animeExternalID3, Name: &animeName3, RussuanName: &russianAnimeName3, URL: &animeURLRO3, 
PosterURL: &animePosterURLRO3, } expectedAnimesRos = append(expectedAnimesRos, animeRO3) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) } func TestSearchAnimes_byStatusSuccess(t *testing.T) { diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) { if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } prepareTestData(t, animeDao, genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?status=anons&order=id", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 1) animeExternalID8 := "8" animeName8 := "One Punch Man" russianAnimeName8 := "Один Удар Человек" animeURL8 := "/url.jpg" animePosterURL8 := "/url.jpg" animePosterURLRO8 := configuration.ShikimoriURL + animePosterURL8 animeURLRO8 := configuration.ShikimoriURL + animeURL8 animeRO8 := rest.AnimeRO{ ShikiID: animeExternalID8, Name: &animeName8, RussuanName: &russianAnimeName8, URL: &animeURLRO8, PosterURL: &animePosterURLRO8, } expectedAnimesRos = append(expectedAnimesRos, animeRO8) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) } func 
TestSearchAnimes_byKindSuccess(t *testing.T) { diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) { if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } prepareTestData(t, animeDao, genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?kind=movie&order=id", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 1) animeExternalID1 := "1" animeName1 := "One Punch Man" russianAnimeName1 := "Один Удар Человек" animeURL1 := "/url.jpg" animePosterURL1 := "/url.jpg" animePosterURLRO1 := configuration.ShikimoriURL + animePosterURL1 animeURLRO1 := configuration.ShikimoriURL + animeURL1 animeRO1 := rest.AnimeRO{ ShikiID: animeExternalID1, Name: &animeName1, RussuanName: &russianAnimeName1, URL: &animeURLRO1, PosterURL: &animePosterURLRO1, } expectedAnimesRos = append(expectedAnimesRos, animeRO1) animeExternalID10 := "10" animeName10 := "One Punch Man" russianAnimeName10 := "Один Удар Человек" animeURL10 := "/url.jpg" animePosterURL10 := "/url.jpg" animePosterURLRO10 := configuration.ShikimoriURL + animePosterURL10 animeURLRO10 := configuration.ShikimoriURL + animeURL10 animeRO10 := rest.AnimeRO{ ShikiID: animeExternalID10, Name: &animeName10, RussuanName: &russianAnimeName10, URL: &animeURLRO10, PosterURL: &animePosterURLRO10, } expectedAnimesRos = append(expectedAnimesRos, animeRO10) animeExternalID5 := "5" animeName5 := "One Punch Man" russianAnimeName5 := "Один Удар Человек" animeURL5 := "/url.jpg" animePosterURL5 := "/url.jpg" animePosterURLRO5 := configuration.ShikimoriURL + animePosterURL5 animeURLRO5 := configuration.ShikimoriURL + 
animeURL5 animeRO5 := rest.AnimeRO{ ShikiID: animeExternalID5, Name: &animeName5, RussuanName: &russianAnimeName5, URL: &animeURLRO5, PosterURL: &animePosterURLRO5, } expectedAnimesRos = append(expectedAnimesRos, animeRO5) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) } func TestSearchAnimes_byOrderSuccess(t *testing.T) { diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) { if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } prepareTestData(t, animeDao, genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?kind=movie&order=aired_on", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 3) animeExternalID1 := "1" animeName1 := "One Punch Man" russianAnimeName1 := "Один Удар Человек" animeURL1 := "/url.jpg" animePosterURL1 := "/url.jpg" animePosterURLRO1 := configuration.ShikimoriURL + animePosterURL1 animeURLRO1 := configuration.ShikimoriURL + animeURL1 animeRO1 := rest.AnimeRO{ ShikiID: animeExternalID1, Name: &animeName1, RussuanName: &russianAnimeName1, URL: &animeURLRO1, PosterURL: &animePosterURLRO1, } expectedAnimesRos = append(expectedAnimesRos, animeRO1) animeExternalID10 := "10" animeName10 := "One Punch Man" russianAnimeName10 := "Один Удар Человек" animeURL10 := "/url.jpg" animePosterURL10 := "/url.jpg" animePosterURLRO10 := 
configuration.ShikimoriURL + animePosterURL10 animeURLRO10 := configuration.ShikimoriURL + animeURL10 animeRO10 := rest.AnimeRO{ ShikiID: animeExternalID10, Name: &animeName10, RussuanName: &russianAnimeName10, URL: &animeURLRO10, PosterURL: &animePosterURLRO10, } expectedAnimesRos = append(expectedAnimesRos, animeRO10) animeExternalID5 := "5" animeName5 := "One Punch Man" russianAnimeName5 := "Один Удар Человек" animeURL5 := "/url.jpg" animePosterURL5 := "/url.jpg" animePosterURLRO5 := configuration.ShikimoriURL + animePosterURL5 animeURLRO5 := configuration.ShikimoriURL + animeURL5 animeRO5 := rest.AnimeRO{ ShikiID: animeExternalID5, Name: &animeName5, RussuanName: &russianAnimeName5, URL: &animeURLRO5, PosterURL: &animePosterURLRO5, } expectedAnimesRos = append(expectedAnimesRos, animeRO5) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) } func TestSearchAnimes_byScoreSuccess(t *testing.T) { diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) { if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } prepareTestData(t, animeDao, genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?score=8&order=id", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 2) animeExternalID10 := "10" animeName10 := "One Punch Man" russianAnimeName10 := "Один Удар Человек" animeURL10 
:= "/url.jpg" animePosterURL10 := "/url.jpg" animePosterURLRO10 := configuration.ShikimoriURL + animePosterURL10 animeURLRO10 := configuration.ShikimoriURL + animeURL10 animeRO10 := rest.AnimeRO{ ShikiID: animeExternalID10, Name: &animeName10, RussuanName: &russianAnimeName10, URL: &animeURLRO10, PosterURL: &animePosterURLRO10, } expectedAnimesRos = append(expectedAnimesRos, animeRO10) animeExternalID7 := "7" animeName7 := "One Punch Man" russianAnimeName7 := "Один Удар Человек" animeURL7 := "/url.jpg" animePosterURL7 := "/url.jpg" animePosterURLRO7 := configuration.ShikimoriURL + animePosterURL7 animeURLRO7 := configuration.ShikimoriURL + animeURL7 animeRO7 := rest.AnimeRO{ ShikiID: animeExternalID7, Name: &animeName7, RussuanName: &russianAnimeName7, URL: &animeURLRO7, PosterURL: &animePosterURLRO7, } expectedAnimesRos = append(expectedAnimesRos, animeRO7) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) } func TestSearchAnimes_byGenresIdsSuccess(t *testing.T) { diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) { if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } prepareTestData(t, animeDao, genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?genre=345&order=id", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 3) animeExternalID2 := "2" animeName2 := 
"One Punch Man" russianAnimeName2 := "Один Удар Человек" animeURL2 := "/url.jpg" animePosterURL2 := "/url.jpg" animePosterURLRO2 := configuration.ShikimoriURL + animePosterURL2 animeURLRO2 := configuration.ShikimoriURL + animeURL2 animeRO2 := rest.AnimeRO{ ShikiID: animeExternalID2, Name: &animeName2, RussuanName: &russianAnimeName2, URL: &animeURLRO2, PosterURL: &animePosterURLRO2, } expectedAnimesRos = append(expectedAnimesRos, animeRO2) animeExternalID4 := "4" animeName4 := "One Punch Man" russianAnimeName4 := "Один Удар Человек" animeURL4 := "/url.jpg" animePosterURL4 := "/url.jpg" animePosterURLRO4 := configuration.ShikimoriURL + animePosterURL4 animeURLRO4 := configuration.ShikimoriURL + animeURL4 animeRO4 := rest.AnimeRO{ ShikiID: animeExternalID4, Name: &animeName4, RussuanName: &russianAnimeName4, URL: &animeURLRO4, PosterURL: &animePosterURLRO4, } expectedAnimesRos = append(expectedAnimesRos, animeRO4) animeExternalID6 := "6" animeName6 := "One Punch Man" russianAnimeName6 := "Один Удар Человек" animeURL6 := "/url.jpg" animePosterURL6 := "/url.jpg" animePosterURLRO6 := configuration.ShikimoriURL + animePosterURL6 animeURLRO6 := configuration.ShikimoriURL + animeURL6 animeRO6 := rest.AnimeRO{ ShikiID: animeExternalID6, Name: &animeName6, RussuanName: &russianAnimeName6, URL: &animeURLRO6, PosterURL: &animePosterURLRO6, } expectedAnimesRos = append(expectedAnimesRos, animeRO6) animeExternalID9 := "9" animeName9 := "One Punch Man" russianAnimeName9 := "Один Удар Человек" animeURL9 := "/url.jpg" animePosterURL9 := "/url.jpg" animePosterURLRO9 := configuration.ShikimoriURL + animePosterURL9 animeURLRO9 := configuration.ShikimoriURL + animeURL9 animeRO9 := rest.AnimeRO{ ShikiID: animeExternalID9, Name: &animeName9, RussuanName: &russianAnimeName9, URL: &animeURLRO9, PosterURL: &animePosterURLRO9, } expectedAnimesRos = append(expectedAnimesRos, animeRO9) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr 
:= json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) }

// expectedAnimeRO builds the read-object that the search endpoint is expected
// to return for the seeded anime with the given external ID. All seeded animes
// share the same name and URLs (see prepareTestData), so only the ID varies.
func expectedAnimeRO(configuration *util.Configuration, externalID string) rest.AnimeRO {
	name := "One Punch Man"
	russianName := "Один Удар Человек"
	url := configuration.ShikimoriURL + "/url.jpg"
	posterURL := configuration.ShikimoriURL + "/url.jpg"
	return rest.AnimeRO{
		ShikiID:     externalID,
		Name:        &name,
		RussuanName: &russianName, // NOTE: field-name typo comes from the rest package and must be kept
		URL:         &url,
		PosterURL:   &posterURL,
	}
}

// searchAnimesAndAssert issues a GET request against the router and asserts
// that the response is HTTP 200 with a JSON body equal to the expected ROs.
func searchAnimesAndAssert(t *testing.T, router *mux.Router, url string, expectedAnimesRos []rest.AnimeRO) {
	request, err := http.NewRequest("GET", url, nil)
	if err != nil {
		markAsFailAndAbortNow(t, errors.Wrap(err, ""))
	}
	recorder := executeRequest(request, router)
	abortIfFail(t, assert.Equal(t, 200, recorder.Code))
	actualJSONResponseBody := recorder.Body.String()
	expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos)
	if marshalErr != nil {
		markAsFailAndAbortNow(t, errors.Wrap(marshalErr, ""))
	}
	abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody))
}

// TestSearchAnimes_byStudioIdsSuccess filters the seeded animes by studio
// external ID 345 and expects animes 5 and 8 (ordered by id).
func TestSearchAnimes_byStudioIdsSuccess(t *testing.T) {
	diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) {
		if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		prepareTestData(t, animeDao, genreDao, studioDao)
		searchAnimesAndAssert(t, router, "/api/animes/search?studio=345&order=id", []rest.AnimeRO{
			expectedAnimeRO(configuration, "5"),
			expectedAnimeRO(configuration, "8"),
		})
	})
}

// TestSearchAnimes_byDurationSuccess filters by the duration class D and
// expects animes 3, 4, 7 and 8 (ordered by id).
func TestSearchAnimes_byDurationSuccess(t *testing.T) {
	diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) {
		if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		prepareTestData(t, animeDao, genreDao, studioDao)
		searchAnimesAndAssert(t, router, "/api/animes/search?duration=D&order=id", []rest.AnimeRO{
			expectedAnimeRO(configuration, "3"),
			expectedAnimeRO(configuration, "4"),
			expectedAnimeRO(configuration, "7"),
			expectedAnimeRO(configuration, "8"),
		})
	})
}

// TestSearchAnimes_byRatingSuccess filters by rating r_plus and expects
// animes 3 and 7 (ordered by id).
func TestSearchAnimes_byRatingSuccess(t *testing.T) {
	diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) {
		if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		prepareTestData(t, animeDao, genreDao, studioDao)
		searchAnimesAndAssert(t, router, "/api/animes/search?rating=r_plus&order=id", []rest.AnimeRO{
			expectedAnimeRO(configuration, "3"),
			expectedAnimeRO(configuration, "7"),
		})
	})
}

// TestSearchAnimes_byFranchiseSuccess filters by franchise and expects
// animes 10 and 2, in the order the API returns them for order=id.
func TestSearchAnimes_byFranchiseSuccess(t *testing.T) {
	diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) {
		if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		prepareTestData(t, animeDao, genreDao, studioDao)
		searchAnimesAndAssert(t, router, "/api/animes/search?franchise=TheFiveWeddedBrides&order=id", []rest.AnimeRO{
			expectedAnimeRO(configuration, "10"),
			expectedAnimeRO(configuration, "2"),
		})
	})
}

// TestSearchAnimes_byIdsSuccess requests two explicit external IDs and expects
// animes 10 and 2, in the order the API returns them for order=id.
func TestSearchAnimes_byIdsSuccess(t *testing.T) {
	diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) {
		if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		prepareTestData(t, animeDao, genreDao, studioDao)
		searchAnimesAndAssert(t, router, "/api/animes/search?ids=2,10&order=id", []rest.AnimeRO{
			expectedAnimeRO(configuration, "10"),
			expectedAnimeRO(configuration, "2"),
		})
	})
}

// TestSearchAnimes_byExludeIdsSuccess excludes most seeded IDs; the request and
// assertions continue on the following source line. (The "Exlude" typo is kept:
// the test name is part of the public test surface.)
func TestSearchAnimes_byExludeIdsSuccess(t *testing.T) {
	diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) {
		if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		prepareTestData(t, animeDao,
genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?exclude_ids=1,2,3,4,5,8,9,10&order=id", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 2) animeExternalID6 := "6" animeName6 := "One Punch Man" russianAnimeName6 := "Один Удар Человек" animeURL6 := "/url.jpg" animePosterURL6 := "/url.jpg" animePosterURLRO6 := configuration.ShikimoriURL + animePosterURL6 animeURLRO6 := configuration.ShikimoriURL + animeURL6 animeRO6 := rest.AnimeRO{ ShikiID: animeExternalID6, Name: &animeName6, RussuanName: &russianAnimeName6, URL: &animeURLRO6, PosterURL: &animePosterURLRO6, } expectedAnimesRos = append(expectedAnimesRos, animeRO6) animeExternalID7 := "7" animeName7 := "One Punch Man" russianAnimeName7 := "Один Удар Человек" animeURL7 := "/url.jpg" animePosterURL7 := "/url.jpg" animePosterURLRO7 := configuration.ShikimoriURL + animePosterURL7 animeURLRO7 := configuration.ShikimoriURL + animeURL7 animeRO7 := rest.AnimeRO{ ShikiID: animeExternalID7, Name: &animeName7, RussuanName: &russianAnimeName7, URL: &animeURLRO7, PosterURL: &animePosterURLRO7, } expectedAnimesRos = append(expectedAnimesRos, animeRO7) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) }

// TestSearchAnimes_limitFail expects HTTP 400 when the limit query parameter
// is not a number.
func TestSearchAnimes_limitFail(t *testing.T) {
	diContainer.Invoke(func(router *mux.Router) {
		request, err := http.NewRequest("GET", "/api/animes/search?limit=34df4", nil)
		if err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		recorder := executeRequest(request, router)
		assert.Equal(t, 400, recorder.Code)
	})
}

// TestSearchAnimes_offsetFail expects HTTP 400 when the offset query parameter
// is not a number.
func TestSearchAnimes_offsetFail(t *testing.T) {
	diContainer.Invoke(func(router *mux.Router) {
		request, err := http.NewRequest("GET", "/api/animes/search?offset=df44", nil)
		if err != nil {
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		recorder := executeRequest(request, router)
		assert.Equal(t, 400, recorder.Code)
	})
}

// TestSearchAnimes_scoreFail expects HTTP 400 when the score query parameter
// is not a number.
func TestSearchAnimes_scoreFail(t *testing.T) {
	diContainer.Invoke(func(router *mux.Router) {
		request, err := http.NewRequest("GET", "/api/animes/search?score=hnk", nil)
		if err != nil {
			// Wrap for a stack trace, consistent with every other call site in
			// this file (previously the bare err was passed).
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		recorder := executeRequest(request, router)
		assert.Equal(t, 400, recorder.Code)
	})
}

// TestRandom_scoreFail expects HTTP 400 when the score query parameter of the
// random endpoint is not a number.
func TestRandom_scoreFail(t *testing.T) {
	diContainer.Invoke(func(router *mux.Router) {
		request, err := http.NewRequest("GET", "/api/animes/random?score=hnk", nil)
		if err != nil {
			// Wrap for a stack trace, consistent with every other call site in this file.
			markAsFailAndAbortNow(t, errors.Wrap(err, ""))
		}
		recorder := executeRequest(request, router)
		assert.Equal(t, 400, recorder.Code)
	})
}

func TestSearchAnimes_ByGenresIds_threeGenresOfOneAnimeSuccess(t *testing.T) { diContainer.Invoke(func(configuration *util.Configuration, job *integration.ShikimoriJob, newDao *models.NewDAO, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO, router *mux.Router) { if err := clearDb(newDao, animeDao, genreDao, studioDao); err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } prepareTestData(t, animeDao, genreDao, studioDao) //create request request, err := http.NewRequest("GET", "/api/animes/search?genre=101,678&order=id", nil) if err != nil { markAsFailAndAbortNow(t, errors.Wrap(err, "")) } recorder := executeRequest(request, router) //asserts abortIfFail(t, assert.Equal(t, 200, recorder.Code)) expectedAnimesRos := make([]rest.AnimeRO, 0, 1) animeExternalID9 := "9" animeName9 := "One Punch Man" russianAnimeName9 := "Один Удар Человек" animeURL9 := "/url.jpg" animePosterURL9 := "/url.jpg" animePosterURLRO9 := configuration.ShikimoriURL + animePosterURL9 animeURLRO9 := configuration.ShikimoriURL + animeURL9 animeRO9 :=
rest.AnimeRO{ ShikiID: animeExternalID9, Name: &animeName9, RussuanName: &russianAnimeName9, URL: &animeURLRO9, PosterURL: &animePosterURLRO9, } expectedAnimesRos = append(expectedAnimesRos, animeRO9) //get actual data actualJSONResponseBody := recorder.Body.String() expectedJSONResponseBodyBytes, marshalErr := json.Marshal(&expectedAnimesRos) if marshalErr != nil { markAsFailAndAbortNow(t, errors.Wrap(marshalErr, "")) } abortIfFail(t, assert.JSONEq(t, string(expectedJSONResponseBodyBytes), actualJSONResponseBody)) }) }

// prepareTestData seeds the database for the search test-cases: 10 animes with
// distinct external IDs (via buildAnime1..buildAnime10), 4 genres (external IDs
// 234, 345, 678, 101) and 2 studios (external IDs 234, 345).
// (The previous comment claimed "1 genre and 1 studio", which did not match the code.)
func prepareTestData(t *testing.T, animeDao *models.AnimeDAO, genreDao *models.GenreDAO, studioDao *models.StudioDAO) {
	// Insert the four genres; all fields except the external ID are identical.
	genreIDs := make([]int64, 0, 4)
	for _, externalID := range []string{"234", "345", "678", "101"} {
		genreName := "trashcore"
		genreRussianName := "трешкор"
		genreKind := "tv"
		genreDTO := models.GenreDTO{
			ExternalID: externalID,
			Name:       &genreName,
			Russian:    &genreRussianName,
			Kind:       &genreKind,
		}
		genreID, insertGenreErr := insertGenreToDatabase(genreDao, genreDTO)
		if insertGenreErr != nil {
			markAsFailAndAbortNow(t, errors.Wrap(insertGenreErr, ""))
		}
		genreIDs = append(genreIDs, genreID)
	}
	genreID1, genreID2, genreID3, genreID4 := genreIDs[0], genreIDs[1], genreIDs[2], genreIDs[3]

	// Insert the two studios; all fields except the external ID are identical.
	studioIDs := make([]int64, 0, 2)
	for _, externalID := range []string{"234", "345"} {
		studioName := "trash studio"
		studioFilteredName := "треш студия"
		studioIsReal := false
		studioImageURL := "/url.jpg"
		studioDTO := models.StudioDTO{
			ExternalID:         externalID,
			Name:               &studioName,
			FilteredStudioName: &studioFilteredName,
			IsReal:             &studioIsReal,
			ImageURL:           &studioImageURL,
		}
		studioID, insertStudioErr := insertStudioToDatabase(studioDao, studioDTO)
		if insertStudioErr != nil {
			markAsFailAndAbortNow(t, errors.Wrap(insertStudioErr, ""))
		}
		studioIDs = append(studioIDs, studioID)
	}
	studioID1, studioID2 := studioIDs[0], studioIDs[1]

	buildAnime1(t, animeDao, studioID1, genreID1)
	buildAnime2(t, animeDao, studioID1, genreID2)
	buildAnime3(t, animeDao, studioID1, genreID1)
	buildAnime4(t, animeDao, studioID1, genreID2)
	buildAnime5(t, animeDao, studioID2, genreID1)
	buildAnime6(t, animeDao, studioID1, genreID2)
	buildAnime7(t, animeDao, studioID1, genreID1)
	buildAnime8(t, animeDao, studioID2, genreID1)
	buildAnime9(t, animeDao, studioID1, genreID1, genreID2, genreID3, genreID4)
	buildAnime10(t, animeDao, studioID1, genreID1)
}
func
buildAnime1(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 1 to database animeExternalID1 := "1" animeName1 := "One Punch Man" animeRussianName1 := "Один Удар Человек" animeURL1 := "/url.jpg" animeKind1 := "movie" animeStatus1 := "ongoing" var animeEpizodes1 int64 = 12 var animeEpizodesAired1 int64 = 6 animeAiredOn1 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn1 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL1 := "/url.jpg" animeScore1 := 7.12 animeDuration1 := 5.0 animeRating1 := "r" animeFranchise1 := "onepunchman" animeProcessed1 := false testAnimeDto1 := models.AnimeDTO{ ExternalID: animeExternalID1, Name: &animeName1, Russian: &animeRussianName1, AnimeURL: &animeURL1, Kind: &animeKind1, Status: &animeStatus1, Epizodes: &animeEpizodes1, EpizodesAired: &animeEpizodesAired1, AiredOn: &animeAiredOn1, ReleasedOn: &animeReleasedOn1, PosterURL: &animePosterURL1, Score: &animeScore1, Duration: &animeDuration1, Rating: &animeRating1, Franchise: &animeFranchise1, Processed: &animeProcessed1, } animeID1, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto1) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID1, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID1, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime2(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 2 to database animeExternalID2 := "2" animeName2 := "One Punch Man" animeRussianName2 := "Один Удар Человек" animeURL2 := "/url.jpg" animeKind2 := "tv" animeStatus2 := "ongoing" var animeEpizodes2 int64 = 22 var animeEpizodesAired2 int64 = 6 animeAiredOn2 := 
time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn2 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL2 := "/url.jpg" animeScore2 := 7.22 animeDuration2 := 35.0 animeRating2 := "r" animeFranchise2 := "TheFiveWeddedBrides" animeProcessed2 := false testAnimeDto2 := models.AnimeDTO{ ExternalID: animeExternalID2, Name: &animeName2, Russian: &animeRussianName2, AnimeURL: &animeURL2, Kind: &animeKind2, Status: &animeStatus2, Epizodes: &animeEpizodes2, EpizodesAired: &animeEpizodesAired2, AiredOn: &animeAiredOn2, ReleasedOn: &animeReleasedOn2, PosterURL: &animePosterURL2, Score: &animeScore2, Duration: &animeDuration2, Rating: &animeRating2, Franchise: &animeFranchise2, Processed: &animeProcessed2, } animeID2, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto2) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID2, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID2, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime3(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 3 to database animeExternalID3 := "3" animeName3 := "One Punch Man" animeRussianName3 := "Один Удар Человек" animeURL3 := "/url.jpg" animeKind3 := "tv" animeStatus3 := "ongoing" var animeEpizodes3 int64 = 33 var animeEpizodesAired3 int64 = 6 animeAiredOn3 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn3 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL3 := "/url.jpg" animeScore3 := 7.33 animeDuration3 := 15.0 animeRating3 := "r_plus" animeFranchise3 := "onepunchman" animeProcessed3 := false testAnimeDto3 := models.AnimeDTO{ ExternalID: animeExternalID3, Name: &animeName3, 
Russian: &animeRussianName3, AnimeURL: &animeURL3, Kind: &animeKind3, Status: &animeStatus3, Epizodes: &animeEpizodes3, EpizodesAired: &animeEpizodesAired3, AiredOn: &animeAiredOn3, ReleasedOn: &animeReleasedOn3, PosterURL: &animePosterURL3, Score: &animeScore3, Duration: &animeDuration3, Rating: &animeRating3, Franchise: &animeFranchise3, Processed: &animeProcessed3, } animeID3, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto3) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID3, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID3, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime4(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 4 to database animeExternalID4 := "4" animeName4 := "One Punch Man" animeRussianName4 := "Один Удар Человек" animeURL4 := "/url.jpg" animeKind4 := "tv" animeStatus4 := "ongoing" var animeEpizodes4 int64 = 44 var animeEpizodesAired4 int64 = 6 animeAiredOn4 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn4 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL4 := "/url.jpg" animeScore4 := 7.44 animeDuration4 := 10.0 animeRating4 := "r" animeFranchise4 := "onepunchman" animeProcessed4 := false testAnimeDto4 := models.AnimeDTO{ ExternalID: animeExternalID4, Name: &animeName4, Russian: &animeRussianName4, AnimeURL: &animeURL4, Kind: &animeKind4, Status: &animeStatus4, Epizodes: &animeEpizodes4, EpizodesAired: &animeEpizodesAired4, AiredOn: &animeAiredOn4, ReleasedOn: &animeReleasedOn4, PosterURL: &animePosterURL4, Score: &animeScore4, Duration: &animeDuration4, Rating: &animeRating4, Franchise: &animeFranchise4, Processed: 
&animeProcessed4, } animeID4, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto4) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID4, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID4, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime5(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 5 to database animeExternalID5 := "5" animeName5 := "One Punch Man" animeRussianName5 := "Один Удар Человек" animeURL5 := "/url.jpg" animeKind5 := "movie" animeStatus5 := "ongoing" var animeEpizodes5 int64 = 55 var animeEpizodesAired5 int64 = 6 animeAiredOn5 := time.Date(2011, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn5 := time.Date(2011, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL5 := "/url.jpg" animeScore5 := 7.55 animeDuration5 := 40.0 animeRating5 := "r" animeFranchise5 := "onepunchman" animeProcessed5 := false testAnimeDto5 := models.AnimeDTO{ ExternalID: animeExternalID5, Name: &animeName5, Russian: &animeRussianName5, AnimeURL: &animeURL5, Kind: &animeKind5, Status: &animeStatus5, Epizodes: &animeEpizodes5, EpizodesAired: &animeEpizodesAired5, AiredOn: &animeAiredOn5, ReleasedOn: &animeReleasedOn5, PosterURL: &animePosterURL5, Score: &animeScore5, Duration: &animeDuration5, Rating: &animeRating5, Franchise: &animeFranchise5, Processed: &animeProcessed5, } animeID5, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto5) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID5, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, 
errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID5, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime6(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 6 to database animeExternalID6 := "6" animeName6 := "One Punch Man" animeRussianName6 := "Один Удар Человек" animeURL6 := "/url.jpg" animeKind6 := "tv" animeStatus6 := "ongoing" var animeEpizodes6 int64 = 6 var animeEpizodesAired6 int64 = 6 animeAiredOn6 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn6 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL6 := "/url.jpg" animeScore6 := 7.66 animeDuration6 := 30.0 animeRating6 := "r" animeFranchise6 := "onepunchman" animeProcessed6 := false testAnimeDto6 := models.AnimeDTO{ ExternalID: animeExternalID6, Name: &animeName6, Russian: &animeRussianName6, AnimeURL: &animeURL6, Kind: &animeKind6, Status: &animeStatus6, Epizodes: &animeEpizodes6, EpizodesAired: &animeEpizodesAired6, AiredOn: &animeAiredOn6, ReleasedOn: &animeReleasedOn6, PosterURL: &animePosterURL6, Score: &animeScore6, Duration: &animeDuration6, Rating: &animeRating6, Franchise: &animeFranchise6, Processed: &animeProcessed6, } animeID6, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto6) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID6, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID6, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime7(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 7 to database animeExternalID7 := "7" 
animeName7 := "One Punch Man" animeRussianName7 := "Один Удар Человек" animeURL7 := "/url.jpg" animeKind7 := "tv" animeStatus7 := "ongoing" var animeEpizodes7 int64 = 7 var animeEpizodesAired7 int64 = 7 animeAiredOn7 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn7 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL7 := "/url.jpg" animeScore7 := 8.77 animeDuration7 := 25.0 animeRating7 := "r_plus" animeFranchise7 := "onepunchman" animeProcessed7 := false testAnimeDto7 := models.AnimeDTO{ ExternalID: animeExternalID7, Name: &animeName7, Russian: &animeRussianName7, AnimeURL: &animeURL7, Kind: &animeKind7, Status: &animeStatus7, Epizodes: &animeEpizodes7, EpizodesAired: &animeEpizodesAired7, AiredOn: &animeAiredOn7, ReleasedOn: &animeReleasedOn7, PosterURL: &animePosterURL7, Score: &animeScore7, Duration: &animeDuration7, Rating: &animeRating7, Franchise: &animeFranchise7, Processed: &animeProcessed7, } animeID7, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto7) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID7, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID7, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime8(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 8 to database animeExternalID8 := "8" animeName8 := "One Punch Man" animeRussianName8 := "Один Удар Человек" animeURL8 := "/url.jpg" animeKind8 := "tv" animeStatus8 := "anons" var animeEpizodes8 int64 = 8 var animeEpizodesAired8 int64 = 8 animeAiredOn8 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn8 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL8 := "/url.jpg" 
animeScore8 := 7.12 animeDuration8 := 25.0 animeRating8 := "r" animeFranchise8 := "onepunchman" animeProcessed8 := false testAnimeDto8 := models.AnimeDTO{ ExternalID: animeExternalID8, Name: &animeName8, Russian: &animeRussianName8, AnimeURL: &animeURL8, Kind: &animeKind8, Status: &animeStatus8, Epizodes: &animeEpizodes8, EpizodesAired: &animeEpizodesAired8, AiredOn: &animeAiredOn8, ReleasedOn: &animeReleasedOn8, PosterURL: &animePosterURL8, Score: &animeScore8, Duration: &animeDuration8, Rating: &animeRating8, Franchise: &animeFranchise8, Processed: &animeProcessed8, } animeID8, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto8) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID8, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID8, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime9(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 9 to database animeExternalID9 := "9" animeName9 := "One Punch Man" animeRussianName9 := "Один Удар Человек" animeURL9 := "/url.jpg" animeKind9 := "tv" animeStatus9 := "ongoing" var animeEpizodes9 int64 = 9 var animeEpizodesAired9 int64 = 9 animeAiredOn9 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn9 := time.Date(2009, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL9 := "/url.jpg" animeScore9 := 7.12 animeDuration9 := 2.0 animeRating9 := "r" animeFranchise9 := "onepunchman" animeProcessed9 := false testAnimeDto9 := models.AnimeDTO{ ExternalID: animeExternalID9, Name: &animeName9, Russian: &animeRussianName9, AnimeURL: &animeURL9, Kind: &animeKind9, Status: &animeStatus9, Epizodes: &animeEpizodes9, EpizodesAired: &animeEpizodesAired9, AiredOn: 
&animeAiredOn9, ReleasedOn: &animeReleasedOn9, PosterURL: &animePosterURL9, Score: &animeScore9, Duration: &animeDuration9, Rating: &animeRating9, Franchise: &animeFranchise9, Processed: &animeProcessed9, } animeID9, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto9) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID9, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID9, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime10(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 10 to database animeExternalID10 := "10" animeName10 := "One Punch Man" animeRussianName10 := "Один Удар Человек" animeURL10 := "/url.jpg" animeKind10 := "movie" animeStatus10 := "ongoing" var animeEpizodes10 int64 = 10 var animeEpizodesAired10 int64 = 10 animeAiredOn10 := time.Date(2010, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn10 := time.Date(2010, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL10 := "/url.jpg" animeScore10 := 8.12 animeDuration10 := 50.0 animeRating10 := "r" animeFranchise10 := "TheFiveWeddedBrides" animeProcessed10 := false testAnimeDto10 := models.AnimeDTO{ ExternalID: animeExternalID10, Name: &animeName10, Russian: &animeRussianName10, AnimeURL: &animeURL10, Kind: &animeKind10, Status: &animeStatus10, Epizodes: &animeEpizodes10, EpizodesAired: &animeEpizodesAired10, AiredOn: &animeAiredOn10, ReleasedOn: &animeReleasedOn10, PosterURL: &animePosterURL10, Score: &animeScore10, Duration: &animeDuration10, Rating: &animeRating10, Franchise: &animeFranchise10, Processed: &animeProcessed10, } animeID10, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto10) if insertAnimeErr != nil { 
markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID10, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID10, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } } func buildAnime5114(t *testing.T, animeDao *models.AnimeDAO, studioID int64, genreIDs ...int64) { //insert anime 5114 to database animeExternalID5114 := "5114" animeName5114 := "One Punch Man" animeRussianName5114 := "Один Удар Человек" animeURL5114 := "/url.jpg" animeKind5114 := "movie" animeStatus5114 := "ongoing" var animeEpizodes5114 int64 = 10 var animeEpizodesAired5114 int64 = 10 animeAiredOn5114 := time.Date(2010, 11, 17, 20, 20, 20, 0, time.UTC) animeReleasedOn5114 := time.Date(2010, 11, 17, 20, 20, 20, 0, time.UTC) animePosterURL5114 := "/url.jpg" animeScore5114 := 8.12 animeDuration5114 := 50.0 animeRating5114 := "r" animeFranchise5114 := "TheFiveWeddedBrides" animeProcessed5114 := false testAnimeDto5114 := models.AnimeDTO{ ExternalID: animeExternalID5114, Name: &animeName5114, Russian: &animeRussianName5114, AnimeURL: &animeURL5114, Kind: &animeKind5114, Status: &animeStatus5114, Epizodes: &animeEpizodes5114, EpizodesAired: &animeEpizodesAired5114, AiredOn: &animeAiredOn5114, ReleasedOn: &animeReleasedOn5114, PosterURL: &animePosterURL5114, Score: &animeScore5114, Duration: &animeDuration5114, Rating: &animeRating5114, Franchise: &animeFranchise5114, Processed: &animeProcessed5114, } animeID5114, insertAnimeErr := insertAnimeToDatabase(animeDao, testAnimeDto5114) if insertAnimeErr != nil { markAsFailAndAbortNow(t, errors.Wrap(insertAnimeErr, "")) } for _, g := range genreIDs { if linkAnimeAndGenreErr := linkAnimeAndGenre(animeDao, animeID5114, g); linkAnimeAndGenreErr != nil { markAsFailAndAbortNow(t, 
errors.Wrap(linkAnimeAndGenreErr, "")) } } if linkAnimeAndStudioErr := linkAnimeAndStudio(animeDao, animeID5114, studioID); linkAnimeAndStudioErr != nil { markAsFailAndAbortNow(t, errors.Wrap(linkAnimeAndStudioErr, "")) } }
// Copyright 2020 The ChromiumOS Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Package wm provides Window Manager Helper functions. package wm import ( "context" "math" "time" "chromiumos/tast/common/android/ui" "chromiumos/tast/errors" "chromiumos/tast/local/arc" "chromiumos/tast/local/chrome" "chromiumos/tast/local/chrome/ash" "chromiumos/tast/local/chrome/display" "chromiumos/tast/local/coords" "chromiumos/tast/testing" ) const roundingError = 0.01 // TabletLaunchActivityInfo holds activity info. type TabletLaunchActivityInfo struct { // Test-case activity name. ActivityName string // Activity's desired orientation. DesiredDO display.OrientationType } // TabletDefaultLaunchHelper runs tablet default lunch test-cases by given activity names. func TabletDefaultLaunchHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo []TabletLaunchActivityInfo, isResizable bool) error { // Test-cases for activities with specified orientation. for _, tc := range activityInfo { if err := func() (err error) { // Set the display to the opposite orientation, so when activity starts, the display should adjust itself to match with activity's desired orientation. if err := setDisplayOrientation(ctx, tconn, getOppositeDisplayOrientation(tc.DesiredDO)); err != nil { return err } // Need to clear the orientation set by setDisplayOrientation. defer func() { if clearErr := clearDisplayRotation(ctx, tconn); clearErr != nil { testing.ContextLog(ctx, "Failed to clear display rotation: ", clearErr) if err == nil { err = clearErr } } }() // Start the activity. 
act, newActivityErr := arc.NewActivity(a, Pkg24, tc.ActivityName) if newActivityErr != nil { return newActivityErr } defer act.Close() if err := act.StartWithDefaultOptions(ctx, tconn); err != nil { return err } defer func() { if stopErr := act.Stop(ctx, tconn); stopErr != nil { testing.ContextLog(ctx, "Failed to stop the activity: ", stopErr) if err == nil { err = stopErr } } }() if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil { return err } if err := CheckMaximizeWindowInTabletMode(ctx, tconn, Pkg24); err != nil { return err } newDO, err := display.GetOrientation(ctx, tconn) if err != nil { return err } // Compare display orientation after activity is ready, it should be equal to activity's desired orientation. // As it's not guaranteed that the display rotates to a "primary" orientation, only check the "binary" orientation here. if isPortraitOrientation(tc.DesiredDO) != isPortraitOrientation(newDO.Type) { return errors.Errorf("invalid display orientation: got %q; want %q", newDO.Type, tc.DesiredDO) } return nil }(); err != nil { return errors.Wrapf(err, "%q test failed", tc.ActivityName) } } // Unspecified activity orientation. // Set the display to an orientation, then start Unspecified activity. // Unspecified activity shouldn't change the display orientation. for _, displayOrientation := range []display.OrientationType{ display.OrientationPortraitPrimary, display.OrientationLandscapePrimary, } { unActName := NonResizableUnspecifiedActivity if isResizable { unActName = ResizableUnspecifiedActivity } if err := checkUnspecifiedActivityInTabletMode(ctx, tconn, a, d, displayOrientation, unActName); err != nil { return errors.Wrapf(err, "%q test failed", NonResizableUnspecifiedActivity) } } return nil } // TabletShelfHideShowHelper runs tablet test-cases that hide and show the shelf. 
func TabletShelfHideShowHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo []TabletLaunchActivityInfo, checkFunc CheckFunc) error { // Get primary display info to set shelf behavior. primaryDisplayInfo, err := display.GetPrimaryInfo(ctx, tconn) if err != nil { return err } if primaryDisplayInfo == nil { return errors.New("failed to find primary display info") } for _, tc := range activityInfo { if err := showHideShelfHelper(ctx, tconn, a, d, tc, primaryDisplayInfo.ID, checkFunc); err != nil { return errors.Wrapf(err, "%q test failed", tc) } } return nil } // TabletDisplaySizeChangeHelper runs test-cases for tablet display size change. func TabletDisplaySizeChangeHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo []TabletLaunchActivityInfo) (err error) { defer func() { if clearErr := clearDisplayRotation(ctx, tconn); clearErr != nil { testing.ContextLog(ctx, "Failed to clear display rotation: ", err) if err == nil { err = clearErr } } }() for _, tc := range activityInfo { if err := displaySizeChangeHelper(ctx, tconn, a, d, tc); err != nil { return errors.Wrapf(err, "%q test failed", tc) } } return nil } // TabletImmerseViaAPI runs test-cases for immerse via API. func TabletImmerseViaAPI(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo []TabletLaunchActivityInfo) error { // Get the default display orientation and set it back after all test-cases are completed. o, err := display.GetOrientation(ctx, tconn) if err != nil { return err } defer setDisplayOrientation(ctx, tconn, o.Type) for _, tc := range activityInfo { if err := tabletImmerseViaAPIHelper(ctx, tconn, a, d, tc); err != nil { return err } } return nil } // TabletFontSizeChangeHelper runs test-cases for tablet font size change. 
func TabletFontSizeChangeHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo []TabletLaunchActivityInfo) (err error) { defer func() { if clearErr := clearDisplayRotation(ctx, tconn); clearErr != nil { testing.ContextLog(ctx, "Failed to clear display rotation: ", err) if err == nil { err = clearErr } } }() for _, tc := range activityInfo { if err := tabletFontScaleChangeHelper(ctx, tconn, a, d, tc); err != nil { return errors.Wrapf(err, "%q test failed", tc) } } return nil } // tabletFontScaleChangeHelper changes the font scale for any given activity. func tabletFontScaleChangeHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, actInfo TabletLaunchActivityInfo) error { // Font scale, this const must not be 1. const fsc = 1.2 // Start a new activity. act, err := arc.NewActivity(a, Pkg24, actInfo.ActivityName) if err != nil { return errors.Wrap(err, "unable to create new activity") } defer act.Close() if err := act.StartWithDefaultOptions(ctx, tconn); err != nil { return errors.Wrap(err, "unable to start new activity") } defer act.Stop(ctx, tconn) if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil { return errors.Wrap(err, "unable to wait until activity is ready") } // Wait until display rotates to activities desired orientation. if err := testing.Poll(ctx, func(ctx context.Context) error { newDO, err := display.GetOrientation(ctx, tconn) if err != nil { return testing.PollBreak(err) } if actInfo.DesiredDO != newDO.Type { return errors.Errorf("invalid display orientation: got %q; want %q", newDO.Type, actInfo.DesiredDO) } return nil }, &testing.PollOptions{Timeout: 5 * time.Second}); err != nil { return err } // Store original window info. owInfo, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return errors.Wrap(err, "unable to get arc app window info") } // Change the font scale. 
if err := EnsureARCFontScaleChanged(ctx, a, fsc); err != nil { return errors.Wrap(err, "unable to change font scale") } defer EnsureARCFontScaleChanged(ctx, a, 1) // Get the font scale. nfs, err := GetARCFontScale(ctx, a) if err != nil { return errors.Wrap(err, "unable to get font scale") } // Get window info after font scale change. wInfo, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return errors.Wrap(err, "unable to get arc app window info") } if owInfo.TargetBounds != wInfo.TargetBounds { return errors.Errorf("invalid window bounds after font scale is changed: got %q; want %q", wInfo.TargetBounds, owInfo.TargetBounds) } // Compare font scale before and after font scale change. if nfs != fsc { return errors.Errorf("invalid font scale after font scale is changed: got %.1f; want %.1f", nfs, fsc) } return nil } // tabletImmerseViaAPIHelper clicks on immersive button on the activity and switch it back to normal and assert window bounds accordingly. func tabletImmerseViaAPIHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo TabletLaunchActivityInfo) error { // Start a new activity. act, err := arc.NewActivity(a, Pkg24, activityInfo.ActivityName) if err != nil { return err } defer act.Close() if err := act.StartWithDefaultOptions(ctx, tconn); err != nil { return err } defer act.Stop(ctx, tconn) if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil { return err } // Wait until display rotates to activities desired orientation. Undefined activities are following the previous activity orientation. 
testing.Poll(ctx, func(ctx context.Context) error { newDO, err := display.GetOrientation(ctx, tconn) if err != nil { return testing.PollBreak(err) } if activityInfo.DesiredDO != newDO.Type { return errors.Errorf("invalid display orientation: got %q, want %q", newDO.Type, activityInfo.DesiredDO) } return nil }, &testing.PollOptions{Timeout: 5 * time.Second}) // Get window info before clicking on the immersive button. winBefore, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return err } // Click on the immersive button. if err := UIClickImmersive(ctx, act, d); err != nil { return err } if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil { return err } if err != nil { return err } if err := ash.WaitWindowFinishAnimating(ctx, tconn, winBefore.ID); err != nil { return err } if err := CheckMaximizeToFullscreenToggle(ctx, tconn, winBefore.TargetBounds, Pkg24); err != nil { return err } // Click on the normal button. if err := UIClickNormal(ctx, act, d); err != nil { return err } if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil { return err } if err := ash.WaitWindowFinishAnimating(ctx, tconn, winBefore.ID); err != nil { return err } winAfter, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return err } // The display orientation shouldn't be changed after clicking on normal button. newDO, err := display.GetOrientation(ctx, tconn) if err != nil { return err } if activityInfo.DesiredDO != newDO.Type { return errors.Errorf("invalid display orientation after normal button is clicked: got %q; want %q", newDO.Type, activityInfo.DesiredDO) } if winBefore.BoundsInRoot != winAfter.BoundsInRoot { return errors.Errorf("invalid window bounds after click on the immersive button: got %q; want %q", winAfter.BoundsInRoot, winBefore.BoundsInRoot) } return nil } // displaySizeChangeHelper runs display size change scenarios in tablet mode. 
func displaySizeChangeHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo TabletLaunchActivityInfo) (err error) { // Start a new activity. act, newActivityErr := arc.NewActivity(a, Pkg24, activityInfo.ActivityName) if newActivityErr != nil { return newActivityErr } defer act.Close() if err := act.StartWithDefaultOptions(ctx, tconn); err != nil { return err } defer func() { if stopErr := act.Stop(ctx, tconn); stopErr != nil { testing.ContextLog(ctx, "Failed to stop the activity: ", stopErr) if err == nil { err = stopErr } } }() if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil { return err } // Wait until display rotates to activities desired orientation. Undefined activities are following the previous activity orientation. testing.Poll(ctx, func(ctx context.Context) error { newDO, err := display.GetOrientation(ctx, tconn) if err != nil { return testing.PollBreak(err) } if activityInfo.DesiredDO != newDO.Type { return errors.Errorf("invalid display orientation: got %q; want %q", newDO.Type, activityInfo.DesiredDO) } return nil }, &testing.PollOptions{Timeout: 5 * time.Second}) // Get primary display info before zoom. dispInfoBeforeZoom, err := display.GetPrimaryInfo(ctx, tconn) if err != nil { return err } if dispInfoBeforeZoom == nil { return errors.New("failed to find primary display info") } displayID := dispInfoBeforeZoom.ID appWindowInfoBeforeZoom, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return err } if dispInfoBeforeZoom.WorkArea != appWindowInfoBeforeZoom.BoundsInRoot { return errors.Errorf("invalid activity bounds, the activity must cover the display work area: got %q; want %q", appWindowInfoBeforeZoom.BoundsInRoot, dispInfoBeforeZoom.WorkArea) } displayZoomFactors := dispInfoBeforeZoom.AvailableDisplayZoomFactors newZoom := 0. 
for _, z := range displayZoomFactors { if z > 1 { newZoom = z break } } if newZoom == 0 { return errors.Errorf("invalid AvailableDisplayZoomFactors: got %v; want array with at least one value different than '1'", displayZoomFactors) } if err := ChangeDisplayZoomFactor(ctx, tconn, displayID, newZoom); err != nil { return err } defer ChangeDisplayZoomFactor(ctx, tconn, displayID, dispInfoBeforeZoom.DisplayZoomFactor) appWindowInfoAfterZoom, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return err } // Get primary display info after display resolution change. dispInfoAfterZoom, err := display.GetPrimaryInfo(ctx, tconn) if err != nil { return err } if dispInfoAfterZoom == nil { return errors.New("failed to find primary display info") } testing.Poll(ctx, func(ctx context.Context) error { if dispInfoAfterZoom.WorkArea != appWindowInfoAfterZoom.BoundsInRoot { return errors.Errorf("invalid activity bounds, the activity must cover the display work area: got %q; want %q", appWindowInfoAfterZoom.BoundsInRoot, dispInfoAfterZoom.WorkArea) } return nil }, &testing.PollOptions{Timeout: 5 * time.Second}) // DPI before zoom divided by DPI after zoom should be equal to zoom coefficient. Because of possible roundings, the difference is calculated that should be less than 0.01 to have up to 2 decimal points of precision. if math.Abs(newZoom-dispInfoBeforeZoom.DPIX/dispInfoAfterZoom.DPIX) > roundingError { return errors.Errorf("invalid DPIX ratio after resolution changed: got %.3f; want %.3f", dispInfoBeforeZoom.DPIX/dispInfoAfterZoom.DPIX, newZoom) } if math.Abs(newZoom-dispInfoBeforeZoom.DPIY/dispInfoAfterZoom.DPIY) > roundingError { return errors.Errorf("invalid DPIY ratio after resolution changed: got %.3f; want %.3f", dispInfoBeforeZoom.DPIY/dispInfoAfterZoom.DPIY, newZoom) } return nil } // showHideShelfHelper runs shelf show/hide scenarios per activity on tablet. 
func showHideShelfHelper(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, activityInfo TabletLaunchActivityInfo, pdID string, checkFunc CheckFunc) (err error) { // Get initial shelf behavior to make sure it is never hide. initSB, initErr := ash.GetShelfBehavior(ctx, tconn, pdID) if initErr != nil { return initErr } if initSB != ash.ShelfBehaviorNeverAutoHide { // Set shelf behavior to never auto hide for test's initial state. if err := ash.SetShelfBehavior(ctx, tconn, pdID, ash.ShelfBehaviorNeverAutoHide); err != nil { return err } } defer func() { if clearErr := clearDisplayRotation(ctx, tconn); clearErr != nil { testing.ContextLog(ctx, "Failed to clear display rotation: ", clearErr) if err == nil { err = clearErr } } }() // Start the activity. act, newActivityErr := arc.NewActivity(a, Pkg24, activityInfo.ActivityName) if newActivityErr != nil { return newActivityErr } defer act.Close() if err := act.StartWithDefaultOptions(ctx, tconn); err != nil { return err } defer func() { if stopErr := act.Stop(ctx, tconn); stopErr != nil { testing.ContextLog(ctx, "Failed to stop the activity: ", stopErr) if err == nil { err = stopErr } } }() if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil { return err } if err := checkFunc(ctx, tconn, act, d); err != nil { return err } // Check the display orientation. displayOrientation, err := display.GetOrientation(ctx, tconn) if err != nil { return err } // Compare display orientation after activity is ready, it should be equal to activity's desired orientation. if activityInfo.DesiredDO != displayOrientation.Type { return errors.Errorf("invalid display orientation: got %q; want %q", displayOrientation.Type, activityInfo.DesiredDO) } // Store initial window info to compare with after hiding and showing the shelf. winInfoInitialState, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return err } // Set shelf behavior to auto hide. 
if err := ash.SetShelfBehavior(ctx, tconn, pdID, ash.ShelfBehaviorAlwaysAutoHide); err != nil { return err } // Wait for shelf animation to complete. if err := WaitForShelfAnimationComplete(ctx, tconn); err != nil { return errors.Wrap(err, "failed to wait for shelf animation to complete") } // Compare window bounds before and after hiding the shelf. It should be larger when shelf is hidden. testing.Poll(ctx, func(ctx context.Context) error { winInfoShelfHidden, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return err } if winInfoShelfHidden.BoundsInRoot.Height <= winInfoInitialState.BoundsInRoot.Height { return errors.Errorf("invalid window bounds when shelf is shown: got %s; want smaller than %s", winInfoInitialState.BoundsInRoot, winInfoShelfHidden.BoundsInRoot) } return nil }, &testing.PollOptions{Timeout: 5 * time.Second}) // Show the shelf. if err := ash.SetShelfBehavior(ctx, tconn, pdID, ash.ShelfBehaviorNeverAutoHide); err != nil { return err } // Wait for shelf animation to complete. if err := WaitForShelfAnimationComplete(ctx, tconn); err != nil { return errors.Wrap(err, "failed to wait for shelf animation to complete") } if err := checkFunc(ctx, tconn, act, d); err != nil { return err } // Compare window bounds after showing the shelf with initial bounds. They should be equal. return testing.Poll(ctx, func(ctx context.Context) error { winInfoShelfReShown, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24) if err != nil { return err } if winInfoInitialState.BoundsInRoot != winInfoShelfReShown.BoundsInRoot { return errors.Errorf("invalid window bounds after hiding and showing the shelf: got %s; want %s", winInfoShelfReShown.BoundsInRoot, winInfoInitialState.BoundsInRoot) } return nil }, &testing.PollOptions{Timeout: 5 * time.Second}) } // getOppositeDisplayOrientation returns Portrait for Landscape orientation and vice versa. 
func getOppositeDisplayOrientation(orientation display.OrientationType) display.OrientationType {
	// Only the two "primary" orientations are produced; secondary variants are
	// handled by the binary portrait/landscape comparison at the call sites.
	if orientation == display.OrientationPortraitPrimary {
		return display.OrientationLandscapePrimary
	}
	return display.OrientationPortraitPrimary
}

// isPortraitOrientation returns true if the given orientation is portrait-primary or portrait-secondary.
func isPortraitOrientation(orientation display.OrientationType) bool {
	return orientation == display.OrientationPortraitPrimary || orientation == display.OrientationPortraitSecondary
}

// RotateToLandscape ensures to set the primary display orientation to landscape.
// It returns a cleanup function (from RotateDisplay) that undoes the rotation;
// when the display is already landscape the returned cleanup is a no-op.
func RotateToLandscape(ctx context.Context, tconn *chrome.TestConn) (func() error, error) {
	pdInfo, err := display.GetPrimaryInfo(ctx, tconn)
	if err != nil {
		return nil, err
	}
	// Height > width means the display is currently portrait and must be rotated.
	if pdInfo.Bounds.Height > pdInfo.Bounds.Width {
		targetRotation := display.Rotate0
		if pdInfo.Rotation == 0 || pdInfo.Rotation == 180 {
			// Portrait at 0/180 degrees: a 90-degree rotation yields landscape.
			targetRotation = display.Rotate90
		}
		cleanupFunction, err := RotateDisplay(ctx, tconn, targetRotation)
		if err != nil {
			return nil, err
		}
		return cleanupFunction, nil
	}
	return func() error { return nil }, nil
}

// checkUnspecifiedActivityInTabletMode makes sure that the display orientation won't
// change for an activity with unspecified orientation. It rotates the display to
// orientation, launches unActName, and verifies both the display orientation and the
// window's aspect agree with the requested orientation.
func checkUnspecifiedActivityInTabletMode(ctx context.Context, tconn *chrome.TestConn, a *arc.ARC, d *ui.Device, orientation display.OrientationType, unActName string) (err error) {
	// Set the display orientation.
	if err := setDisplayOrientation(ctx, tconn, orientation); err != nil {
		return err
	}
	// Need to clear the orientation of setDisplayOrientation. The deferred cleanup
	// promotes its error to the named return only when no earlier error occurred.
	defer func() {
		if clearErr := clearDisplayRotation(ctx, tconn); clearErr != nil {
			testing.ContextLog(ctx, "Failed to clear display rotation: ", clearErr)
			if err == nil {
				err = clearErr
			}
		}
	}()
	// Start undefined activity.
	act, err := arc.NewActivity(a, Pkg24, unActName)
	if err != nil {
		return err
	}
	defer act.Close()
	if err := act.StartWithDefaultOptions(ctx, tconn); err != nil {
		return err
	}
	defer act.Stop(ctx, tconn)
	if err := WaitUntilActivityIsReady(ctx, tconn, act, d); err != nil {
		return err
	}
	newDO, err := display.GetOrientation(ctx, tconn)
	if err != nil {
		return err
	}
	// Compare display orientation after the activity is ready, it should be equal to
	// the initial display orientation (binary portrait/landscape comparison only).
	if isPortraitOrientation(orientation) != isPortraitOrientation(newDO.Type) {
		return errors.Errorf("invalid display orientation for unspecified activity: got %q; want %q", newDO.Type, orientation)
	}
	windowInfo, err := ash.GetARCAppWindowInfo(ctx, tconn, Pkg24)
	if err != nil {
		return err
	}
	if isPortraitRect(windowInfo.BoundsInRoot) {
		// If app is portrait but the display is not, then return error.
		if !isPortraitOrientation(orientation) {
			return errors.New("invalid unspecified activity orientation: got Portrait; want Landscape")
		}
	} else { // App is Landscape
		// If app is Landscape but the display is not, then return error.
		if isPortraitOrientation(orientation) {
			return errors.New("invalid unspecified activity orientation: got Landscape; want Portrait")
		}
	}
	return nil
}

// setDisplayOrientation sets the display orientation by OrientationType.
// It only rotates when the current orientation differs from the desired one.
func setDisplayOrientation(ctx context.Context, tconn *chrome.TestConn, desiredOrientation display.OrientationType) error {
	// Get display orientation
	initialDO, err := display.GetOrientation(ctx, tconn)
	if err != nil {
		return err
	}
	if initialDO.Type != desiredOrientation {
		rotationAngle := display.Rotate0
		if desiredOrientation == display.OrientationPortraitPrimary {
			// NOTE(review): 270 degrees is used to reach portrait-primary here —
			// presumably matching the panel's native landscape orientation; verify
			// against RotateDisplay's semantics.
			rotationAngle = display.Rotate270
		}
		_, err := RotateDisplay(ctx, tconn, rotationAngle)
		if err != nil {
			return err
		}
	}
	return nil
}

// clearDisplayRotation clears the display rotation and resets to the default
// auto-rotation status in tablet mode.
func clearDisplayRotation(ctx context.Context, tconn *chrome.TestConn) error { info, err := display.GetPrimaryInfo(ctx, tconn) if err != nil { return errors.Wrap(err, "failed to get the primary display info") } return display.SetDisplayRotationSync(ctx, tconn, info.ID, display.RotateAny) } // isPortraitRect returns true if width is greater than height. func isPortraitRect(rect coords.Rect) bool { if rect.Width < rect.Height { return true } return false }
package openrtb_ext import "fmt" const DefaultBidLimit = 1 const MaxBidLimit = 9 func ValidateAndBuildExtMultiBid(prebid *ExtRequestPrebid) ([]*ExtMultiBid, []error) { if prebid == nil || prebid.MultiBid == nil { return nil, nil } var validationErrs, errs []error var validatedMultiBids, newMultiBids []*ExtMultiBid //returning slice instead of map to keep the downstream req.Ext payload consistent multiBidMap := make(map[string]struct{}) // map is needed temporarily for validate of duplicate entries, etc. for _, multiBid := range prebid.MultiBid { newMultiBids, errs = addMultiBid(multiBidMap, multiBid) if len(errs) != 0 { validationErrs = append(validationErrs, errs...) } if len(newMultiBids) != 0 { validatedMultiBids = append(validatedMultiBids, newMultiBids...) } } return validatedMultiBids, validationErrs } // Validate and add multiBid func addMultiBid(multiBidMap map[string]struct{}, multiBid *ExtMultiBid) ([]*ExtMultiBid, []error) { errs := make([]error, 0) if multiBid.MaxBids == nil { errs = append(errs, fmt.Errorf("maxBids not defined for %v", *multiBid)) return nil, errs } if *multiBid.MaxBids < DefaultBidLimit { errs = append(errs, fmt.Errorf("invalid maxBids value, using minimum %d limit for %v", DefaultBidLimit, *multiBid)) *multiBid.MaxBids = DefaultBidLimit } if *multiBid.MaxBids > MaxBidLimit { errs = append(errs, fmt.Errorf("invalid maxBids value, using maximum %d limit for %v", MaxBidLimit, *multiBid)) *multiBid.MaxBids = MaxBidLimit } var validatedMultiBids []*ExtMultiBid if multiBid.Bidder != "" { if _, ok := multiBidMap[multiBid.Bidder]; ok { errs = append(errs, fmt.Errorf("multiBid already defined for %s, ignoring this instance %v", multiBid.Bidder, *multiBid)) return nil, errs } if multiBid.Bidders != nil { errs = append(errs, fmt.Errorf("ignoring bidders from %v", *multiBid)) multiBid.Bidders = nil } multiBidMap[multiBid.Bidder] = struct{}{} validatedMultiBids = append(validatedMultiBids, multiBid) } else if len(multiBid.Bidders) > 0 { var 
bidders []string for _, bidder := range multiBid.Bidders { if _, ok := multiBidMap[bidder]; ok { errs = append(errs, fmt.Errorf("multiBid already defined for %s, ignoring this instance %v", bidder, *multiBid)) continue } multiBidMap[bidder] = struct{}{} bidders = append(bidders, bidder) } if multiBid.TargetBidderCodePrefix != "" { errs = append(errs, fmt.Errorf("ignoring targetbiddercodeprefix for %v", *multiBid)) multiBid.TargetBidderCodePrefix = "" } if len(bidders) != 0 { validatedMultiBids = append(validatedMultiBids, &ExtMultiBid{ MaxBids: multiBid.MaxBids, Bidders: bidders, }) } } else { errs = append(errs, fmt.Errorf("bidder(s) not specified for %v", *multiBid)) } return validatedMultiBids, errs }
package main import "fmt" /* 考点: for range循环,会创建每个元素的副本,所以v的地址是相同的 range 表达式复制一个副本_, v := range slice, 这里的slice是副本引用 */ func main() { slice := []int{1,2,3,4} //这里的v是循环对象每个元素的副本 for _, v := range slice { fmt.Printf("%p\n", &v) fmt.Println(v) } m := make(map[int]*int) for key, val:= range slice { //将切片的每个地址作为map值,需要赋值一个新的变量 value := val m[key] = &value } //此时得到的结果为0->1, 1->2 for k, v := range m { fmt.Println(k, "----->", *v) } /* 1 0xc000094000 2 0xc000094000 3 0xc000094000 4 0xc000094000 0 -----> 1 1 -----> 2 2 -----> 3 3 -----> 4 */ }
package bindings

import (
	"os"
	"path/filepath"
	"reflect"
	"strings"

	"github.com/containers/podman/v3/pkg/bindings/containers"
	"github.com/containers/podman/v3/pkg/bindings/images"
	"github.com/containers/podman/v3/pkg/specgen"
)

// PullImage pulls nameOrID quietly and returns the pull output joined into a
// single string, tagged with the concrete client type.
func (p *PodmanClient) PullImage(nameOrID string) (*PullImageReport, error) {
	var quiet bool = true
	out, err := images.Pull(p.Context, nameOrID, &images.PullOptions{
		Quiet: &quiet,
	})
	if err != nil {
		return nil, err
	}
	return &PullImageReport{
		Output: strings.Join(out, "\n"),
		Tool:   reflect.TypeOf(p).String(),
	}, nil
}

// SaveImage exports nameOrID as a compressed tarball under the image pull path
// and reports where the file was written.
func (p *PodmanClient) SaveImage(nameOrID string) (*SaveImageReport, error) {
	response := &SaveImageReport{
		Filename:  getTarballName(nameOrID),
		Directory: imagePullPath(),
	}
	response.AbsPath = filepath.Join(response.Directory, response.Filename)

	outfile, err := os.Create(response.AbsPath)
	if err != nil {
		return nil, err
	}
	// Best-effort close; export errors are surfaced below.
	defer outfile.Close()

	var compress bool = true
	err = images.Export(p.Context, []string{nameOrID}, outfile, &images.ExportOptions{
		Compress: &compress,
	})
	if err != nil {
		return nil, err
	}
	return response, nil
}

// InspectImage pulls nameOrID (so inspection works for images not yet present)
// and returns its metadata.
func (p *PodmanClient) InspectImage(nameOrID string) (*InspectImageReport, error) {
	if _, err := p.PullImage(nameOrID); err != nil {
		return nil, err
	}
	report, err := images.GetImage(p.Context, nameOrID, &images.GetOptions{})
	if err != nil {
		return nil, err
	}
	return &InspectImageReport{
		ImageData: report.ImageData,
	}, nil
}

// ListImages returns all locally-known images.
func (p *PodmanClient) ListImages() (*ListImageReport, error) {
	list, err := images.List(p.Context, &images.ListOptions{})
	if err != nil {
		return nil, err
	}
	return &ListImageReport{
		Images: list,
	}, nil
}

// RemoveImage deletes nameOrID and reports the deleted IDs.
func (p *PodmanClient) RemoveImage(nameOrID string) (*RemoveImageReport, error) {
	// images.Remove returns a slice of errors; guard the index so an empty
	// (but non-nil) slice cannot panic (the original returned errs[0] unconditionally).
	responses, errs := images.Remove(p.Context, []string{nameOrID}, &images.RemoveOptions{})
	if len(errs) > 0 {
		return nil, errs[0]
	}
	return &RemoveImageReport{
		IDs: responses.Deleted,
	}, nil
}

// RunContainer pulls nameOrID, creates a container from it (honoring options),
// starts it, and returns the new container's ID.
func (p *PodmanClient) RunContainer(nameOrID string, options RunOptions) (*RunContainerReport, error) {
	if _, err := p.PullImage(nameOrID); err != nil {
		return nil, err
	}
	spec := specgen.NewSpecGenerator(nameOrID, false)
	if !reflect.DeepEqual(options, RunOptions{}) {
		spec.Terminal = options.Tty
		if len(options.Entrypoint) > 0 {
			spec.ContainerBasicConfig.Entrypoint = options.Entrypoint
		}
		if len(options.Cmd) > 0 {
			spec.ContainerBasicConfig.Command = options.Cmd
		}
	}
	container, err := containers.CreateWithSpec(p.Context, spec, &containers.CreateOptions{})
	if err != nil {
		return nil, err
	}
	if err := containers.Start(p.Context, container.ID, &containers.StartOptions{}); err != nil {
		return nil, err
	}
	return &RunContainerReport{
		ID: container.ID,
	}, nil
}

// InspectContainer returns the inspect data for nameOrID.
func (p *PodmanClient) InspectContainer(nameOrID string) (*InspectContainerReport, error) {
	response, err := containers.Inspect(p.Context, nameOrID, &containers.InspectOptions{})
	if err != nil {
		return nil, err
	}
	return &InspectContainerReport{
		InspectContainerData: response,
	}, nil
}

// ListContainers returns all containers.
// Receiver renamed d -> p for consistency with every other method on PodmanClient.
func (p *PodmanClient) ListContainers() (*ListContainerReport, error) {
	containerList, err := containers.List(p.Context, &containers.ListOptions{})
	if err != nil {
		return nil, err
	}
	return &ListContainerReport{
		Containers: containerList,
	}, nil
}

// RemoveContainer force-removes nameOrID.
func (p *PodmanClient) RemoveContainer(nameOrID string) error {
	var forceDelete bool = true
	return containers.Remove(p.Context, nameOrID, &containers.RemoveOptions{
		Force: &forceDelete,
	})
}
package main import( "fmt" log "github.com/Sirupsen/logrus" "github.com/codegangsta/cli" "gopkg.in/yaml.v2" "github.com/armon/consul-api" "os" "io/ioutil" ) //Connect establishes a connection to local running consul agent. //Currently only localhost:8500 is supported. func Connect() *consulapi.Client { client, err := consulapi.NewClient(consulapi.DefaultConfig()) if err != nil { log.Fatal(err) } return client } type BackupKV struct { Key, Value string } func main() { app := cli.NewApp() app.Name = "consul-backup" app.Usage = "backup consul kv database!" app.Version = "0.0.1" app.Commands = []cli.Command{ { Name: "version", ShortName: "v", Usage: "consul-backup version", Action: func(c *cli.Context) { fmt.Println(app.Version) }, }, { Name: "backup", ShortName: "b", Usage: "backup kv database", Flags: []cli.Flag{ cli.StringFlag{ Name: "file", Value: "backup.yaml", Usage: "backup file", }, cli.StringFlag{ Name: "root", Value: "", Usage: "Root key. Leave blank for all keys", }, }, Action: func(c *cli.Context) { client := Connect() kvp, _, _ := client.KV().List(c.String("root"),nil) var kv []BackupKV for _, a := range kvp { kv = append(kv, BackupKV{Key: a.Key, Value: string(a.Value)}) } d, err := yaml.Marshal(kv) if err != nil { log.Fatalf("error: %v", err) } fmt.Printf("--- t dump:\n%s\n\n", string(d)) ioutil.WriteFile(c.String("file"), d, 0644) }, }, { Name: "restore", ShortName: "r", Usage: "restore kv database", Flags: []cli.Flag{ cli.StringFlag{ Name: "file", Value: "backup.yaml", Usage: "backup file", }, }, Action: func(c *cli.Context) { client := Connect() var kv []BackupKV vals,err:=ioutil.ReadFile(c.String("file")) if err != nil { log.Fatalf("error: %v", err) } err = yaml.Unmarshal(vals, &kv) if err != nil { log.Fatalf("error: %v", err) } kvc := client.KV() for _, a := range kv { fmt.Printf("%s\n%s\n", a.Key, string(a.Value)) _, err := kvc.Put(&consulapi.KVPair{Key: a.Key, Value: []byte(a.Value) }, nil) if err != nil { log.Fatalf("error: %v", err) } } }, }, } 
app.Run(os.Args) }
package odoo import ( "fmt" ) // BaseImportTestsModelsM2ORequired represents base_import.tests.models.m2o.required model. type BaseImportTestsModelsM2ORequired struct { LastUpdate *Time `xmlrpc:"__last_update,omptempty"` CreateDate *Time `xmlrpc:"create_date,omptempty"` CreateUid *Many2One `xmlrpc:"create_uid,omptempty"` DisplayName *String `xmlrpc:"display_name,omptempty"` Id *Int `xmlrpc:"id,omptempty"` Value *Many2One `xmlrpc:"value,omptempty"` WriteDate *Time `xmlrpc:"write_date,omptempty"` WriteUid *Many2One `xmlrpc:"write_uid,omptempty"` } // BaseImportTestsModelsM2ORequireds represents array of base_import.tests.models.m2o.required model. type BaseImportTestsModelsM2ORequireds []BaseImportTestsModelsM2ORequired // BaseImportTestsModelsM2ORequiredModel is the odoo model name. const BaseImportTestsModelsM2ORequiredModel = "base_import.tests.models.m2o.required" // Many2One convert BaseImportTestsModelsM2ORequired to *Many2One. func (btmmr *BaseImportTestsModelsM2ORequired) Many2One() *Many2One { return NewMany2One(btmmr.Id.Get(), "") } // CreateBaseImportTestsModelsM2ORequired creates a new base_import.tests.models.m2o.required model and returns its id. func (c *Client) CreateBaseImportTestsModelsM2ORequired(btmmr *BaseImportTestsModelsM2ORequired) (int64, error) { ids, err := c.CreateBaseImportTestsModelsM2ORequireds([]*BaseImportTestsModelsM2ORequired{btmmr}) if err != nil { return -1, err } if len(ids) == 0 { return -1, nil } return ids[0], nil } // CreateBaseImportTestsModelsM2ORequired creates a new base_import.tests.models.m2o.required model and returns its id. func (c *Client) CreateBaseImportTestsModelsM2ORequireds(btmmrs []*BaseImportTestsModelsM2ORequired) ([]int64, error) { var vv []interface{} for _, v := range btmmrs { vv = append(vv, v) } return c.Create(BaseImportTestsModelsM2ORequiredModel, vv) } // UpdateBaseImportTestsModelsM2ORequired updates an existing base_import.tests.models.m2o.required record. 
func (c *Client) UpdateBaseImportTestsModelsM2ORequired(btmmr *BaseImportTestsModelsM2ORequired) error {
	ids := []int64{btmmr.Id.Get()}
	return c.UpdateBaseImportTestsModelsM2ORequireds(ids, btmmr)
}

// UpdateBaseImportTestsModelsM2ORequireds updates existing base_import.tests.models.m2o.required records.
// All records (represented by ids) will be updated by btmmr values.
func (c *Client) UpdateBaseImportTestsModelsM2ORequireds(ids []int64, btmmr *BaseImportTestsModelsM2ORequired) error {
	return c.Update(BaseImportTestsModelsM2ORequiredModel, ids, btmmr)
}

// DeleteBaseImportTestsModelsM2ORequired deletes an existing base_import.tests.models.m2o.required record.
func (c *Client) DeleteBaseImportTestsModelsM2ORequired(id int64) error {
	return c.DeleteBaseImportTestsModelsM2ORequireds([]int64{id})
}

// DeleteBaseImportTestsModelsM2ORequireds deletes existing base_import.tests.models.m2o.required records.
func (c *Client) DeleteBaseImportTestsModelsM2ORequireds(ids []int64) error {
	return c.Delete(BaseImportTestsModelsM2ORequiredModel, ids)
}

// GetBaseImportTestsModelsM2ORequired gets base_import.tests.models.m2o.required existing record.
func (c *Client) GetBaseImportTestsModelsM2ORequired(id int64) (*BaseImportTestsModelsM2ORequired, error) {
	btmmrs, err := c.GetBaseImportTestsModelsM2ORequireds([]int64{id})
	if err != nil {
		return nil, err
	}
	if btmmrs != nil {
		if recs := *btmmrs; len(recs) > 0 {
			return &recs[0], nil
		}
	}
	return nil, fmt.Errorf("id %v of base_import.tests.models.m2o.required not found", id)
}

// GetBaseImportTestsModelsM2ORequireds gets base_import.tests.models.m2o.required existing records.
func (c *Client) GetBaseImportTestsModelsM2ORequireds(ids []int64) (*BaseImportTestsModelsM2ORequireds, error) { btmmrs := &BaseImportTestsModelsM2ORequireds{} if err := c.Read(BaseImportTestsModelsM2ORequiredModel, ids, nil, btmmrs); err != nil { return nil, err } return btmmrs, nil } // FindBaseImportTestsModelsM2ORequired finds base_import.tests.models.m2o.required record by querying it with criteria. func (c *Client) FindBaseImportTestsModelsM2ORequired(criteria *Criteria) (*BaseImportTestsModelsM2ORequired, error) { btmmrs := &BaseImportTestsModelsM2ORequireds{} if err := c.SearchRead(BaseImportTestsModelsM2ORequiredModel, criteria, NewOptions().Limit(1), btmmrs); err != nil { return nil, err } if btmmrs != nil && len(*btmmrs) > 0 { return &((*btmmrs)[0]), nil } return nil, fmt.Errorf("base_import.tests.models.m2o.required was not found with criteria %v", criteria) } // FindBaseImportTestsModelsM2ORequireds finds base_import.tests.models.m2o.required records by querying it // and filtering it with criteria and options. func (c *Client) FindBaseImportTestsModelsM2ORequireds(criteria *Criteria, options *Options) (*BaseImportTestsModelsM2ORequireds, error) { btmmrs := &BaseImportTestsModelsM2ORequireds{} if err := c.SearchRead(BaseImportTestsModelsM2ORequiredModel, criteria, options, btmmrs); err != nil { return nil, err } return btmmrs, nil } // FindBaseImportTestsModelsM2ORequiredIds finds records ids by querying it // and filtering it with criteria and options. func (c *Client) FindBaseImportTestsModelsM2ORequiredIds(criteria *Criteria, options *Options) ([]int64, error) { ids, err := c.Search(BaseImportTestsModelsM2ORequiredModel, criteria, options) if err != nil { return []int64{}, err } return ids, nil } // FindBaseImportTestsModelsM2ORequiredId finds record id by querying it with criteria. 
func (c *Client) FindBaseImportTestsModelsM2ORequiredId(criteria *Criteria, options *Options) (int64, error) { ids, err := c.Search(BaseImportTestsModelsM2ORequiredModel, criteria, options) if err != nil { return -1, err } if len(ids) > 0 { return ids[0], nil } return -1, fmt.Errorf("base_import.tests.models.m2o.required was not found with criteria %v and options %v", criteria, options) }
package pluginregistry import "github.com/janoszen/exoscale-account-wiper/plugin" func New() *PluginRegistry { return &PluginRegistry{ plugins: []plugin.DeletePlugin{}, pluginsByKey: make(map[string]plugin.DeletePlugin), enabledPlugins: make(map[string]bool), } }
package main

import (
	"fmt"
	"math"
	"sort"
	"strings"
	"testing"
)

// TestPackageProblem checks packageProblem against four fixed inputs.
// Input format: "<weight limit> : (<id>,<weight>,$<cost>) ..."; expected
// output is a comma-separated list of chosen package ids, or "-" if none fit.
func TestPackageProblem(t *testing.T) {
	for k, v := range map[string]string{
		"81 : (1,53.38,$45) (2,88.62,$98) (3,78.48,$3) (4,72.30,$76) (5,30.18,$9) (6,46.34,$48)": "4",
		"8 : (1,15.3,$34)": "-",
		"75 : (1,85.31,$29) (2,14.55,$74) (3,3.98,$16) (4,26.24,$55) (5,63.69,$52) (6,76.25,$75) (7,60.02,$74) (8,93.18,$35) (9,89.95,$78)": "2,7",
		"56 : (1,90.72,$13) (2,33.80,$40) (3,43.15,$10) (4,37.97,$16) (5,46.81,$36) (6,48.77,$79) (7,81.80,$45) (8,19.36,$79) (9,6.76,$64)": "8,9"} {
		if r := packageProblem(k); r != v {
			t.Errorf("failed: packageProblem %s is %s, got %s", k, v, r)
		}
	}
}

// item is one candidate package: value holds the dollar cost, weight holds the
// weight scaled by 100 so it can be stored as an integer.
type item struct {
	id, value, weight int64
}

// items sorts by descending value density (value per unit weight), the greedy
// order used by the branch-and-bound relaxation.
type items []item

func (slice items) Len() int {
	return len(slice)
}

func (slice items) Less(i, j int) bool {
	return (float64(slice[i].value) / float64(slice[i].weight)) > (float64(slice[j].value) / float64(slice[j].weight))
}

func (slice items) Swap(i, j int) {
	slice[i], slice[j] = slice[j], slice[i]
}

// bäst sorts the winning id list into ascending order for output.
type bäst []int64

func (slice bäst) Len() int {
	return len(slice)
}

func (slice bäst) Less(i, j int) bool {
	return slice[i] < slice[j]
}

func (slice bäst) Swap(i, j int) {
	slice[i], slice[j] = slice[j], slice[i]
}

// task is one node of the branch-and-bound search: the depth reached (level),
// the fractional upper bound, the value accumulated so far, the remaining
// capacity (room), and the ids taken on this path (stuff).
type task struct {
	level, bound, value, room int64
	stuff                     []int64
}

// boundary runs the greedy fractional relaxation over stuff with the given
// remaining room and accumulated value. It returns the best value achievable
// with whole items only (t), the fractional upper bound (b), and the ids of
// the whole items taken (tl).
//
// NOTE(review): the itl parameter is never read inside the function; callers
// pass curr.stuff/[]int64{} but the result list tl always starts empty.
// Presumably dead — confirm before removing.
func boundary(room, value int64, stuff []item, itl []int64) (t, b int64, tl []int64) {
	totalRoom, f := room, false
	t, b = value, value
	for _, i := range stuff {
		// Items heavier than the original capacity can never contribute.
		if i.weight > totalRoom {
			continue
		} else if i.weight > room {
			// First item that no longer fits: add its fractional share to the
			// bound only, then keep scanning for smaller whole items.
			if !f {
				b += int64(float64(i.value*room) / float64(i.weight))
				f = true
			}
			continue
		}
		// Item fits whole: take it for both the integral value and the bound.
		room, t, tl = room-i.weight, t+i.value, append(tl, i.id)
		if !f {
			b += i.value
		}
		if room == 0 {
			break
		}
	}
	return t, b, tl
}

// packageProblem solves a 0/1 knapsack instance given in the textual format
// described on TestPackageProblem, using branch and bound with a greedy
// fractional upper bound. Returns the chosen ids sorted ascending and joined
// with commas, or "-" when no package fits.
func packageProblem(q string) string {
	var (
		ks, s0, s2 int64
		s1         float64
	)
	// Split on "(": ts[0] holds the capacity, ts[1..] one package each.
	ts := strings.Split(q, "(")
	n := len(ts) - 1
	fmt.Sscanf(ts[0], "%d : ", &ks)
	var (
		stuff     []item
		minWeight int64 = math.MaxInt64
	)
	n0 := n
	for i := 1; i <= n0; i++ {
		fmt.Sscanf(ts[i], "%d,%f,$%d)", &s0, &s1, &s2)
		if s1 > float64(ks) {
			// Too heavy to ever fit: drop it and shrink the live count.
			n--
		} else {
			stuff = append(stuff, item{s0, s2, int64(s1 * 100)})
		}
		// NOTE(review): this tracks the minimum of s2, which is the COST, not
		// the (scaled) weight, and it is updated even for items that were just
		// excluded. Since costs here are much smaller than scaled weights the
		// pruning below only becomes weaker (still correct, just slower) —
		// looks like a bug; confirm intent before fixing.
		if s2 < minWeight {
			minWeight = s2
		}
	}
	if n == 0 {
		return "-"
	}
	// Sort by value density so the greedy bound is valid.
	sort.Sort(items(stuff))
	ks *= 100
	// Seed the search with the "exclude stuff[0]" root...
	t, bound, tl := boundary(ks, int64(0), stuff[1:], []int64{})
	best, bestl := t, make([]int64, len(tl))
	todo := []task{task{bound: bound, room: ks}}
	copy(bestl, tl)
	// ...and the plain greedy solution over all items.
	t, bound, tl = boundary(ks, int64(0), stuff, []int64{})
	if t > best {
		best, bestl = t, make([]int64, len(tl))
		copy(bestl, tl)
	}
	// "Include stuff[0]" root, if it fits at all.
	if stuff[0].weight <= ks {
		todo = append(todo, task{int64(0), bound, stuff[0].value, ks - stuff[0].weight, []int64{stuff[0].id}})
	}
	// Depth-first branch and bound over the todo stack.
	for len(todo) > 0 {
		curr := todo[len(todo)-1]
		todo = todo[:len(todo)-1]
		// Prune: this subtree cannot beat the incumbent.
		if curr.bound <= best {
			continue
		}
		// Leaf level: only the last item remains to decide.
		if curr.level == int64(n-2) {
			if curr.room >= stuff[n-1].weight {
				t = stuff[n-1].value + curr.value
				if t > best {
					best, bestl = t, make([]int64, len(curr.stuff)+1)
					copy(bestl, append(curr.stuff, stuff[n-1].id))
				}
			}
			if curr.value > best {
				best, bestl = curr.value, make([]int64, len(curr.stuff))
				copy(bestl, curr.stuff)
			}
			continue
		}
		// Greedy completion from this node; may already improve the incumbent.
		t, bound, tl = boundary(curr.room, curr.value, stuff[curr.level+1:], curr.stuff)
		if t > best {
			tl = append(curr.stuff, tl...)
			best, bestl = t, make([]int64, len(tl))
			copy(bestl, tl)
		}
		// Branch 1: skip the next item (bound recomputed above).
		if bound > best && curr.room > minWeight {
			todo = append(todo, task{curr.level + 1, bound, curr.value, curr.room, curr.stuff})
		}
		// Branch 2: take the next item (parent bound still applies).
		if curr.room >= stuff[curr.level+1].weight+minWeight {
			todo = append(todo, task{curr.level + 1, curr.bound, curr.value + stuff[curr.level+1].value, curr.room - stuff[curr.level+1].weight, append(curr.stuff, stuff[curr.level+1].id)})
		}
	}
	// Emit the winning ids in ascending order.
	var st []string
	sort.Sort(bäst(bestl))
	for i := int64(1); i <= int64(n0); i++ {
		if len(bestl) > 0 && i == bestl[0] {
			st = append(st, fmt.Sprint(i))
			bestl = bestl[1:]
		}
	}
	return strings.Join(st, ",")
}
package geometry import ( "math" "testing" ) const ( x = 4.5 y = 2.4 z = 3.4 x2 = 8.7 y2 = 1.4 z2 = 3.1 ) func CreateVector() *Vector { v := new(Vector) v.x = x v.y = y v.z = z return v } func CreateVector2() *Vector { v := new(Vector) v.x = x2 v.y = y2 v.z = z2 return v } func TestCreateVector(t *testing.T) { v := CreateVector() if v.x != x || v.y != y || v.z != z { t.Fatal("Vector values not assigned correctly") } } func TestCloneVector(t *testing.T) { v := CreateVector() v2 := v.Clone() if v2.x != x || v2.y != y || v2.z != z { t.Fatalf("Vector values not copied correctly %f - %f, %f - %f, %f - %f", v2.x, x, v2.y, y, v2.z, z) } } func TestCloneVectorAndReAssign(t *testing.T) { v := CreateVector() v2 := v.Clone() v.x = x2 v.y = y2 v.z = z2 if v == v2 { t.Fatalf("Vector addresses are the same: %v - %v", v, v2) } if v.x != x2 || v2.x != x || v.y != y2 || v2.y != y || v.z != z2 || v2.z != z { t.Fatalf("Vector values not copied correctly and assigned %f - %f, %f - %f, %f - %f", v.x, v2.x, v.y, v2.y, v.z, v2.z) } } func TestVectorMagnitude(t *testing.T) { v := CreateVector() m := math.Sqrt(x*x + y*y + z*z) magnitude := v.Magnitude() if magnitude != m { t.Fatalf("Magnitude calculated incorrectly %f - %f", magnitude, m) } } func TestVectorNormalisation(t *testing.T) { v := CreateVector() v2, _ := v.Normalize() if v2.x != 0.7341620244157445 || v2.y != 0.3915530796883971 || v2.z != 0.5547001962252291 { t.Fatal("Could not normalize the Vector correctly", v2.x, v2.y, v2.z) } if v.x != x || v.y != y || v.z != z { t.Fatal("Original Vector shouldn't change when performing a normalisation") } } func TestVectorDotProduct(t *testing.T) { v := CreateVector() v2 := CreateVector2() dotProduct := v.DotProduct(v2) dotProductReverse := v2.DotProduct(v) if dotProduct != dotProductReverse || dotProduct != 53.05 { t.Fatalf("Dot product calculated incorrectly", dotProduct) } } func TestVectorCrossProduct(t *testing.T) { v := CreateVector() v2 := CreateVector2() v3 := v.CrossProduct(v2) if 
v3.x != 2.6799999999999997 || v3.y != 15.629999999999997 || v3.z != -14.579999999999998 { t.Fatalf("Cross product calculated incorrectly", v3) } } func TestVectorAdd(t *testing.T) { v := CreateVector() v2 := CreateVector2() v3 := v.Add(v2) if v3.x != 13.2 || v3.y != 3.8 || v3.z != 6.5 { t.Fatalf("Could not add two Vectors together", v3) } }
package operations // This file was generated by the swagger tool. // Editing this file might prove futile when you re-run the swagger generate command import ( "fmt" "github.com/go-openapi/runtime" strfmt "github.com/go-openapi/strfmt" ) // GetJdbcDriversHipermediaRepresentationReader is a Reader for the GetJdbcDriversHipermediaRepresentation structure. type GetJdbcDriversHipermediaRepresentationReader struct { formats strfmt.Registry } // ReadResponse reads a server response into the recieved o. func (o *GetJdbcDriversHipermediaRepresentationReader) ReadResponse(response runtime.ClientResponse, consumer runtime.Consumer) (interface{}, error) { switch response.Code() { case 200: result := NewGetJdbcDriversHipermediaRepresentationOK() if err := result.readResponse(response, consumer, o.formats); err != nil { return nil, err } return result, nil default: return nil, runtime.NewAPIError("unknown error", response, response.Code()) } } // NewGetJdbcDriversHipermediaRepresentationOK creates a GetJdbcDriversHipermediaRepresentationOK with default headers values func NewGetJdbcDriversHipermediaRepresentationOK() *GetJdbcDriversHipermediaRepresentationOK { return &GetJdbcDriversHipermediaRepresentationOK{} } /*GetJdbcDriversHipermediaRepresentationOK handles this case with default header values. GetJdbcDriversHipermediaRepresentationOK get jdbc drivers hipermedia representation o k */ type GetJdbcDriversHipermediaRepresentationOK struct { } func (o *GetJdbcDriversHipermediaRepresentationOK) Error() string { return fmt.Sprintf("[GET /jdbcDrivers][%d] getJdbcDriversHipermediaRepresentationOK ", 200) } func (o *GetJdbcDriversHipermediaRepresentationOK) readResponse(response runtime.ClientResponse, consumer runtime.Consumer, formats strfmt.Registry) error { return nil }
// Copyright 2021 The ChromiumOS Authors // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package ash import ( "context" "chromiumos/tast/local/chrome" ) // ClipboardTextData returns clipboard text data. func ClipboardTextData(ctx context.Context, tconn *chrome.TestConn) (string, error) { var data string if err := tconn.Call(ctx, &data, `tast.promisify(chrome.autotestPrivate.getClipboardTextData)`); err != nil { return "", err } return data, nil } // SetClipboard forcibly sets the clipboard to the given data. func SetClipboard(ctx context.Context, tconn *chrome.TestConn, data string) error { return tconn.Call(ctx, nil, `tast.promisify(chrome.autotestPrivate.setClipboardTextData)`, data) }
package proteus // Wrapper is now a no-op func that exists for backward compatibility. It is now deprecated and will be removed in the // 1.0 release of proteus. func Wrap(sqle Wrapper) Wrapper { return sqle }
package audit

import (
	"fmt"
	"math"
	"net/http"
	"net/http/httptest"
	"net/url"
	"sort"
	"strconv"
	"strings"
	"testing"

	"github.com/google/uuid"
	"github.com/jrapoport/gothic/config"
	"github.com/jrapoport/gothic/core/audit"
	"github.com/jrapoport/gothic/core/context"
	"github.com/jrapoport/gothic/hosts/rest"
	"github.com/jrapoport/gothic/models/auditlog"
	"github.com/jrapoport/gothic/models/types"
	"github.com/jrapoport/gothic/models/types/key"
	"github.com/jrapoport/gothic/store"
	"github.com/jrapoport/gothic/test/thttp"
	"github.com/jrapoport/gothic/test/tsrv"
	"github.com/segmentio/encoding/json"
	"github.com/stretchr/testify/require"
	"github.com/stretchr/testify/suite"
)

// TestCase is one audit log fixture: the action logged, the acting user id,
// optional structured fields, and the resulting log entry id (filled in by
// SetupTestLogs after insertion).
type TestCase struct {
	act    auditlog.Action
	uid    uuid.UUID
	fields types.Map
	logID  uint
}

var (
	// testUID is a user id shared by a subset of fixtures so user-id filtering
	// can be exercised.
	testUID = uuid.New()
	// testBook is a field value shared by a subset of fixtures so field
	// filtering can be exercised.
	testBook = uuid.New()
)

// SetupTestLogs inserts a matrix of audit log fixtures into the store and
// returns them with their assigned log ids. The fixture set is built in three
// passes: one entry per action with a random uid, a duplicate of each with the
// shared uid, and three field-carrying copies of everything (one per "book"
// value), the first 50 of which also get a "sorted" marker for the sort test.
func SetupTestLogs(t *testing.T, c *config.Config, uid, bid uuid.UUID) []TestCase {
	var tests = []TestCase{
		{auditlog.Startup, uuid.New(), nil, 0},
		{auditlog.Shutdown, uuid.New(), nil, 0},
		{auditlog.Signup, uuid.New(), nil, 0},
		{auditlog.ConfirmSent, uuid.New(), nil, 0},
		{auditlog.Confirmed, uuid.New(), nil, 0},
		{auditlog.Granted, uuid.New(), nil, 0},
		{auditlog.Revoked, uuid.New(), nil, 0},
		{auditlog.RevokedAll, uuid.New(), nil, 0},
		{auditlog.Login, uuid.New(), nil, 0},
		{auditlog.Logout, uuid.New(), nil, 0},
		{auditlog.Password, uuid.New(), nil, 0},
		{auditlog.Email, uuid.New(), nil, 0},
		{auditlog.Updated, uuid.New(), nil, 0},
	}
	// Duplicate every fixture with the shared uid. Appending while ranging is
	// safe: range snapshots the slice header, so only the original entries are
	// visited.
	for _, test := range tests {
		tst := test
		tst.uid = uid
		tests = append(tests, tst)
	}
	var idx int
	for _, bk := range []interface{}{
		"thing2",
		bid.String(),
		uuid.New().String(),
	} {
		for _, test := range tests {
			// test is a copy; a fresh map is created per iteration so entries
			// do not share field maps.
			test.fields = types.Map{
				"dr_suess": "thing1",
				"book":     bk,
			}
			// Only the first 50 field-carrying fixtures get the sort marker.
			if idx < 50 {
				test.fields["sorted"] = "yes"
			}
			idx++
			tests = append(tests, test)
		}
	}
	ctx := context.Background()
	ctx.SetProvider(c.Provider())
	ctx.SetIPAddress("127.0.0.1")
	conn, err := store.Dial(c, nil)
	require.NoError(t, err)
	// Insert all fixtures in one transaction and record their assigned ids.
	err = conn.Transaction(func(tx *store.Connection) error {
		for i, test := range tests {
			le, err := audit.CreateLogEntry(ctx, tx, test.act, test.uid, test.fields)
			require.NoError(t, err)
			tests[i].logID = le.ID
		}
		return nil
	})
	require.NoError(t, err)
	return tests
}

// AuditServerTestSuite exercises the audit log search endpoint against a live
// REST server and store populated by SetupTestLogs.
type AuditServerTestSuite struct {
	suite.Suite
	srv   *auditServer
	conn  *store.Connection
	tests []TestCase
	uid   uuid.UUID
}

// TestAuditLogs is the suite entry point.
func TestAuditLogs(t *testing.T) {
	t.Parallel()
	ts := &AuditServerTestSuite{}
	suite.Run(t, ts)
}

// SetupSuite starts the REST server, dials the store, and seeds the fixtures.
func (ts *AuditServerTestSuite) SetupSuite() {
	s, _ := tsrv.RESTServer(ts.T(), false)
	ts.srv = newAuditServer(s)
	conn, err := store.Dial(ts.srv.Config(), nil)
	ts.Require().NoError(err)
	ts.conn = conn
	ts.tests = SetupTestLogs(ts.T(), ts.srv.Config(), testUID, testBook)
}

// searchAuditLogs performs a GET against the search handler with the given
// endpoint and query values, returning the recorded response.
func (ts *AuditServerTestSuite) searchAuditLogs(ep string, v url.Values) *httptest.ResponseRecorder {
	r := thttp.Request(ts.T(), http.MethodGet, ep, "", v, nil)
	w := httptest.NewRecorder()
	ts.srv.SearchAuditLogs(w, r)
	return w
}

// TestErrors covers malformed requests: a bad body must fail, while a bad
// paging value is tolerated by the handler.
func (ts *AuditServerTestSuite) TestErrors() {
	// invalid req
	r := thttp.Request(ts.T(), http.MethodGet, Audit, "", nil, []byte("\n"))
	w := httptest.NewRecorder()
	ts.srv.SearchAuditLogs(w, r)
	ts.NotEqual(http.StatusOK, w.Code)
	// bad paging (we handle this now)
	r = thttp.Request(ts.T(), http.MethodGet, Audit, "", url.Values{
		key.Page: []string{"\n"},
	}, nil)
	w = httptest.NewRecorder()
	ts.srv.SearchAuditLogs(w, r)
	ts.Equal(http.StatusOK, w.Code)
}

// TestPageHeaders verifies the first page of an unfiltered search: page size,
// page number/count/total headers, and that a returned entry matches the
// stored log entry.
func (ts *AuditServerTestSuite) TestPageHeaders() {
	res := ts.searchAuditLogs(Audit, nil)
	ts.Equal(http.StatusOK, res.Code)
	var logs []interface{}
	err := json.NewDecoder(res.Body).Decode(&logs)
	ts.NoError(err)
	ts.Len(logs, store.MaxPageSize)
	// Cross-check the first returned entry against the store.
	e := logs[0].(map[string]interface{})
	f := e[key.Fields].(map[string]interface{})
	id := uint(e["ID"].(float64))
	le, err := audit.GetLogEntry(ts.conn, id)
	ts.Require().NoError(err)
	ts.Equal(id, le.ID)
	ts.Equal(f["dr_suess"], le.Fields["dr_suess"])
	pn := res.Header().Get(rest.PageNumber)
	ts.Equal("1", pn)
	pc := res.Header().Get(rest.PageCount)
	cnt := int(math.Ceil(float64(len(ts.tests)) / float64(store.MaxPageSize)))
	testCount := strconv.Itoa(cnt)
	ts.Equal(testCount, pc)
	sz := res.Header().Get(rest.PageSize)
	testLen := strconv.Itoa(store.MaxPageSize)
	ts.Equal(testLen, sz)
	tot := res.Header().Get(rest.PageTotal)
	// +1 because of audit.LogStartup
	testTotal := strconv.Itoa(len(ts.tests) + 1)
	ts.Equal(testTotal, tot)
}

// TestPageLinks walks the rel="next" chain in the Link header from page 1 to
// the last page, checking each page returns as many entries as its PageSize
// header claims.
func (ts *AuditServerTestSuite) TestPageLinks() {
	startLink := func() string {
		return fmt.Sprintf("%s?%s=1&%s=%d",
			Audit, key.Page, key.PerPage, store.MaxPageSize)
	}
	var nextLink = startLink()
	for {
		if nextLink == "" {
			break
		}
		u, err := url.Parse(nextLink)
		ts.Require().NoError(err)
		nextLink = ""
		// Strip scheme/host so the URI can be fed back to the handler.
		u.Scheme = ""
		u.Host = ""
		uri := u.String()
		res := ts.searchAuditLogs(uri, nil)
		ts.Equal(http.StatusOK, res.Code)
		var logs []interface{}
		err = json.NewDecoder(res.Body).Decode(&logs)
		ts.Require().NoError(err)
		sz := res.Header().Get(rest.PageSize)
		cnt, err := strconv.Atoi(sz)
		ts.Require().NoError(err)
		ts.Len(logs, cnt)
		l := res.Header().Get(rest.Link)
		links := strings.Split(l, ",")
		if len(links) <= 0 {
			break
		}
		// Find the rel="next" link, if any; the loop ends when there is none.
		for _, lnk := range links {
			next := `rel="next"`
			if strings.HasSuffix(lnk, next) {
				nextLink = strings.ReplaceAll(lnk, next, "")
				nextLink = strings.Trim(nextLink, " <>;")
				break
			}
		}
	}
}

// TestSearchFilters runs one search per filter combination (user id, action,
// single field, type+field, two fields) and asserts every returned entry
// satisfies that filter.
func (ts *AuditServerTestSuite) TestSearchFilters() {
	tests := []struct {
		v    url.Values
		comp func(e map[string]interface{})
	}{
		{
			url.Values{
				key.UserID: []string{testUID.String()},
			},
			func(e map[string]interface{}) {
				uid := e[key.UserID].(string)
				ts.Equal(testUID.String(), uid)
			},
		},
		{
			url.Values{
				key.Action: []string{auditlog.Startup.String()},
			},
			func(e map[string]interface{}) {
				act := e[key.Action].(string)
				ts.Equal(auditlog.Startup.String(), act)
			},
		},
		{
			url.Values{
				"dr_suess": []string{"thing1"},
			},
			func(e map[string]interface{}) {
				f := e[key.Fields].(map[string]interface{})
				ts.Equal("thing1", f["dr_suess"])
			},
		},
		{
			url.Values{
				key.Type:   []string{auditlog.Account.String()},
				"dr_suess": []string{"thing1"},
			},
			func(e map[string]interface{}) {
				typ := auditlog.Type(e[key.Type].(float64))
				ts.Equal(auditlog.Account, typ)
				f := e[key.Fields].(map[string]interface{})
				ts.Equal("thing1", f["dr_suess"])
			},
		},
		{
			url.Values{
				"dr_suess": []string{"thing1"},
				"book":     []string{testBook.String()},
			},
			func(e map[string]interface{}) {
				f := e[key.Fields].(map[string]interface{})
				ts.Equal("thing1", f["dr_suess"])
				ts.Equal(testBook.String(), f["book"])
			},
		},
	}
	for _, test := range tests {
		res := ts.searchAuditLogs(Audit, test.v)
		ts.Equal(http.StatusOK, res.Code)
		var logs []interface{}
		err := json.NewDecoder(res.Body).Decode(&logs)
		ts.NoError(err)
		ts.Greater(len(logs), 0)
		for _, log := range logs {
			e := log.(map[string]interface{})
			test.comp(e)
		}
	}
}

// TestSearchSort runs the same filtered search ascending and descending and
// checks the descending ids are exactly the ascending ids reversed (verified
// by sorting the descending ids and comparing).
func (ts *AuditServerTestSuite) TestSearchSort() {
	// search Ascending
	v := url.Values{
		key.Sort:   []string{string(store.Ascending)},
		"dr_suess": []string{"thing1"},
		"sorted":   []string{"yes"},
	}
	var logs []interface{}
	res := ts.searchAuditLogs(Audit, v)
	ts.Equal(http.StatusOK, res.Code)
	err := json.NewDecoder(res.Body).Decode(&logs)
	ts.NoError(err)
	ts.Greater(len(logs), 0)
	testIdx := make([]int, len(logs))
	for i, log := range logs {
		e := log.(map[string]interface{})
		f := e[key.Fields].(map[string]interface{})
		ts.Equal("thing1", f["dr_suess"])
		ts.Equal("yes", f["sorted"])
		testIdx[i] = int(e["ID"].(float64))
	}
	// search Descending
	v[key.Sort] = []string{string(store.Descending)}
	res = ts.searchAuditLogs(Audit, v)
	ts.Equal(http.StatusOK, res.Code)
	err = json.NewDecoder(res.Body).Decode(&logs)
	ts.NoError(err)
	ts.Greater(len(logs), 0)
	ts.Require().Len(logs, len(testIdx))
	descIdx := make([]int, len(logs))
	for i, log := range logs {
		e := log.(map[string]interface{})
		f := e[key.Fields].(map[string]interface{})
		ts.Equal("thing1", f["dr_suess"])
		ts.Equal("yes", f["sorted"])
		descIdx[i] = int(e["ID"].(float64))
	}
	// reverse the indexes
	sort.Ints(descIdx)
	ts.Equal(testIdx, descIdx)
}
package store

import (
	"context"
	"errors"
	"testing"
	"time"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/request"
	"github.com/aws/aws-sdk-go/service/ssm/ssmiface"
	"github.com/aws/aws-sdk-go/service/ssm"
)

var (
	// past is a timestamp far in the past, used to force a cached parameter
	// to be considered stale and re-fetched.
	past, _ = time.Parse("Mon, 01/02/06, 03:04PM", "Thu, 05/19/11, 10:47PM")
)

// TestParamStoreAttributeFuncs covers the builder-style setters on the store:
// New, Retries, and Timeout.
func TestParamStoreAttributeFuncs(t *testing.T) {
	t.Run("New", func(t *testing.T) {
		ps := New()
		if ps == nil {
			t.Fatalf("Unable to create param store")
		}
	})
	t.Run("Retries", func(t *testing.T) {
		ps := New()
		ps.Retries(1)
		if ps.RequestRetries != 1 {
			t.Fatalf("Request Retries isn't 1")
		}
	})
	t.Run("Timeout", func(t *testing.T) {
		ps := New()
		ps.Timeout(time.Second)
		if ps.RequestTimeout != time.Second {
			t.Fatalf("Request Retries isn't 1")
		}
	})
}

// TestParam covers Param: successful fetch, SSM error propagation, context
// timeout, and cache refresh of a stale value.
func TestParam(t *testing.T) {
	t.Run("Get Param Success", func(t *testing.T) {
		var (
			name  = "foo"
			value = "bar"
		)
		mockClient := &mockSSMClient{}
		mockClient.paramFn = func(ctx context.Context, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) {
			param := &ssm.Parameter{
				Name:  aws.String(name),
				Value: aws.String(value),
			}
			out := &ssm.GetParameterOutput{
				Parameter: param,
			}
			return out, nil
		}
		ps := New()
		ps.client = mockClient
		out, _ := ps.Param(name)
		outValue, err := out.StringValue()
		if err != nil {
			t.Fatalf("Error returning value: %v", err)
		}
		if out.Name != name && outValue != value {
			t.Fatalf("Invalid parameter returned")
		}
	})
	t.Run("Get Param Error", func(t *testing.T) {
		var (
			name = "foo"
		)
		mockClient := &mockSSMClient{}
		mockClient.paramFn = func(ctx context.Context, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) {
			return nil, errors.New("Unable to get param")
		}
		ps := New()
		ps.client = mockClient
		_, err := ps.Param(name)
		if err == nil {
			t.Fatalf("We should have had an error")
		}
	})
	t.Run("Get Param Error Timeout", func(t *testing.T) {
		var (
			name = "foo"
		)
		mockClient := &mockSSMClient{}
		// The mock blocks until the request context is cancelled, simulating
		// an SSM call that exceeds the store's request timeout.
		mockClient.paramFn = func(ctx context.Context, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) {
			select {
			case <-ctx.Done():
				return nil, ctx.Err()
			}
		}
		ps := New()
		ps.client = mockClient
		_, err := ps.Param(name)
		if err == nil {
			t.Fatalf("We should have had an error")
		}
	})
	t.Run("Get Param Refresh", func(t *testing.T) {
		var (
			name     = "foo"
			value    = "bar"
			newValue = "baz"
		)
		mockClient := &mockSSMClient{}
		mockClient.paramFn = func(ctx context.Context, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) {
			param := &ssm.Parameter{
				Name:  aws.String(name),
				Value: aws.String(value),
			}
			out := &ssm.GetParameterOutput{
				Parameter: param,
			}
			return out, nil
		}
		ps := New()
		ps.client = mockClient
		out, _ := ps.Param(name)
		// Mark the cached value as stale so the next read re-fetches, then
		// swap the mock to return a new value.
		out.lastRefresh = past
		mockClient.paramFn = func(ctx context.Context, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error) {
			param := &ssm.Parameter{
				Name:  aws.String(name),
				Value: aws.String(newValue),
			}
			out := &ssm.GetParameterOutput{
				Parameter: param,
			}
			return out, nil
		}
		outValue, err := out.StringValue()
		if err != nil {
			t.Fatalf("Error returning value: %v", err)
		}
		if out.Name != name && outValue != newValue {
			t.Fatalf("Invalid parameter returned")
		}
	})
}

// TestParamsByPath covers ParamsByPath: success, SSM error propagation, and
// the empty-result error.
//
// NOTE(review): the "Success" subtest calls ps.ParamsByPath(paramPath, nil)
// with two arguments while the two error subtests pass three
// (paramPath, nil, nil). Both can only compile if the trailing parameter is
// variadic — confirm the signature; if it is not variadic, one set of call
// sites is stale.
func TestParamsByPath(t *testing.T) {
	var (
		paramPath  = "/aws/ssm/param"
		parameters = []*ssm.Parameter{
			&ssm.Parameter{
				Name:  aws.String("/aws/ssm/param"),
				Value: aws.String("foo"),
			},
		}
	)
	t.Run("Success", func(t *testing.T) {
		mockClient := &mockSSMClient{}
		mockClient.paramsByPathFn = func(ctx context.Context, input *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) {
			out := &ssm.GetParametersByPathOutput{
				Parameters: parameters,
			}
			return out, nil
		}
		ps := New()
		ps.client = mockClient
		out, err := ps.ParamsByPath(paramPath, nil)
		if err != nil {
			t.Fatalf("error: %v", err)
		}
		if len(out) != 1 {
			t.Fatalf("Not enough parameters returned")
		}
		if out[0].Name != paramPath {
			t.Fatalf("Wrong param path found")
		}
	})
	t.Run("Error", func(t *testing.T) {
		mockClient := &mockSSMClient{}
		mockClient.paramsByPathFn = func(ctx context.Context, input *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) {
			return nil, errors.New("an error")
		}
		ps := New()
		ps.client = mockClient
		_, err := ps.ParamsByPath(paramPath, nil, nil)
		if err == nil {
			t.Fatalf("no error: %v", err)
		}
	})
	t.Run("Error No Params", func(t *testing.T) {
		mockClient := &mockSSMClient{}
		mockClient.paramsByPathFn = func(ctx context.Context, input *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error) {
			out := &ssm.GetParametersByPathOutput{}
			return out, nil
		}
		ps := New()
		ps.client = mockClient
		_, err := ps.ParamsByPath(paramPath, nil, nil)
		if err == nil {
			t.Fatalf("no error: %v", err)
		}
	})
}

// TestPath checks the slash-joined path helper.
func TestPath(t *testing.T) {
	if Path("foo", "bar") != "/foo/bar" {
		t.Fatalf("Incorrect path returned")
	}
}

// mockSSMClient stubs the two SSM calls the store makes; the embedded
// ssmiface.SSMAPI satisfies the rest of the interface.
type mockSSMClient struct {
	ssmiface.SSMAPI
	paramFn        func(ctx context.Context, input *ssm.GetParameterInput) (*ssm.GetParameterOutput, error)
	paramsByPathFn func(ctx context.Context, input *ssm.GetParametersByPathInput) (*ssm.GetParametersByPathOutput, error)
}

// GetParameterWithContext delegates to the test-supplied paramFn.
func (m *mockSSMClient) GetParameterWithContext(ctx aws.Context, input *ssm.GetParameterInput, opts ...request.Option) (*ssm.GetParameterOutput, error) {
	return m.paramFn(ctx, input)
}

// GetParametersByPathWithContext delegates to the test-supplied paramsByPathFn.
func (m *mockSSMClient) GetParametersByPathWithContext(ctx aws.Context, input *ssm.GetParametersByPathInput, opts ...request.Option) (*ssm.GetParametersByPathOutput, error) {
	return m.paramsByPathFn(ctx, input)
}
package routing

import (
	"allbooks/models"
	"fmt"
	"net/url"
	"strconv"
	"strings"
)

const (
	// MaxPerPage is the largest page size a client may request.
	MaxPerPage = 200
)

// makeLink builds one RFC 5988 style Link header entry
// (`<url>; rel="next"` etc.) for the given page/perPage, preserving all other
// query parameters from the incoming request.
func makeLink(
	context *CollectionContext,
	page uint64,
	perPage uint64,
	rel string,
) string {
	strPage := strconv.FormatUint(page, 10)
	strPerPage := strconv.FormatUint(perPage, 10)
	Url, err := url.Parse(Domain)
	if err != nil {
		// Domain is a package-level constant; a parse failure is a
		// programming error, not a request error.
		panic(err)
	}
	Url.Path += context.CollectionName()
	parameters := url.Values{}
	// Carry over every query parameter except the paging pair, which is
	// replaced below.
	for key, values := range context.Request().Form {
		if key != "page" && key != "perPage" {
			for _, value := range values {
				parameters.Add(key, value)
			}
		}
	}
	parameters.Add("page", strPage)
	parameters.Add("perPage", strPerPage)
	Url.RawQuery = parameters.Encode()
	return `<` + Url.String() + `>; rel="` + rel + `"`
}

// Pagination wraps action with paging: it parses page/perPage params,
// validates them against the collection count, runs the action, and then sets
// the Link response header with next/last/first/prev relations.
//
// NOTE(review): perPage is range-checked only AFTER it has been fed into
// models.Count and the page > lastPage comparison — confirm Count tolerates
// perPage <= 0. Also, the Link header is set after action(newContext) has
// run; verify the action cannot have flushed the response by then.
func Pagination(action Action) Action {
	return func(context Context) {
		if context.Stop() {
			return
		}
		newContext := ToCollectionContext(context)
		// Defaults: page 1, ten results per page.
		page := ParseIntParam(context, "page", 1)
		perPage := ParseIntParam(context, "perPage", 10)
		newContext.SetPerPage(perPage)
		newContext.SetPage(page)
		count, lastPage := models.Count(newContext)
		if count == 0 {
			context.RespondWithError(
				400,
				"Not Found",
				"Nothing was found for this request.",
			)
			return
		}
		if page > lastPage {
			message := fmt.Sprintf(
				"The `page` request param must be lower or equal to %d",
				lastPage)
			details := fmt.Sprintf(
				"When `perPage` is passed as `%d`, the maximum possible page is "+
					"`%d`, because the number of the results in the DB is `%d`",
				perPage, lastPage, count)
			context.RespondWithError(422, message, details)
			return
		}
		if perPage > MaxPerPage || perPage <= 0 {
			message := fmt.Sprintf(
				"The `perPage` request param must be lower or equal to %d and must "+
					"be greater than zero", MaxPerPage)
			context.RespondWithError(422, message, message)
			return
		}
		action(newContext)
		// Build the Link header: next/last when more pages follow,
		// first/prev when we are past the first page.
		links := make([]string, 0, 4)
		if page < lastPage {
			links = append(links, makeLink(newContext, page+1, perPage, "next"))
			links = append(
				links, makeLink(newContext, lastPage, perPage, "last"))
		}
		if page > 1 {
			links = append(links, makeLink(newContext, 1, perPage, "first"))
			links = append(links, makeLink(newContext, page-1, perPage, "prev"))
		}
		linkHeader := strings.Join(links[:], ", ")
		newContext.SetResponseHeader("Link", linkHeader)
	}
}
// Copyright 2018 The gVisor Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cmd

import (
	"flag"
	"fmt"
	"os"
	"testing"

	specs "github.com/opencontainers/runtime-spec/specs-go"
	"github.com/syndtr/gocapability/capability"
	"gvisor.dev/gvisor/pkg/log"
	"gvisor.dev/gvisor/pkg/test/testutil"
	"gvisor.dev/gvisor/runsc/config"
	"gvisor.dev/gvisor/runsc/container"
	"gvisor.dev/gvisor/runsc/specutils"
)

func init() {
	log.SetLevel(log.Debug)
	if err := testutil.ConfigureExePath(); err != nil {
		panic(err.Error())
	}
}

// checkProcessCaps loads the live capabilities of the given pid and verifies
// every capability type against wantCaps via checkCaps.
func checkProcessCaps(pid int, wantCaps *specs.LinuxCapabilities) error {
	curCaps, err := capability.NewPid2(pid)
	if err != nil {
		return fmt.Errorf("capability.NewPid2(%d) failed: %v", pid, err)
	}
	if err := curCaps.Load(); err != nil {
		return fmt.Errorf("unable to load capabilities: %v", err)
	}
	fmt.Printf("Capabilities (PID: %d): %v\n", pid, curCaps)

	for _, c := range allCapTypes {
		if err := checkCaps(c, curCaps, wantCaps); err != nil {
			return err
		}
	}
	return nil
}

// checkCaps verifies, for one capability type (effective, permitted, ...),
// that exactly the capabilities named in wantCaps are set in curCaps — both
// directions: nothing missing, nothing extra.
func checkCaps(which capability.CapType, curCaps capability.Capabilities, wantCaps *specs.LinuxCapabilities) error {
	wantNames := getCaps(which, wantCaps)
	for name, c := range capFromName {
		want := specutils.ContainsStr(wantNames, name)
		got := curCaps.Get(which, c)
		if want != got {
			if want {
				return fmt.Errorf("capability %v:%s should be set", which, name)
			}
			return fmt.Errorf("capability %v:%s should NOT be set", which, name)
		}
	}
	return nil
}

// TestCapabilities runs the capability check under both filesystem modes.
func TestCapabilities(t *testing.T) {
	t.Run("directfs", func(t *testing.T) { testCapabilities(t, true) })
	t.Run("lisafs", func(t *testing.T) { testCapabilities(t, false) })
}

// testCapabilities starts a container whose spec requests a small capability
// set and asserts the sandbox and gofer processes end up with exactly the
// expected capabilities (directfs adds extra sandbox capabilities).
func testCapabilities(t *testing.T, directfs bool) {
	stop := testutil.StartReaper()
	defer stop()

	spec := testutil.NewSpecWithArgs("/bin/sleep", "10000")
	caps := []string{
		"CAP_CHOWN",
		"CAP_SYS_PTRACE", // ptrace is added due to the platform choice.
	}
	spec.Process.Capabilities = &specs.LinuxCapabilities{
		Permitted:   caps,
		Bounding:    caps,
		Effective:   caps,
		Inheritable: caps,
	}

	conf := testutil.TestConfig(t)
	conf.DirectFS = directfs

	// Use --network=host to make sandbox use spec's capabilities.
	conf.Network = config.NetworkHost

	_, bundleDir, cleanup, err := testutil.SetupContainer(spec, conf)
	if err != nil {
		t.Fatalf("error setting up container: %v", err)
	}
	defer cleanup()

	// Create and start the container.
	args := container.Args{
		ID:        testutil.RandomContainerID(),
		Spec:      spec,
		BundleDir: bundleDir,
	}
	c, err := container.New(conf, args)
	if err != nil {
		t.Fatalf("error creating container: %v", err)
	}
	defer c.Destroy()
	if err := c.Start(conf); err != nil {
		t.Fatalf("error starting container: %v", err)
	}

	wantSandboxCaps := spec.Process.Capabilities
	if directfs {
		// With directfs, the sandbox has additional capabilities.
		wantSandboxCaps = specutils.MergeCapabilities(wantSandboxCaps, directfsSandboxLinuxCaps)
	}
	// Check that sandbox and gofer have the proper capabilities.
	if err := checkProcessCaps(c.Sandbox.Getpid(), wantSandboxCaps); err != nil {
		t.Error(err)
	}
	if err := checkProcessCaps(c.GoferPid, goferCaps); err != nil {
		t.Error(err)
	}
}

// TestMain re-executes the test binary as root when needed (capability
// inspection requires it) before running the suite.
func TestMain(m *testing.M) {
	flag.Parse()
	if err := specutils.MaybeRunAsRoot(); err != nil {
		fmt.Fprintf(os.Stderr, "Error running as root: %v", err)
		os.Exit(123)
	}
	os.Exit(m.Run())
}
package singleton import "testing" func TestSingleton(t *testing.T) { a := GetInstance() b := GetInstance() if a != b { t.Errorf("different") } }
// Copyright 2017 Zhang Peihao <zhangpeihao@gmail.com> package serialize import ( "bufio" "bytes" "io" "github.com/golang/glog" "github.com/zhangpeihao/zim/pkg/define" "github.com/zhangpeihao/zim/pkg/protocol" ) // ParseEngine 解析引擎函数 type ParseEngine interface { Parse(br *bufio.Reader) (cmd *protocol.Command, err error) Close() error } // Parser 解析器 type Parser struct { // reader bufio reader reader *bufio.Reader // engine 解析引擎 engine ParseEngine // probeByte 解析用字节 probeByte byte } // NewParser 新建解析器 func NewParser(r io.Reader) (parser *Parser) { return &Parser{ reader: bufio.NewReader(r), } } // ReadCommand 将数据写入解析器,如果能够解析出命令,则回调 func (parser *Parser) ReadCommand() (cmd *protocol.Command, err error) { var ( serializer *Serializer ok bool probeByte []byte ) if parser.engine == nil { // 重新构建 probeByte, err = parser.reader.Peek(1) if err != nil { return } if probeByte == nil || len(probeByte) == 0 { glog.Warningln("protocol::serialize::Parser::ReadFrom() probe byte is empty") return nil, io.EOF } serializer, ok = probeByteRegisters[probeByte[0]] if !ok { err = define.ErrUnsupportProtocol glog.Warningf("protocol::serialize::Parser::ReadFrom() Unsupport probe byte: 0X%2X\n", probeByte) return } parser.engine = serializer.NewParseEngine() } return parser.engine.Parse(parser.reader) } // Close 关闭 func (parser *Parser) Close() error { if parser.engine != nil { parser.engine.Close() parser.engine = nil } return nil } // Parse 通过字节解析 func Parse(data []byte) (cmd *protocol.Command, err error) { buf := bytes.NewBuffer(data) parser := NewParser(buf) return parser.ReadCommand() }
package zone

import (
	"github.com/pkg/errors"

	"github.com/kumahq/kuma/pkg/tokens/builtin/zoneingress"

	"github.com/kumahq/kuma/pkg/config/core/resources/store"
	"github.com/kumahq/kuma/pkg/core/runtime/component"
	"github.com/kumahq/kuma/pkg/kds/mux"
	kds_server "github.com/kumahq/kuma/pkg/kds/server"
	resources_k8s "github.com/kumahq/kuma/pkg/plugins/resources/k8s"
	k8s_model "github.com/kumahq/kuma/pkg/plugins/resources/k8s/native/pkg/model"

	"github.com/kumahq/kuma/pkg/core/resources/apis/system"

	"github.com/kumahq/kuma/pkg/core"
	"github.com/kumahq/kuma/pkg/core/resources/apis/mesh"
	"github.com/kumahq/kuma/pkg/core/resources/model"
	core_runtime "github.com/kumahq/kuma/pkg/core/runtime"
	kds_client "github.com/kumahq/kuma/pkg/kds/client"
	sync_store "github.com/kumahq/kuma/pkg/kds/store"
	"github.com/kumahq/kuma/pkg/kds/util"
)

var (
	kdsZoneLog = core.Log.WithName("kds-zone")

	// ProvidedTypes lists the resource types provided by the Zone
	// CP to the Global CP.
	ProvidedTypes = []model.ResourceType{
		mesh.DataplaneInsightType,
		mesh.DataplaneType,
		mesh.ZoneIngressInsightType,
		mesh.ZoneIngressType,
	}

	// ConsumedTypes lists the resource types consumed from the
	// Global CP by the Zone CP.
	ConsumedTypes = []model.ResourceType{
		mesh.CircuitBreakerType,
		mesh.DataplaneType,
		mesh.ExternalServiceType,
		mesh.FaultInjectionType,
		mesh.HealthCheckType,
		mesh.MeshType,
		mesh.ProxyTemplateType,
		mesh.RateLimitType,
		mesh.RetryType,
		mesh.TimeoutType,
		mesh.TrafficLogType,
		mesh.TrafficPermissionType,
		mesh.TrafficRouteType,
		mesh.TrafficTraceType,
		mesh.ZoneIngressType,
		system.ConfigType,
		system.GlobalSecretType,
		system.SecretType,
	}
)

// Setup wires the Zone control plane's KDS (Kuma Discovery Service)
// machinery into the runtime: a KDS server that streams ProvidedTypes to the
// Global CP, and a mux client whose sink consumes ConsumedTypes from it.
func Setup(rt core_runtime.Runtime) error {
	zone := rt.Config().Multizone.Zone.Name
	kdsServer, err := kds_server.New(kdsZoneLog, rt, ProvidedTypes, zone, rt.Config().Multizone.Zone.KDS.RefreshInterval,
		rt.KDSContext().ZoneProvidedFilter, false)
	if err != nil {
		return err
	}
	resourceSyncer := sync_store.NewResourceSyncer(kdsZoneLog, rt.ResourceStore())
	kubeFactory := resources_k8s.NewSimpleKubeFactory()
	onSessionStarted := mux.OnSessionStartedFunc(func(session mux.Session) error {
		log := kdsZoneLog.WithValues("peer-id", session.PeerID())
		log.Info("new session created")
		// Serve provided resources to the peer over the server half of the
		// session; runs until the stream ends.
		go func() {
			if err := kdsServer.StreamKumaResources(session.ServerStream()); err != nil {
				log.Error(err, "StreamKumaResources finished with an error")
			}
		}()
		// Consume resources from the Global CP over the client half.
		sink := kds_client.NewKDSSink(log, ConsumedTypes, kds_client.NewKDSStream(session.ClientStream(), zone),
			Callbacks(rt, resourceSyncer, rt.Config().Store.Type == store.KubernetesStore, zone, kubeFactory),
		)
		go func() {
			if err := sink.Start(session.Done()); err != nil {
				log.Error(err, "KDSSink finished with an error")
			}
		}()
		return nil
	})
	muxClient := mux.NewClient(
		rt.Config().Multizone.Zone.GlobalAddress,
		zone,
		onSessionStarted,
		*rt.Config().Multizone.Zone.KDS,
		rt.Metrics(),
		rt.KDSContext().ZoneClientCtx,
	)
	// ResilientComponent restarts the mux client if it fails.
	return rt.Add(component.NewResilientComponent(kdsZoneLog.WithName("mux-client"), muxClient))
}

// Callbacks builds the sink callbacks that store resources received from the
// Global CP, applying per-type filters before syncing to the local store.
func Callbacks(rt core_runtime.Runtime, syncer sync_store.ResourceSyncer, k8sStore bool, localZone string, kubeFactory resources_k8s.KubeFactory) *kds_client.Callbacks {
	return &kds_client.Callbacks{
		OnResourcesReceived: func(clusterID string, rs model.ResourceList) error {
			if k8sStore && rs.GetItemType() != system.ConfigType && rs.GetItemType() != system.SecretType && rs.GetItemType() != system.GlobalSecretType {
				// if type of Store is Kubernetes then we want to store upstream resources in dedicated Namespace.
				// KubernetesStore parses Name and considers substring after the last dot as a Namespace's Name.
				// System resources are not in the kubeFactory therefore we need explicit ifs for them
				kubeObject, err := kubeFactory.NewObject(rs.NewItem())
				if err != nil {
					return errors.Wrap(err, "could not convert object")
				}
				if kubeObject.Scope() == k8s_model.ScopeNamespace {
					util.AddSuffixToNames(rs.GetItems(), "default")
				}
			}
			// Dataplanes: only keep the zone-ingress dataplanes for this zone.
			if rs.GetItemType() == mesh.DataplaneType {
				return syncer.Sync(rs, sync_store.PrefilterBy(func(r model.Resource) bool {
					return r.(*mesh.DataplaneResource).Spec.IsZoneIngress(localZone)
				}))
			}
			// ZoneIngresses: only keep ingresses remote to this zone.
			if rs.GetItemType() == mesh.ZoneIngressType {
				return syncer.Sync(rs, sync_store.PrefilterBy(func(r model.Resource) bool {
					return r.(*mesh.ZoneIngressResource).IsRemoteIngress(localZone)
				}))
			}
			// Configs: only keep configs whitelisted in the KDS context.
			if rs.GetItemType() == system.ConfigType {
				return syncer.Sync(rs, sync_store.PrefilterBy(func(r model.Resource) bool {
					return rt.KDSContext().Configs[r.GetMeta().GetName()]
				}))
			}
			// GlobalSecrets: only keep the zone-ingress signing key.
			if rs.GetItemType() == system.GlobalSecretType {
				return syncer.Sync(rs, sync_store.PrefilterBy(func(r model.Resource) bool {
					return r.GetMeta().GetName() == zoneingress.SigningKeyResourceKey().Name
				}))
			}
			return syncer.Sync(rs)
		},
	}
}

// ConsumesType reports whether typ is one of the resource types this Zone CP
// consumes from the Global CP.
func ConsumesType(typ model.ResourceType) bool {
	for _, consumedTyp := range ConsumedTypes {
		if consumedTyp == typ {
			return true
		}
	}
	return false
}
package main import ( "strings" "time" "fyne.io/fyne/v2" "fyne.io/fyne/v2/container" "fyne.io/fyne/v2/widget" "github.com/sirupsen/logrus" ) func (c *Config) mGUI(m string) *fyne.Container { date := widget.NewEntry() date.SetText(time.Now().Format("2006-01-02")) song1box := widget.NewEntry() song1box.SetPlaceHolder("Song #1") song2box := widget.NewEntry() song2box.SetPlaceHolder("Song #2") song3box := widget.NewEntry() song3box.SetPlaceHolder("Song #3") fetchOtherMedia := widget.NewCheck("Fetch other media (pictures & videos)", func(f bool) { c.FetchOtherMedia = f c.writeConfigToFile() }) fetchOtherMedia.SetChecked(c.FetchOtherMedia) if c.AutoFetchMeetingData { if m == MM { song1box.Disabled() } song2box.Disabled() song3box.Disabled() fetchOtherMedia.Enable() } else { if m == MM { song1box.Enable() } song2box.Enable() song3box.Enable() fetchOtherMedia.Disable() } autoFetchMeetingData := widget.NewCheck("Automatically fetch meeting data", func(f bool) { c.AutoFetchMeetingData = f c.writeConfigToFile() if f { if m == MM { song1box.Disable() } song2box.Disable() song3box.Disable() fetchOtherMedia.Enable() } else { if m == MM { song1box.Enable() } song2box.Enable() song3box.Enable() fetchOtherMedia.Disable() } }) autoFetchMeetingData.SetChecked(c.AutoFetchMeetingData) playlistOption := widget.NewCheck("Create Playlist", func(p bool) { c.CreatePlaylist = p c.writeConfigToFile() }) playlistOption.SetChecked(c.CreatePlaylist) fetchButton := widget.NewButton("Fetch", func() { dateToSet, err := time.Parse("2006-01-02", date.Text) if err != nil { logrus.Fatal(err) } c.Date = WeekOf(dateToSet) c.SongsToGet = []string{song1box.Text, song2box.Text, song3box.Text} if err := c.fetchMeetingStuff(m); err == nil { fyne.CurrentApp().SendNotification(&fyne.Notification{ Title: "Meeting Downloader", Content: "SUCCESS!", }) } else { fyne.CurrentApp().SendNotification(&fyne.Notification{ Title: "Meeting Downloader", Content: "FAIL!", }) } // reset in case of subsequent runs c.Pictures = 
[]file{} c.Videos = []video{} c.SongsToGet = []string{} c.SongsNames = []string{} }) mmBox := container.NewVBox( date, autoFetchMeetingData, fetchOtherMedia, song1box, song2box, song3box, playlistOption, fetchButton, c.Progress.ProgressBar, ) return mmBox } func (c *Config) settingsGUI() *fyne.Container { resPicker := widget.NewRadioGroup([]string{ RES240, RES360, RES480, RES720, }, func(res string) { c.Resolution = res }) resPicker.SetSelected(c.Resolution) targetDir := widget.NewEntry() targetDir.SetPlaceHolder("Download Path...") targetDir.SetText(c.SaveLocation) cacheDir := widget.NewEntry() cacheDir.SetPlaceHolder("Cache Path...") cacheDir.SetText(c.CacheLocation) purgeDir := widget.NewCheck("Delete previous content before downloading new", func(d bool) { c.PurgeSaveDir = d }) purgeDir.SetChecked(c.PurgeSaveDir) lang := widget.NewEntry() lang.SetPlaceHolder("MEPS Language Symbol (eg. E)") lang.SetText(c.Language) pubs := widget.NewEntry() pubs.SetPlaceHolder("Linked publication symbols to allow (eg. th, rr)") var pubSymbolString string for i, s := range c.PubSymbols { if i != 0 { pubSymbolString += ", " } pubSymbolString += s } pubs.SetText(pubSymbolString) save := widget.NewButton("Save", func() { c.SaveLocation = targetDir.Text c.CacheLocation = cacheDir.Text c.Language = lang.Text var pubSymbolSlice []string for _, p := range strings.Split(pubs.Text, ",") { pubSymbolSlice = append(pubSymbolSlice, strings.TrimSpace(strings.ToLower(p))) } c.PubSymbols = pubSymbolSlice c.writeConfigToFile() }) settingsBox := container.NewVBox( resPicker, targetDir, cacheDir, purgeDir, lang, pubs, save, ) return settingsBox }
package rest import ( "encoding/json" "githubapi-golang/domain" "net/http" ) type getRequest struct { endPoint string } func NewGetRequest(endPoint string) domain.GetRequest { return &getRequest{ endPoint: endPoint, } } func (g *getRequest) GetUser(username string) domain.UserInfo { var u domain.UserInfo resp, err := http.Get(g.endPoint + username) if err != nil { return u } defer resp.Body.Close() err = json.NewDecoder(resp.Body).Decode(&u) if err != nil { return u } return u } func (g *getRequest) GetRepos(username string) []domain.Repo { var rs []domain.Repo resp, err := http.Get(g.endPoint + username + "/repos?sort=created") if err != nil { return rs } defer resp.Body.Close() err = json.NewDecoder(resp.Body).Decode(&rs) if err != nil { return rs } return rs }
package main import ( "testing" "github.com/360EntSecGroup-Skylar/excelize" "fmt" ) func TestPathWrt(t *testing.T) { xlsx := excelize.NewFile() // Create a new sheet. index := xlsx.NewSheet("Sheet2") // Set value of a cell. xlsx.SetCellValue("Sheet2", "A2", "Hello -- world.") xlsx.SetCellValue("Sheet1", "B2", "100") xlsx.SetSheetRow("Sheet2", "", []string{"a", "b", "c"}) // Set active sheet of the workbook. xlsx.SetActiveSheet(index) // Save xlsx file by the given path. err := xlsx.SaveAs("./Book1.xlsx") if err != nil { fmt.Println(err) } }
package voucher

import (
	"github.com/stretchr/testify/suite"
	"testing"
)

// TenantSuite groups tenant-related voucher test cases.
type TenantSuite struct {
	suite.Suite
}

// TestAccept is a placeholder — the test body has not been written yet.
func (s *TenantSuite) TestAccept() {

}

// TestAcceptRequest is a placeholder — the test body has not been written yet.
func (s *TenantSuite) TestAcceptRequest() {

}

// TestRevert is a placeholder — the test body has not been written yet.
func (s *TenantSuite) TestRevert() {

}

// TestTenantSuite runs the suite under the standard testing framework.
func TestTenantSuite(t *testing.T) {
	suite.Run(t, &TenantSuite{})
}
package requests import ( "fmt" "net/url" "strings" "github.com/atomicjolt/canvasapi" ) // PublishProvisionalGradesForAssignment Publish the selected provisional grade for all submissions to an assignment. // Use the "Select provisional grade" endpoint to choose which provisional grade to publish // for a particular submission. // // Students not in the moderation set will have their one and only provisional grade published. // // WARNING: This is irreversible. This will overwrite existing grades in the gradebook. // https://canvas.instructure.com/doc/api/moderated_grading.html // // Path Parameters: // # Path.CourseID (Required) ID // # Path.AssignmentID (Required) ID // type PublishProvisionalGradesForAssignment struct { Path struct { CourseID string `json:"course_id" url:"course_id,omitempty"` // (Required) AssignmentID string `json:"assignment_id" url:"assignment_id,omitempty"` // (Required) } `json:"path"` } func (t *PublishProvisionalGradesForAssignment) GetMethod() string { return "POST" } func (t *PublishProvisionalGradesForAssignment) GetURLPath() string { path := "courses/{course_id}/assignments/{assignment_id}/provisional_grades/publish" path = strings.ReplaceAll(path, "{course_id}", fmt.Sprintf("%v", t.Path.CourseID)) path = strings.ReplaceAll(path, "{assignment_id}", fmt.Sprintf("%v", t.Path.AssignmentID)) return path } func (t *PublishProvisionalGradesForAssignment) GetQuery() (string, error) { return "", nil } func (t *PublishProvisionalGradesForAssignment) GetBody() (url.Values, error) { return nil, nil } func (t *PublishProvisionalGradesForAssignment) GetJSON() ([]byte, error) { return nil, nil } func (t *PublishProvisionalGradesForAssignment) HasErrors() error { errs := []string{} if t.Path.CourseID == "" { errs = append(errs, "'Path.CourseID' is required") } if t.Path.AssignmentID == "" { errs = append(errs, "'Path.AssignmentID' is required") } if len(errs) > 0 { return fmt.Errorf(strings.Join(errs, ", ")) } return nil } func (t 
*PublishProvisionalGradesForAssignment) Do(c *canvasapi.Canvas) error { _, err := c.SendRequest(t) if err != nil { return err } return nil }
package main

import (
	"fmt"
	"reflect"
)

func main() {
	//test_reference()
	//test_ptr()
	//test_struct()
	//test_array()
	//test_types()
	//test_slice_capacity()
	//test_copy_on_capacity_overload()
	test_looping()
}

// test_types shows that array length is part of the array's type.
func test_types() {
	var a = [1]string{"Hello"} // this defines a type!
	var b = [1]string{"World"}
	var c = [2]string{} // this defines another different type!
	fmt.Println(reflect.TypeOf(a), reflect.TypeOf(b), reflect.TypeOf(c))
	fmt.Println(reflect.TypeOf(a) == reflect.TypeOf(b), reflect.TypeOf(b) == reflect.TypeOf(c))
}

// test_reference contrasts pointer equality with pointed-to value equality.
func test_reference() {
	type test struct {
		a int
	}
	b := &test{2}
	c := &test{2}
	fmt.Println(c == b)
	b = c
	fmt.Println(c == b, *c == *b) // c==b is equal to python is(reference comparison), whereas *c==*b is equal to python == (referred object's comparison)
}

// test_ptr demonstrates basic pointer reads/writes and reassignment.
func test_ptr() {
	i, j := 1, 2
	p := &i          // point to i
	fmt.Println(*p)  // read i through the pointer
	*p = 21          // set i through the pointer
	p = &j
	fmt.Println(*p)
	{
		k := 3
		p = &k
		fmt.Println(p, *p)
	}
	fmt.Println(p, *p)
}

// test_struct demonstrates struct literals and pointers to structs.
func test_struct() {
	type Vertex struct {
		X int
		Y int
	}
	a, b := Vertex{X: 1, Y: 2}, Vertex{X: 3, Y: 4}
	p1 := &Vertex{X: 5}
	// w := Vertex(3,4) // this is error. () for function call, {} for initialization
	a.Y = 0
	p2 := &a
	fmt.Println(a, b, *p1)
	x := 0
	//c=&x // c is already declared (initialized as *Vertex), cannot cast to int
	fmt.Println(x, *p2)
}

// modify_passed_array receives a COPY of the array; changes do not propagate.
func modify_passed_array(a [2]string) { // array is copied; [2]string is a specific type; [3]string is another different type; cannot be casted !
	a[0] = "Modified in array"
	fmt.Println("a in modify_passed_array:", a)
}

// modify_passed_slice receives a slice header sharing the caller's backing
// array; element writes propagate.
func modify_passed_slice(a []string) { // pass by slice reference; array is not copied; []string: slice type
	a[0] = "Modified in slice"
	fmt.Println("a in modify_passed_alice:", a)
}

// modify_passed_pointer mutates the caller's array through a pointer.
func modify_passed_pointer(a *[2]string) {
	(*a)[0] = "Modified in pointer"
	fmt.Println("a in modify_passed_pointer:", a, *a)
}

/// slice creates a view of a subset of array.
func test_array() {
	// var b [2]string{"Hello", "World"} // error
	var x = [2]string{"Hello", "Hello"}
	fmt.Println(x[0] == x[1]) // string compare
	var a = [2]string{"Hello", "World"}
	fmt.Println("Before a=", a)
	modify_passed_array(a)
	// arrays are like structs, with key-values as 0:value, 1:value ,2:value rather than struct key1:value, key2:value2
	// so passing array will make a COPY of it , just like struct ! To pass original array, pass a pointer to the array
	fmt.Println("After passing to modify array func ,a=", a)
	modify_passed_slice(a[0:]) // pass a slice that wraps underlying array! so this will change original array!
	fmt.Println("After passing to modify slice func ,a=", a)
	modify_passed_slice(a[1:]) // pass a slice that wraps underlying array! so this will change original array!
	fmt.Println("After passing to modify slice func ,a=", a)
	//p1 := &a
	//fmt.Println((*p1)[0], (*p1)[1])
	//fmt.Println(reflect.TypeOf(p1))
	modify_passed_pointer(&a)
	fmt.Println("After passing to modify pointer func ,a=", a)
}

// test_slice_capacity shows how reslicing changes len but not cap.
func test_slice_capacity() {
	s := []int{2, 3, 5, 7, 11, 13}
	fmt.Printf("len=%d cap=%d %v\n", len(s), cap(s), s)

	// reslice the slice to give it zero length.
	s = s[:0]
	fmt.Printf("len=%d cap=%d %v\n", len(s), cap(s), s)

	// Extend its length.
	s = s[:4]
	fmt.Printf("len=%d cap=%d %v\n", len(s), cap(s), s)

	// Drop its first two values.
	// Fix: the original had `s = s[:7]`, which panics — cap(s) is only 6 —
	// and contradicts this comment. Dropping the first two values is s[2:].
	s = s[2:]
	fmt.Printf("len=%d cap=%d %v\n", len(s), cap(s), s)
}

// slice implementation differs from python slicing (copy on write)
func test_copy_on_capacity_overload() {
	/*
		a := [5]int{0,1,2,3,4}
		[0 1 2 3 4]
		a-------------
		    b ----
		b = append(b,5)
		[0 1 2 3 5]
		a-------------
		    b -------
		b = append(b,6)
		[0 1 2 3 5]
		a-------------
		[2 3 5 6]
		b----------
	*/
	a := [5]int{0, 1, 2, 3, 4}
	b := a[2:4]
	fmt.Println(cap(a), cap(a[0:1]), cap(a[0:2]), cap(a[2:4]), cap(a[3:4]), cap(a[4:4])) // capacity: head to end of actual array
	b = append(b, 5)                                                                     // this affects a also, since it still has enough capacity
	fmt.Println(a, ",", b)
	b = append(b, 6) // since capacity is exceeded, b is moved to another copy of original underlying array, so now a, b use separate memory
	fmt.Println(a, ",", b)
}

// test_looping demonstrates Go's three for-loop forms.
func test_looping() {
	sum := 0
	for i := 0; i < 10; i++ {
		sum += i
	}
	fmt.Println(sum)

	// for without any ; is while loop in GO
	sum = 0
	for sum < 10 {
		sum += 1
	}
	fmt.Println(sum)

	// for {} this is infinite loop
	sum = 0
	for {
		sum += 1
		if sum > 10 {
			break
		} else { // must be placed at the same line after } of if
			continue
		}
	}
	fmt.Println(sum)
}
package odoo import ( "fmt" ) // BaseModuleUpgrade represents base.module.upgrade model. type BaseModuleUpgrade struct { LastUpdate *Time `xmlrpc:"__last_update,omptempty"` CreateDate *Time `xmlrpc:"create_date,omptempty"` CreateUid *Many2One `xmlrpc:"create_uid,omptempty"` DisplayName *String `xmlrpc:"display_name,omptempty"` Id *Int `xmlrpc:"id,omptempty"` ModuleInfo *String `xmlrpc:"module_info,omptempty"` WriteDate *Time `xmlrpc:"write_date,omptempty"` WriteUid *Many2One `xmlrpc:"write_uid,omptempty"` } // BaseModuleUpgrades represents array of base.module.upgrade model. type BaseModuleUpgrades []BaseModuleUpgrade // BaseModuleUpgradeModel is the odoo model name. const BaseModuleUpgradeModel = "base.module.upgrade" // Many2One convert BaseModuleUpgrade to *Many2One. func (bmu *BaseModuleUpgrade) Many2One() *Many2One { return NewMany2One(bmu.Id.Get(), "") } // CreateBaseModuleUpgrade creates a new base.module.upgrade model and returns its id. func (c *Client) CreateBaseModuleUpgrade(bmu *BaseModuleUpgrade) (int64, error) { ids, err := c.CreateBaseModuleUpgrades([]*BaseModuleUpgrade{bmu}) if err != nil { return -1, err } if len(ids) == 0 { return -1, nil } return ids[0], nil } // CreateBaseModuleUpgrade creates a new base.module.upgrade model and returns its id. func (c *Client) CreateBaseModuleUpgrades(bmus []*BaseModuleUpgrade) ([]int64, error) { var vv []interface{} for _, v := range bmus { vv = append(vv, v) } return c.Create(BaseModuleUpgradeModel, vv) } // UpdateBaseModuleUpgrade updates an existing base.module.upgrade record. func (c *Client) UpdateBaseModuleUpgrade(bmu *BaseModuleUpgrade) error { return c.UpdateBaseModuleUpgrades([]int64{bmu.Id.Get()}, bmu) } // UpdateBaseModuleUpgrades updates existing base.module.upgrade records. // All records (represented by ids) will be updated by bmu values. 
func (c *Client) UpdateBaseModuleUpgrades(ids []int64, bmu *BaseModuleUpgrade) error { return c.Update(BaseModuleUpgradeModel, ids, bmu) } // DeleteBaseModuleUpgrade deletes an existing base.module.upgrade record. func (c *Client) DeleteBaseModuleUpgrade(id int64) error { return c.DeleteBaseModuleUpgrades([]int64{id}) } // DeleteBaseModuleUpgrades deletes existing base.module.upgrade records. func (c *Client) DeleteBaseModuleUpgrades(ids []int64) error { return c.Delete(BaseModuleUpgradeModel, ids) } // GetBaseModuleUpgrade gets base.module.upgrade existing record. func (c *Client) GetBaseModuleUpgrade(id int64) (*BaseModuleUpgrade, error) { bmus, err := c.GetBaseModuleUpgrades([]int64{id}) if err != nil { return nil, err } if bmus != nil && len(*bmus) > 0 { return &((*bmus)[0]), nil } return nil, fmt.Errorf("id %v of base.module.upgrade not found", id) } // GetBaseModuleUpgrades gets base.module.upgrade existing records. func (c *Client) GetBaseModuleUpgrades(ids []int64) (*BaseModuleUpgrades, error) { bmus := &BaseModuleUpgrades{} if err := c.Read(BaseModuleUpgradeModel, ids, nil, bmus); err != nil { return nil, err } return bmus, nil } // FindBaseModuleUpgrade finds base.module.upgrade record by querying it with criteria. func (c *Client) FindBaseModuleUpgrade(criteria *Criteria) (*BaseModuleUpgrade, error) { bmus := &BaseModuleUpgrades{} if err := c.SearchRead(BaseModuleUpgradeModel, criteria, NewOptions().Limit(1), bmus); err != nil { return nil, err } if bmus != nil && len(*bmus) > 0 { return &((*bmus)[0]), nil } return nil, fmt.Errorf("base.module.upgrade was not found with criteria %v", criteria) } // FindBaseModuleUpgrades finds base.module.upgrade records by querying it // and filtering it with criteria and options. 
func (c *Client) FindBaseModuleUpgrades(criteria *Criteria, options *Options) (*BaseModuleUpgrades, error) { bmus := &BaseModuleUpgrades{} if err := c.SearchRead(BaseModuleUpgradeModel, criteria, options, bmus); err != nil { return nil, err } return bmus, nil } // FindBaseModuleUpgradeIds finds records ids by querying it // and filtering it with criteria and options. func (c *Client) FindBaseModuleUpgradeIds(criteria *Criteria, options *Options) ([]int64, error) { ids, err := c.Search(BaseModuleUpgradeModel, criteria, options) if err != nil { return []int64{}, err } return ids, nil } // FindBaseModuleUpgradeId finds record id by querying it with criteria. func (c *Client) FindBaseModuleUpgradeId(criteria *Criteria, options *Options) (int64, error) { ids, err := c.Search(BaseModuleUpgradeModel, criteria, options) if err != nil { return -1, err } if len(ids) > 0 { return ids[0], nil } return -1, fmt.Errorf("base.module.upgrade was not found with criteria %v and options %v", criteria, options) }
// Copyright 2020 Paul Greenberg greenpau@outlook.com // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package identity import ( "fmt" ) // Name represents human name type Name struct { First string `json:"first,omitempty" xml:"first,omitempty" yaml:"first,omitempty"` Last string `json:"last,omitempty" xml:"last,omitempty" yaml:"last,omitempty"` Middle string `json:"middle,omitempty" xml:"middle,omitempty" yaml:"middle,omitempty"` Preferred string `json:"preferred,omitempty" xml:"preferred,omitempty" yaml:"preferred,omitempty"` Nickname bool `json:"nickname,omitempty" xml:"nickname,omitempty" yaml:"nickname,omitempty"` Confirmed bool `json:"confirmed,omitempty" xml:"confirmed,omitempty" yaml:"confirmed,omitempty"` Primary bool `json:"primary,omitempty" xml:"primary,omitempty" yaml:"primary,omitempty"` Legal bool `json:"legal,omitempty" xml:"legal,omitempty" yaml:"legal,omitempty"` Alias bool `json:"alias,omitempty" xml:"alias,omitempty" yaml:"alias,omitempty"` } // NewName returns an instance of Name. func NewName() *Name { return &Name{} } // GetNameClaim returns name field of a claim. func (n *Name) GetNameClaim() string { if n.First != "" && n.Last != "" { return fmt.Sprintf("%s, %s", n.Last, n.First) } return "" } // GetFullName returns the primary full name for User. func (n *Name) GetFullName() string { if n.First != "" && n.Last != "" { return fmt.Sprintf("%s, %s", n.Last, n.First) } return "" }
package main

import (
	"fmt"
	"math"
)

// factors is the divisor-count threshold we are searching for
// (Project Euler 12: first triangular number with over 500 divisors).
const factors = 500

func main() {
	// n accumulates the i-th triangular number: 1, 3, 6, 10, ...
	n := uint64(0)
	for i := uint64(1); ; i++ {
		n += i
		count := numFactors(n)
		if count >= factors {
			fmt.Println("found it:", n)
			break
		}
	}
}

// numFactors returns the number of positive divisors of n by trial division
// up to sqrt(n); each divisor i below the root pairs with n/i above it.
func numFactors(n uint64) int {
	count := 0
	// Hoist the sqrt bound out of the loop: the original recomputed
	// math.Sqrt on every iteration.
	limit := uint64(math.Sqrt(float64(n)))
	for i := uint64(1); i <= limit; i++ {
		if n%i == 0 {
			count++
			// A perfect-square root counts only once.
			if n/i != i {
				count++
			}
		}
	}
	return count
}
package jobseekerprofile

import (
	"excho-job/entity"

	"gorm.io/gorm"
)

// Repository provides persistence operations for job seeker profiles,
// keyed by the owning job seeker's user ID.
type Repository interface {
	FindByUserID(ID string) (entity.JobSeekerProfile, error)
	Create(jobSeekerProfile entity.JobSeekerProfile) (entity.JobSeekerProfile, error)
	UpdateByID(ID string, dataUpdate map[string]interface{}) (entity.JobSeekerProfile, error)
}

// repository is the gorm-backed implementation of Repository.
type repository struct {
	db *gorm.DB
}

// NewRepository returns a repository backed by db.
func NewRepository(db *gorm.DB) *repository {
	return &repository{db}
}

// FindByUserID returns the profile whose job_seeker_id matches ID.
// NOTE(review): gorm's Find does not return ErrRecordNotFound for a missing
// row — a zero-valued profile with a nil error is returned instead; confirm
// callers expect that rather than First's not-found error.
func (r *repository) FindByUserID(ID string) (entity.JobSeekerProfile, error) {
	var jobSeekerProfile entity.JobSeekerProfile

	if err := r.db.Where("job_seeker_id = ?", ID).Find(&jobSeekerProfile).Error; err != nil {
		return jobSeekerProfile, err
	}

	return jobSeekerProfile, nil
}

// Create inserts a new profile and returns it (with any DB-populated fields).
func (r *repository) Create(jobSeekerProfile entity.JobSeekerProfile) (entity.JobSeekerProfile, error) {
	if err := r.db.Create(&jobSeekerProfile).Error; err != nil {
		return jobSeekerProfile, err
	}

	return jobSeekerProfile, nil
}

// UpdateByID applies dataUpdate to the profile with the given job_seeker_id,
// then re-reads and returns the updated row.
func (r *repository) UpdateByID(ID string, dataUpdate map[string]interface{}) (entity.JobSeekerProfile, error) {
	var jobSeekerProfile entity.JobSeekerProfile

	if err := r.db.Model(&jobSeekerProfile).Where("job_seeker_id = ?", ID).Updates(dataUpdate).Error; err != nil {
		return jobSeekerProfile, err
	}

	if err := r.db.Where("job_seeker_id = ?", ID).Find(&jobSeekerProfile).Error; err != nil {
		return jobSeekerProfile, err
	}

	return jobSeekerProfile, nil
}
package main import ( "bufio" "flag" "fmt" "log" "os" "strconv" ) func main() { // Flag handling inpath := flag.String("i", "", "Path to input file containing line seperated data") outpath := flag.String("o", "", "Path to output file where results will be stored") stressrange := flag.Float64("r", 10.0, "The range of values that will be counted in the fatigue count") flag.Parse() // Open the file and create the scanner file, err := os.Open(*inpath) if err != nil { log.Fatal(err) } defer file.Close() scanner := bufio.NewScanner(file) // Append to a slice var stress []float64 var stressTemp float64 for scanner.Scan() { stressTemp, _ = strconv.ParseFloat(scanner.Text(), 64) stress = append(stress, stressTemp) } // Remove non-peaks from raw stress data stripped := Peaks(stress) // Perform Rainflow count to get half anf full counts half, full := RainflowCounting(stripped) // Get the counts of each result := GetCounts(half, full, *stressrange) // Write results to console fmt.Println("Rainflow counter ASTM E1049 85 cl 5.4.4") fmt.Println("----------------------------------------") fmt.Printf("Input file:\t\t%v\n", *inpath) fmt.Printf("Bin size:\t\t%.3f\n", *stressrange) fmt.Printf("Data points:\t\t%v\n", len(stress)) fmt.Printf("Peaks and troughs:\t%v\n\n", len(stripped)) fmt.Println("--------------------------------------------------------------------------") fmt.Printf("Bin Low\t\tBin High\tBin Mean\tRange Mean\tCount\n") // Print to console and write to the outfile out, err := os.Create(*outpath) defer out.Close() // Create new writer to write results to file w := bufio.NewWriter(out) fmt.Fprintf(w, "Bin Low,Bin High,Bin Mean,Range Mean,Count\n") var count, meanRange, meanBin float64 for _, k := range result { meanRange, count = k.RangeMeanCount() meanBin = k.BinMean() if count > 0 { fmt.Printf("%.2f\t\t%.2f\t\t%.2f\t\t%.2f\t\t%.2f\n", k.Low, k.High, meanBin, meanRange, count) fmt.Fprintf(w, "%.5f,%.5f,%.5f,%.5f,%.5f\n", k.Low, k.High, meanBin, meanRange, count) } } 
w.Flush() fmt.Println("--------------------------------------------------------------------------") fmt.Printf("Results written to file %v\n", *outpath) }
package parser import ( "bufio" "bytes" "errors" "io" ) // Lexer is a lexer type Lexer struct { r *bufio.Reader ch chan Token lines []int column int lastRuneSize int } func NewLexer(r io.Reader) *Lexer { return &Lexer{ r: bufio.NewReader(r), ch: make(chan Token), } } func (l *Lexer) read() (rune, error) { ch, size, err := l.r.ReadRune() if err != nil { return ch, err } if ch == '\n' { l.lines = append(l.lines, l.column) l.column = 0 } else { l.column += size } l.lastRuneSize = size return ch, nil } func (l *Lexer) unread() error { err := l.r.UnreadRune() if err != nil { return err } if l.column == 0 { if len(l.lines) == 0 { return errors.New("cannot unread at beginning of file") } l.column = l.lines[len(l.lines)-1] l.lines = l.lines[:len(l.lines)-1] } else { l.column -= l.lastRuneSize } return nil } func (t *Lexer) Next() (Token, error) { ch, err := t.read() for { if err != nil { return nil, err } if isSpace(ch) { if err := t.skipSpace(); err != nil { return nil, err } ch, err = t.read() } else { break } } pos := pos{line: len(t.lines) + 1, column: t.column} switch ch { case '(': return Symbol{sym: SymLParen, pos: pos}, nil case ')': return Symbol{sym: SymRParen, pos: pos}, nil case '{': return Symbol{sym: SymLBrace, pos: pos}, nil case '}': return Symbol{sym: SymRBrace, pos: pos}, nil case '.': return Symbol{sym: SymDot, pos: pos}, nil case ',': return Symbol{sym: SymComma, pos: pos}, nil case '=': return Symbol{sym: SymAssign, pos: pos}, nil case ':': return Symbol{sym: SymSemicolon, pos: pos}, nil default: if err := t.unread(); err != nil { return nil, err } return t.nextIdent() } } func (t *Lexer) skipSpace() error { for { ch, err := t.read() if err != nil { return err } switch ch { case ' ', '\t': // skip over default: t.unread() return nil } } } // nextIdent returns the next identifier-type token. 
func (t *Lexer) nextIdent() (Token, error) { var ( buf bytes.Buffer line int column int ) line, column = len(t.lines)+1, t.column+1 for { ch, err := t.read() if err != nil { return nil, err } if isAlphaNum(ch) { buf.WriteRune(ch) } else { t.unread() break } } pos := posRange{line: line, columnStart: column, columnEnd: t.column} switch buf.String() { case "type": return TypeLit{pos: pos}, nil case "struct": return StructLit{pos: pos}, nil case "func": return FuncLit{pos: pos}, nil case "interface": return InterfaceLit{pos: pos}, nil case "package": return PackageLit{pos: pos}, nil case "return": return ReturnLit{pos: pos}, nil default: // A generic identifier return Ident{str: buf.String(), pos: pos}, nil } } func isSpace(ch rune) bool { return ch == ' ' || ch == '\t' } func isAlphaNum(ch rune) bool { return ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z') || ('0' <= ch && ch <= '9') }
package transfer // // Copyright (c) 2019 ARM Limited. // // SPDX-License-Identifier: MIT // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. // import ( "context" "errors" "fmt" "net/http" "io" . 
"devicedb/cluster" ) const DefaultEndpointURL = "/partitions/%d/keys" var EBadResponse = errors.New("Node responded with a bad response") type PartitionTransferTransport interface { Get(nodeID uint64, partition uint64) (io.Reader, func(), error) } type HTTPTransferTransport struct { httpClient *http.Client configController ClusterConfigController endpointURL string } func NewHTTPTransferTransport(configController ClusterConfigController, httpClient *http.Client) *HTTPTransferTransport { return &HTTPTransferTransport{ httpClient: httpClient, configController: configController, endpointURL: DefaultEndpointURL, } } func (transferTransport *HTTPTransferTransport) SetEndpointURL(endpointURL string) *HTTPTransferTransport { transferTransport.endpointURL = endpointURL return transferTransport } func (transferTransport *HTTPTransferTransport) Get(nodeID uint64, partition uint64) (io.Reader, func(), error) { peerAddress := transferTransport.configController.ClusterController().ClusterMemberAddress(nodeID) if peerAddress.IsEmpty() { return nil, nil, ENoSuchNode } endpointURL := peerAddress.ToHTTPURL(fmt.Sprintf(transferTransport.endpointURL, partition)) request, err := http.NewRequest("GET", endpointURL, nil) if err != nil { return nil, nil, err } ctx, cancel := context.WithCancel(context.Background()) request.WithContext(ctx) resp, err := transferTransport.httpClient.Do(request) if err != nil { cancel() if resp != nil && resp.Body != nil { resp.Body.Close() } return nil, nil, err } if resp.StatusCode != http.StatusOK { cancel() resp.Body.Close() return nil, nil, EBadResponse } close := func() { // should do any cleanup on behalf of this request cancel() resp.Body.Close() } return resp.Body, close, nil }
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package ipv4

import (
	"encoding/binary"
	"log"

	"github.com/brewlin/net-protocol/stack"
	"github.com/brewlin/net-protocol/pkg/buffer"
	"github.com/brewlin/net-protocol/protocol/header"
	tcpip "github.com/brewlin/net-protocol/protocol"
)

// handleControl handles the case when an ICMP packet contains the headers of
// the original packet that caused the ICMP one to be sent. This information is
// used to find out which transport endpoint must be notified about the ICMP
// packet.
func (e *endpoint) handleControl(typ stack.ControlType, extra uint32, vv buffer.VectorisedView) {
	h := header.IPv4(vv.First())

	// We don't use IsValid() here because ICMP only requires that the IP
	// header plus 8 bytes of the transport header be included. So it's
	// likely that it is truncated, which would cause IsValid to return
	// false.
	//
	// Drop packet if it doesn't have the basic IPv4 header or if the
	// original source address doesn't match the endpoint's address.
	if len(h) < header.IPv4MinimumSize || h.SourceAddress() != e.id.LocalAddress {
		return
	}

	hlen := int(h.HeaderLength())
	if vv.Size() < hlen || h.FragmentOffset() != 0 {
		// We won't be able to handle this if it doesn't contain the
		// full IPv4 header, or if it's a fragment not at offset 0
		// (because it won't have the transport header).
		return
	}

	// Skip the ip header, then deliver control message.
	vv.TrimFront(hlen)
	p := h.TransportProtocol()
	e.dispatcher.DeliverTransportControlPacket(e.id.LocalAddress, h.DestinationAddress(), ProtocolNumber, p, typ, extra, vv)
}

// handleICMP processes an incoming ICMPv4 packet, dispatching on its type.
func (e *endpoint) handleICMP(r *stack.Route, vv buffer.VectorisedView) {
	v := vv.First()
	if len(v) < header.ICMPv4MinimumSize {
		return
	}
	h := header.ICMPv4(v)

	// Dispatch based on the ICMP type.
	switch h.Type() {
	case header.ICMPv4Echo:
		// ICMP echo request.
		if len(v) < header.ICMPv4EchoMinimumSize {
			return
		}
		log.Printf("@网络层 icmp: 接受报文:echo")
		vv.TrimFront(header.ICMPv4MinimumSize)
		req := echoRequest{r: r.Clone(), v: vv.ToView()}
		select {
		case e.echoRequests <- req:
			// Hand off to the echoReplier goroutine.
		default:
			// Queue full: drop the request and release the route clone.
			req.r.Release()
		}

	case header.ICMPv4EchoReply:
		// ICMP echo reply: deliver to the transport layer (ping sockets).
		if len(v) < header.ICMPv4EchoMinimumSize {
			return
		}
		e.dispatcher.DeliverTransportPacket(r, header.ICMPv4ProtocolNumber, vv)

	case header.ICMPv4DstUnreachable:
		// Destination unreachable.
		if len(v) < header.ICMPv4DstUnreachableMinimumSize {
			return
		}
		vv.TrimFront(header.ICMPv4DstUnreachableMinimumSize)
		switch h.Code() {
		case header.ICMPv4PortUnreachable:
			// Port unreachable.
			e.handleControl(stack.ControlPortUnreachable, 0, vv)

		case header.ICMPv4FragmentationNeeded:
			// Fragmentation needed but the don't-fragment flag was set;
			// the next-hop MTU is carried in the last two bytes of the
			// unreachable header.
			mtu := uint32(binary.BigEndian.Uint16(v[header.ICMPv4DstUnreachableMinimumSize-2:]))
			e.handleControl(stack.ControlPacketTooBig, calculateMTU(mtu), vv)
		}
	}
	// TODO: Handle other ICMP types.
}

// echoRequest pairs a cloned route with the echo payload to be answered.
type echoRequest struct {
	r stack.Route
	v buffer.View
}

// echoReplier is the goroutine that services ICMP echo requests.
func (e *endpoint) echoReplier() {
	for req := range e.echoRequests {
		sendPing4(&req.r, 0, req.v)
		req.r.Release()
	}
}

// sendPing4 builds an ICMP echo reply from an echo request's payload and
// hands it to the IP layer for transmission.
func sendPing4(r *stack.Route, code byte, data buffer.View) *tcpip.Error {
	hdr := buffer.NewPrependable(header.ICMPv4EchoMinimumSize + int(r.MaxHeaderLength()))

	icmpv4 := header.ICMPv4(hdr.Prepend(header.ICMPv4EchoMinimumSize))
	icmpv4.SetType(header.ICMPv4EchoReply)
	icmpv4.SetCode(code)
	// The first bytes of data (identifier/sequence) are copied into the
	// echo header; the remainder stays as the payload.
	copy(icmpv4[header.ICMPv4MinimumSize:], data)
	data = data[header.ICMPv4EchoMinimumSize-header.ICMPv4MinimumSize:]
	icmpv4.SetChecksum(^header.Checksum(icmpv4, header.Checksum(data, 0)))
	log.Printf("@网络层 icmp: 响应报文:传递给ip层处理")

	// Hand to the IP layer for transmission.
	return r.WritePacket(hdr, data.ToVectorisedView(), header.ICMPv4ProtocolNumber, r.DefaultTTL())
}
package errors import ( std_errors "errors" "fmt" ) // New returns a new error with a message and a stack. func New(msg string) error { err := std_errors.New(msg) err = withStack(err, 2) return err } // Errorf returns a new error with a formatted message and a stack. func Errorf(format string, args ...interface{}) error { err := fmt.Errorf(format, args...) err = withStack(err, 2) return err }
package tg import ( "log" "strings" "unicode" ) type CommandHandler func(b *CommandBot, args string, msg *Message) type AnyCommandHandler func(b *CommandBot, cmd, args string, msg *Message) // CommandBot only handles commands. type CommandBot struct { *Bot Handlers map[string][]CommandHandler AnyHandlers []AnyCommandHandler } func Split(text string, sep byte) (string, string) { var i, j int for i = 0; i < len(text) && text[i] != sep; i++ { } for j = i; j < len(text) && text[j] == sep; j++ { } return text[0:i], text[j:len(text)] } func (b *CommandBot) HandleUpdate(_ *Bot, up *Update) { if up.Message == nil { return } if up.Message.Text == nil { return } text := *up.Message.Text if len(text) == 0 || text[0] != '/' { // Not a command return } cmd, args := Split(text[1:len(text)], ' ') cmd = strings.TrimRightFunc(cmd, unicode.IsSpace) cmd, to := Split(cmd, '@') if to != "" && to != b.Name { log.Println("addressed not to me, but to", to) // Not addressed to me return } for _, ach := range b.AnyHandlers { ach(b, cmd, args, up.Message) } chs, ok := b.Handlers[cmd] if !ok { log.Printf("unknown command %q", cmd) return } for _, ch := range chs { ch(b, args, up.Message) } } func (b *CommandBot) OnCommand(cmd string, ch CommandHandler) { b.Handlers[cmd] = append(b.Handlers[cmd], ch) } func (b *CommandBot) OnAnyCommand(ach AnyCommandHandler) { b.AnyHandlers = append(b.AnyHandlers, ach) } func NewCommandBot(token string) *CommandBot { b := &CommandBot{NewBot(token), make(map[string][]CommandHandler), nil} b.OnUpdate(b.HandleUpdate) return b }
/* Copyright © 2021 NAME HERE <EMAIL ADDRESS> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package cmd import ( "bufio" "encoding/csv" "flag" "fmt" "io" "log" "math" "os" "strconv" "github.com/spf13/cobra" ) var addCmd = &cobra.Command{ Use: "quiz", Short: "A quick little basic quiz", Long: `A longer description that spans multiple lines and likely contains examples and usage of using your command. For example:`, Run: func(cmd *cobra.Command, args []string) { quiz() }, } func init() { rootCmd.AddCommand(addCmd) } type Line struct { Question string Answer string } type Score struct { Name string Points string } func quiz() { quizfil := flag.String("csv", "quiz.csv", "path to csv file with the quiz") flag.Parse() csvFile, err := os.Open(*quizfil) if err != nil { fmt.Println(err) return } defer csvFile.Close() csvReader := csv.NewReader(bufio.NewReader(csvFile)) var lines []Line for { line, error := csvReader.Read() if error == io.EOF { break } else if error != nil { log.Fatal(error) } lines = append(lines, Line{ Question: line[0], Answer: line[1], }) } count := 0 for idx, line := range lines { fmt.Print(strconv.Itoa(idx+1) + ": " + line.Question + ": ") var input string fmt.Scan(&input) if input == line.Answer { count++ } } fmt.Println() fmt.Println("You scored " + strconv.Itoa(count) + " out of " + strconv.Itoa(len(lines))) scores(count) } func scores(scores int) { csvfile, err := os.Open("scoreboard.csv") if err != nil { log.Fatalln("Couldn't open the csv file", err) } r := csv.NewReader(csvfile) 
var countscore float64 = 0 var sumOfPlayers float64 = 0 for { record, err := r.Read() if err == io.EOF { break } if err != nil { log.Fatal(err) } stringvar := record[0] //här förstår jag inte riktigt varför jag måste ha två variabler fast jag inte vill ?? intVar, err := strconv.Atoi(stringvar) if intVar < scores { countscore++ } sumOfPlayers++ } var percentfloat float64 = ((countscore / sumOfPlayers) * 100) var y = fmt.Sprint(math.RoundToEven(percentfloat)) fmt.Println("You were better than " + y + "% of all quizzers") insertscore(scores) } func insertscore(countscore int) { csvfile, err := os.OpenFile("scoreboard.csv", os.O_RDWR|os.O_APPEND, 0660) if err != nil { log.Fatalln("Couldn't open the csv file", err) } var score [][]string score = append(score, []string{strconv.Itoa(int(countscore)), ""}) writer := csv.NewWriter(csvfile) writer.WriteAll(score) writer.Flush() if err := writer.Error(); err != nil { log.Fatal(err) } fmt.Println("Your score has now been registrated!") }
package diff

import (
	"github.com/containerum/kube-client/pkg/model"
	"github.com/ninedraft/boxofstuff/strset"
)

// ContainerSet is a set of containers keyed by their comparable
// projection (FromContainer); the value is the full container.
// Mutating methods (Put, Sub) return a fresh copy, persistent-set style.
type ContainerSet map[ComparableContainer]model.Container

// NewContainerSet builds a set from a slice of containers.
func NewContainerSet(containers []model.Container) ContainerSet {
	var set = make(ContainerSet, len(containers))
	for _, container := range containers {
		set[FromContainer(container)] = container
	}
	return set
}

// Copy returns a shallow copy of the set.
func (set ContainerSet) Copy() ContainerSet {
	var cp = make(ContainerSet, len(set))
	for k, v := range set {
		cp[k] = v
	}
	return cp
}

// Have reports whether an equivalent container is in the set.
func (set ContainerSet) Have(container model.Container) bool {
	var _, ok = set[FromContainer(container)]
	return ok
}

// Put returns a copy of the set with container added; the receiver is
// not modified.
func (set ContainerSet) Put(container model.Container) ContainerSet {
	set = set.Copy()
	set[FromContainer(container)] = container
	return set
}

// New returns an empty set with capacity hinted by the receiver's size.
func (set ContainerSet) New() ContainerSet {
	return make(ContainerSet, len(set))
}

// Len returns the number of containers in the set.
func (set ContainerSet) Len() int {
	return len(set)
}

// Filter returns the subset whose keys satisfy pred.
func (set ContainerSet) Filter(pred func(container ComparableContainer) bool) ContainerSet {
	var filtered = set.New()
	for k, v := range set {
		if pred(k) {
			filtered[k] = v
		}
	}
	return filtered
}

// Keys returns the comparable keys, in unspecified order.
func (set ContainerSet) Keys() []ComparableContainer {
	var containers = make([]ComparableContainer, 0, set.Len())
	for container := range set {
		containers = append(containers, container)
	}
	return containers
}

// Values returns the stored containers, in unspecified order.
func (set ContainerSet) Values() []model.Container {
	var containers = make([]model.Container, 0, set.Len())
	for _, container := range set {
		containers = append(containers, container)
	}
	return containers
}

// Sub returns set minus the keys of x; the receiver is not modified.
func (set ContainerSet) Sub(x ContainerSet) ContainerSet {
	set = set.Copy()
	for k := range x {
		delete(set, k)
	}
	return set
}

// Names returns the container names, in unspecified order.
func (set ContainerSet) Names() []string {
	var names = make([]string, 0, set.Len())
	for k := range set {
		names = append(names, k.Name)
	}
	return names
}

// NamesSet returns the container names as a string set.
func (set ContainerSet) NamesSet() strset.Set {
	return strset.NewSet(set.Names())
}

// OnlyLatest returns the subset whose keys report IsLatest().
func (set ContainerSet) OnlyLatest() ContainerSet {
	return set.Filter(func(container ComparableContainer) bool {
		return container.IsLatest()
	})
}
/*
The "Factory Method" pattern is a creational pattern: it is responsible for
convenient and safe creation of new objects (or whole families of objects). It:

 1. Defines an interface for creating an object while letting subtypes decide
    which concrete type to instantiate — the factory method delegates
    instantiation.
 2. Separates product-construction code from the code that uses the products.
 3. Creating objects through a factory method is always a more flexible
    solution than constructing them directly.

Example: creating a car by body type.

 1. Define an interface encapsulating setters/getters for the body type.
 2. Define the "parent" car struct (with a name and a price) and implement
    the interface from step 1 on it.
 3. Define the concrete car types — structs embedding the struct from step 2.
 4. The getCarF function returns the requested type based on its name.

Pros:
 1. Adding new product types becomes easier.
 2. No need to couple to concrete types.
 3. Object-initialization code lives in one place.

Cons:
 1. The code becomes more complex: many parallel hierarchies may appear.

(Header translated from the original Russian.)
*/
package main

import "fmt"

// bodyTypeF is the product interface: every car exposes its body-type
// name and price.
type bodyTypeF interface {
	setType(name string)
	setPrice(price int)
	getType() string
	getPrice() int
}

// carF is the shared implementation embedded by each concrete car type.
type carF struct {
	name  string
	price int
}

func (cf *carF) setType(name string) {
	cf.name = name
}

func (cf *carF) getType() string {
	return cf.name
}

func (cf *carF) setPrice(price int) {
	cf.price = price
}

func (cf *carF) getPrice() int {
	return cf.price
}

// SUV is the sport-utility body type.
type SUV struct {
	carF
}

func newSUV() bodyTypeF {
	return &SUV{
		carF: carF{
			name:  "SUV",
			price: 100000,
		},
	}
}

// cabrio is the convertible body type.
type cabrio struct {
	carF
}

func newCabrio() bodyTypeF {
	// BUG FIX: previously returned &SUV{...} (copy-paste error), so the
	// concrete type was wrong and the cabrio type was never instantiated.
	return &cabrio{
		carF: carF{
			name:  "Cabrio",
			price: 150000,
		},
	}
}

// coupe is the coupe body type.
type coupe struct {
	carF
}

func newCoupe() bodyTypeF {
	// BUG FIX: previously returned &SUV{...} (copy-paste error).
	return &coupe{
		carF: carF{
			name:  "Coupe",
			price: 200000,
		},
	}
}

// getCarF is the factory method: it maps a body-type name to a newly
// constructed concrete car, or returns an error for unknown names.
func getCarF(bodyType string) (bodyTypeF, error) {
	if bodyType == "SUV" {
		return newSUV(), nil
	}
	if bodyType == "Cabrio" {
		return newCabrio(), nil
	}
	if bodyType == "Coupe" {
		return newCoupe(), nil
	}
	return nil, fmt.Errorf("Wrong body type passed")
}

/*func main() {
	SUV, _ := getCarF("SUV")
	Cabrio, _ := getCarF("Cabrio")
	Coupe, _ := getCarF("Coupe")
	fmt.Printf("%s price is %d\n", SUV.getType(), SUV.getPrice())
	fmt.Printf("%s price is %d\n", Cabrio.getType(), Cabrio.getPrice())
	fmt.Printf("%s price is %d\n", Coupe.getType(), Coupe.getPrice())
}*/
package main import ( "fmt" "reflect" "unsafe" ) func main() { var str string var s []int var m map[string]int var f func(int, int) int var i interface{} var c chan int var ptr *int fmt.Println(str == "") fmt.Println(s == nil) fmt.Println(m == nil) fmt.Println(f == nil) fmt.Println(i == nil) fmt.Println(c == nil) fmt.Println(ptr == nil) fmt.Println("--------------") fmt.Println(reflect.TypeOf(str)) //2*byte 即输出16 指向底层数组指针(Data)+len(长度),可以看reflect.StringHeader fmt.Println(reflect.TypeOf(s)) //3*byte 即输出24 指向底层数组指针+len+cap,可以看reflect.SliceHeader fmt.Println(reflect.TypeOf(m)) //1*byte 即输出8 本质也是指针 fmt.Println(reflect.TypeOf(f)) //1*byte 即输出8 本质也是指针 fmt.Println(reflect.TypeOf(i)) //2*byte 即输出8 一个指向类型(Type)一个指向值(Value)反射用到的就是这个 fmt.Println(reflect.TypeOf(c)) //1*byte 即输出8 本质也是指针 fmt.Println(reflect.TypeOf(ptr)) //1*byte 即输出8 指针 fmt.Println("--------------") fmt.Println(unsafe.Sizeof(str)) fmt.Println(unsafe.Sizeof(s)) fmt.Println(unsafe.Sizeof(m)) fmt.Println(unsafe.Sizeof(f)) fmt.Println(unsafe.Sizeof(i)) fmt.Println(unsafe.Sizeof(c)) fmt.Println(unsafe.Sizeof(ptr)) }
package e var MsgFlags = map[int]string { SUCCESS : "ok", ERROR : "fail", INVALID_PARAMS : "请求参数错误", ERROR_AUTH_CHECK_TOKEN_TIMEOUT : "Token已超时", ERROR_AUTH_TOKEN : "Token生成失败", ERROR_AUTH : "Token错误", ERROR_REGISTER_FORMAT_FAIL : "请求参数不符合格式", ERROR_USER_EXIST : "注册失败,用户名已存在!", ERROR_UPLOAD_SAVE_IMAGE_FAIL: "保存图片失败", ERROR_UPLOAD_CHECK_IMAGE_FAIL: "检查图片失败", ERROR_UPLOAD_CHECK_IMAGE_FORMAT: "校验图片错误,图片格式或大小有问题", ERROR_ADD_BASIC_INFO_FAIL : "添加基本信息出错", ERROR_GET_BASIC_INFO_FAIL : "获取基本信息出错", } func GetMsg(code int) string { msg, ok := MsgFlags[code] if ok { return msg } return MsgFlags[ERROR] }
package testlib import ( "bytes" "fmt" "image/png" "github.com/remogatto/imagetest" "github.com/remogatto/mandala" ) func (t *TestSuite) TestDraw() { request := mandala.LoadResourceRequest{ Filename: "drawable/expected.png", Response: make(chan mandala.LoadResourceResponse), } mandala.ResourceManager() <- request response := <-request.Response buffer := response.Buffer t.True(response.Error == nil, "An error occured during resource opening") if buffer != nil { exp, err := png.Decode(bytes.NewBuffer(buffer)) t.True(err == nil, "An error occured during png decoding") distance := imagetest.CompareDistance(exp, <-t.testDraw, nil) t.True(distance < 0.1, fmt.Sprintf("Distance is %f", distance)) } }
package flags import "strconv" type Int64Value struct { set bool value int64 } func (i *Int64Value) Set(s string) error { v, err := strconv.ParseInt(s, 0, strconv.IntSize) if err != nil { err = numError(err) } i.value = v i.set = true return err } func (i *Int64Value) Get() interface{} { return i.value } func (i *Int64Value) String() string { return strconv.Itoa(int(i.value)) } func (i *Int64Value) IsSet() bool { return i.set }
package test import ( "fmt" "testing" ) func TestRange(t *testing.T) { nums := []int{1, 2, 3, 4} sum := 0 for i, num := range nums { sum += num fmt.Println(i, sum) } kvs := map[string]string{"a": "1", "b": "2", "c": "3"} for _, v := range kvs { fmt.Printf("%s\n", v) } for _, v := range "abcdef" { fmt.Println(v) } }
package update_model import ( "github.com/syou6162/go-active-learning/lib/classifier" tweet_feature "github.com/syou6162/go-active-learning/lib/feature/tweet" "github.com/syou6162/go-active-learning/lib/model" "github.com/syou6162/go-active-learning/lib/service" "github.com/urfave/cli" ) func doUpdateTweetModel(c *cli.Context) error { app, err := service.NewDefaultApp() if err != nil { return err } defer app.Close() tweets, err := app.SearchReferringTweets(30000) if err != nil { return err } exampleIds := make([]int, 0) for _, t := range tweets.Tweets { exampleIds = append(exampleIds, t.ExampleId) } examples, err := app.SearchExamplesByIds(exampleIds) if err != nil { return err } exampleById := make(map[int]*model.Example) for _, e := range examples { exampleById[e.Id] = e } instances := classifier.LearningInstances{} for _, t := range tweets.Tweets { e := exampleById[t.ExampleId] et := tweet_feature.GetExampleAndTweet(e, t) instances = append(instances, &et) } m, err := classifier.NewMIRAClassifierByCrossValidation(classifier.TWITTER, instances) if err != nil { return err } if err := app.InsertMIRAModel(*m); err != nil { return err } if err := postEvaluatedMetricsToMackerel("tweet_evaluation", m.Accuracy, m.Precision, m.Recall, m.Fvalue); err != nil { return err } return nil } var CommandUpdateTweetModel = cli.Command{ Name: "update-tweet-model", Usage: "update tweet model", Description: ` Update tweet model. `, Action: doUpdateTweetModel, }
package main import ( "github.com/notblizzard/bluebirdmini/Godeps/_workspace/src/github.com/gorilla/mux" "net/http" ) func main() { r := mux.NewRouter() r.HandleFunc("/", RootHandler) r.HandleFunc("/new", NewHandler) r.HandleFunc("/home", HomeHandler) r.HandleFunc("/login", LoginHandler) r.HandleFunc("/logout", LogoutHandler) r.HandleFunc("/register", RegisterHandler) r.HandleFunc("/{user}/{id}", PostHandler) http.Handle("/", r) http.ListenAndServe(":8000", nil) }
// Copyright 2020 Comcast Cable Communications Management, LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package cli type ArgType string const ( ArgTypeInt ArgType = "int" ArgTypeIntSlice ArgType = "[]int" ArgTypeFloat ArgType = "float" ArgTypeFloatSlice ArgType = "[]float" ArgTypeString ArgType = "string" ArgTypeStringSlice ArgType = "[]string" ArgTypeBool ArgType = "bool" ArgTypeBoolSlice ArgType = "[]bool" ) type Argument struct { Name string Shorthand string Type ArgType Description string Default interface{} LookupKey string Persistent bool } type Config struct { EnvPrefix string Name string Paths []string }
package vehicle

import (
	"time"

	"github.com/evcc-io/evcc/api"
	"github.com/evcc-io/evcc/util"
	"github.com/evcc-io/evcc/vehicle/bluelink"
)

// Upstream protocol references:
// https://github.com/Hacksore/bluelinky
// https://github.com/Hyundai-Kia-Connect/hyundai_kia_connect_api/pull/353/files

// Bluelink is an api.Vehicle implementation
type Bluelink struct {
	*embed
	*bluelink.Provider
}

// Register both brands that share the Bluelink backend.
func init() {
	registry.Add("kia", NewKiaFromConfig)
	registry.Add("hyundai", NewHyundaiFromConfig)
}

// NewHyundaiFromConfig creates a new vehicle
// using Hyundai's EU Bluelink endpoints and published client credentials.
func NewHyundaiFromConfig(other map[string]interface{}) (api.Vehicle, error) {
	settings := bluelink.Config{
		URI:               "https://prd.eu-ccapi.hyundai.com:8080",
		BasicToken:        "NmQ0NzdjMzgtM2NhNC00Y2YzLTk1NTctMmExOTI5YTk0NjU0OktVeTQ5WHhQekxwTHVvSzB4aEJDNzdXNlZYaG10UVI5aVFobUlGampvWTRJcHhzVg==",
		CCSPServiceID:     "6d477c38-3ca4-4cf3-9557-2a1929a94654",
		CCSPApplicationID: bluelink.HyundaiAppID,
		AuthClientID:      "64621b96-0f0d-11ec-82a8-0242ac130003",
		BrandAuthUrl:      "https://eu-account.hyundai.com/auth/realms/euhyundaiidm/protocol/openid-connect/auth?client_id=%s&scope=openid+profile+email+phone&response_type=code&hkid_session_reset=true&redirect_uri=%s/api/v1/user/integration/redirect/login&ui_locales=%s&state=%s:%s",
		PushType:          "GCM",
		Cfb:               "RFtoRq/vDXJmRndoZaZQyfOot7OrIqGVFj96iY2WL3yyH5Z/pUvlUhqmCxD2t+D65SQ=",
	}

	return newBluelinkFromConfig("hyundai", other, settings)
}

// NewKiaFromConfig creates a new vehicle
// using Kia's EU Bluelink endpoints and published client credentials.
func NewKiaFromConfig(other map[string]interface{}) (api.Vehicle, error) {
	settings := bluelink.Config{
		URI:               "https://prd.eu-ccapi.kia.com:8080",
		BasicToken:        "ZmRjODVjMDAtMGEyZi00YzY0LWJjYjQtMmNmYjE1MDA3MzBhOnNlY3JldA==",
		CCSPServiceID:     "fdc85c00-0a2f-4c64-bcb4-2cfb1500730a",
		CCSPApplicationID: bluelink.KiaAppID,
		AuthClientID:      "572e0304-5f8d-4b4c-9dd5-41aa84eed160",
		BrandAuthUrl:      "https://eu-account.kia.com/auth/realms/eukiaidm/protocol/openid-connect/auth?client_id=%s&scope=openid+profile+email+phone&response_type=code&hkid_session_reset=true&redirect_uri=%s/api/v1/user/integration/redirect/login&ui_locales=%s&state=%s:%s",
		PushType:          "APNS",
		Cfb:               "wLTVxwidmH8CfJYBWSnHD6E0huk0ozdiuygB4hLkM5XCgzAL1Dk5sE36d/bx5PFMbZs=",
	}

	return newBluelinkFromConfig("kia", other, settings)
}

// newBluelinkFromConfig creates a new Vehicle
// by decoding the user config, logging in, and resolving the configured
// VIN to a vehicle before wrapping it in a cached Provider.
func newBluelinkFromConfig(brand string, other map[string]interface{}, settings bluelink.Config) (api.Vehicle, error) {
	cc := struct {
		embed          `mapstructure:",squash"`
		User, Password string
		VIN            string
		Language       string
		Expiry         time.Duration
		Cache          time.Duration
	}{
		// expiry and interval are package-level defaults declared
		// elsewhere in this package.
		Language: "en",
		Expiry:   expiry,
		Cache:    interval,
	}

	if err := util.DecodeOther(other, &cc); err != nil {
		return nil, err
	}

	// Keep credentials and VIN out of the logs.
	log := util.NewLogger(brand).Redact(cc.User, cc.Password, cc.VIN)
	identity := bluelink.NewIdentity(log, settings)

	if err := identity.Login(cc.User, cc.Password, cc.Language); err != nil {
		return nil, err
	}

	api := bluelink.NewAPI(log, settings.URI, identity.Request)

	// ensureVehicleEx is defined elsewhere — presumably it selects the
	// vehicle matching cc.VIN from api.Vehicles; confirm its semantics.
	vehicle, err := ensureVehicleEx(
		cc.VIN, api.Vehicles,
		func(v bluelink.Vehicle) string { return v.VIN },
	)
	if err != nil {
		return nil, err
	}

	v := &Bluelink{
		embed:    &cc.embed,
		Provider: bluelink.NewProvider(api, vehicle.VehicleID, cc.Expiry, cc.Cache),
	}

	return v, nil
}
package main // Leetcode 89. (medium) func grayCode(n int) []int { res := []int{0} for i := 1; i <= n; i++ { res = append(res, res...) inc := len(res) / 2 left, right := inc-1, inc for left >= 0 { res[right] = res[left] + inc left-- right++ } } return res }
package client_test import ( "github.com/farzadrastegar/simple-cab/gateway" "github.com/farzadrastegar/simple-cab/gateway/client" ) // Client represents a test wrapper for client.Client. type Client struct { *client.Client cabService gateway.CabService Handler *Handler } // NewClient returns a new instance of test Client. func NewClient() *Client { c := &Client{ Client: client.NewClient(), Handler: NewHandler(), } c.Client.Handler = client.NewHandler() c.cabService = c.Client.Connect() c.Client.Handler.BusService = &c.Handler.BusService c.Client.Handler.RequestService = &c.Handler.RequestService return c } // Connect returns the cabservice from client. func (c *Client) Connect() gateway.CabService { return c.cabService }
package user import "github.com/zxhaaa6/gin-demo/model" type Service struct { UserDao Dao } func InitService() Service { service := Service{} service.UserDao = InitDao() return service } func (r *Service) findUserById(id string) (model.User, error) { return r.UserDao.findUserById(id) } func (r *Service) getAllUsers() ([]model.User, error) { return r.UserDao.getAllUsers() }
package main

import (
	"bufio"
	"fmt"
	"log"
	"math"
	"math/rand"
	"net/http"
	"os"
)

// pi computes an approximation of pi using n+1 terms of the Leibniz
// series. (The original comment claimed goroutines were launched; the
// computation is sequential.)
func pi(n int) float64 {
	f := 0.0
	for k := 0; k <= n; k++ {
		f += 4 * math.Pow(-1, float64(k)) / (2*float64(k) + 1)
	}
	return f
}

// cpuLoad burns CPU by summing a long series.
func cpuLoad() string {
	// BUG FIX: pi returns a float64, so the %d verb printed
	// "%!d(float64=...)" instead of the value. Use %f.
	return fmt.Sprintf("The answer is %f", pi(10000))
}

// memoryLoad allocates ~1MB and fills it with data-dependent writes.
func memoryLoad() string {
	// 1MB of memory
	a := make([]int32, 250000)
	a[0] = int32(rand.Int())
	a[1] = int32(rand.Int())
	a[2] = int32(rand.Int())
	for i := 3; i < len(a); i++ {
		a[i] = a[i-3]*a[i-2] - a[i-1]
	}
	return fmt.Sprintf("The answer is %d", a[len(a)-1])
}

// netLoad returns a small payload.
// NOTE(review): the loop overwrites str on each iteration, so the result
// is a single "1234567890" — accumulation was presumably intended.
// Behavior preserved; confirm intent before changing.
func netLoad() string {
	str := ""
	for i := 0; i < 1000; i++ {
		str = fmt.Sprintf("1234567890")
	}
	return str
}

// fsLoad writes 10KB to a uniquely named file in the working directory.
func fsLoad() string {
	f, err := os.Create(fmt.Sprintf("somefile%d", rand.Int31()))
	if err != nil {
		return err.Error()
	}
	defer f.Close()
	w := bufio.NewWriter(f)
	for i := 0; i < 1000; i++ {
		w.Write([]byte("1234567890"))
	}
	w.Flush()
	return "OK"
}

// noLoad is the no-op scenario.
func noLoad() string {
	return "nothing to do here"
}

// Handler wrappers for the individual load functions. Fprint is used
// instead of Fprintf with a non-constant format string (go vet warning;
// a payload containing '%' would be misinterpreted).
func cpu(w http.ResponseWriter, r *http.Request)  { fmt.Fprint(w, cpuLoad()) }
func memo(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, memoryLoad()) }
func net(w http.ResponseWriter, r *http.Request)  { fmt.Fprint(w, netLoad()) }
func disk(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, fsLoad()) }
func none(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, noLoad()) }

// ScenarioFactory returns a handler that runs one randomly chosen load
// function from funcs on each request.
func ScenarioFactory(funcs []func() string) func(w http.ResponseWriter, r *http.Request) {
	return func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprint(w, funcs[rand.Intn(len(funcs))]())
	}
}

// main exposes the individual loads plus mixed scenarios (/s0 ... /s9)
// and serves on port 3212.
func main() {
	http.HandleFunc("/cpu", cpu)
	http.HandleFunc("/memo", memo)
	http.HandleFunc("/net", net)
	http.HandleFunc("/disk", disk)
	http.HandleFunc("/none", none)
	scenarios := [][]func() string{
		{noLoad},
		{cpuLoad},
		{memoryLoad},
		{netLoad},
		{fsLoad},
		{cpuLoad, noLoad},
		{memoryLoad, noLoad},
		{netLoad, noLoad},
		{fsLoad, noLoad},
		{cpuLoad, memoryLoad, netLoad, fsLoad},
	}
	for i := 0; i < len(scenarios); i++ {
		http.HandleFunc(fmt.Sprintf("/s%d", i), ScenarioFactory(scenarios[i]))
	}
	// BUG FIX: the server error was silently discarded.
	log.Fatal(http.ListenAndServe(":3212", nil))
}
// Copyright 2015 The Vanadium Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package tool import ( "io" "os" "v.io/jiri/gerrit" "v.io/jiri/gitutil" "v.io/jiri/jenkins" "v.io/jiri/runutil" "v.io/x/lib/cmdline" "v.io/x/lib/timing" ) // Context represents an execution context of a tool command // invocation. Its purpose is to enable sharing of state throughout // the lifetime of a command invocation. type Context struct { opts ContextOpts run *runutil.Run seq *runutil.Sequence start *runutil.Start } // ContextOpts records the context options. type ContextOpts struct { Color *bool DryRun *bool Env map[string]string Manifest *string Stdin io.Reader Stdout io.Writer Stderr io.Writer Verbose *bool Timer *timing.Timer } // newContextOpts is the ContextOpts factory. func newContextOpts() *ContextOpts { return &ContextOpts{ Color: &ColorFlag, DryRun: &DryRunFlag, Env: map[string]string{}, Manifest: &ManifestFlag, Stdin: os.Stdin, Stdout: os.Stdout, Stderr: os.Stderr, Verbose: &VerboseFlag, Timer: nil, } } // initOpts initializes all unset options to the given defaults. func initOpts(defaultOpts, opts *ContextOpts) { if opts.Color == nil { opts.Color = defaultOpts.Color } if opts.DryRun == nil { opts.DryRun = defaultOpts.DryRun } if opts.Env == nil { opts.Env = defaultOpts.Env } if opts.Manifest == nil { opts.Manifest = defaultOpts.Manifest } if opts.Stdin == nil { opts.Stdin = defaultOpts.Stdin } if opts.Stdout == nil { opts.Stdout = defaultOpts.Stdout } if opts.Stderr == nil { opts.Stderr = defaultOpts.Stderr } if opts.Verbose == nil { opts.Verbose = defaultOpts.Verbose } if opts.Timer == nil { opts.Timer = defaultOpts.Timer } } // NewContext is the Context factory. 
func NewContext(opts ContextOpts) *Context { initOpts(newContextOpts(), &opts) run := runutil.NewRun(opts.Env, opts.Stdin, opts.Stdout, opts.Stderr, *opts.Color, *opts.DryRun, *opts.Verbose) seq := runutil.NewSequence(opts.Env, opts.Stdin, opts.Stdout, opts.Stderr, *opts.Color, *opts.DryRun, *opts.Verbose) start := runutil.NewStart(opts.Env, opts.Stdin, opts.Stdout, opts.Stderr, *opts.Color, *opts.DryRun, *opts.Verbose) return &Context{ opts: opts, run: run, seq: seq, start: start, } } // NewContextFromEnv returns a new context instance based on the given // cmdline environment. func NewContextFromEnv(env *cmdline.Env) *Context { opts := ContextOpts{} initOpts(newContextOpts(), &opts) opts.Stdin = env.Stdin opts.Stdout = env.Stdout opts.Stderr = env.Stderr opts.Timer = env.Timer return NewContext(opts) } // NewDefaultContext returns a new default context. func NewDefaultContext() *Context { return NewContext(ContextOpts{}) } // Clone creates a clone of the given context, overriding select // settings using the given options. func (ctx Context) Clone(opts ContextOpts) *Context { initOpts(&ctx.opts, &opts) return NewContext(opts) } // Color returns the color setting of the context. func (ctx Context) Color() bool { return *ctx.opts.Color } // DryRun returns the dry run setting of the context. func (ctx Context) DryRun() bool { return *ctx.opts.DryRun } // Env returns the environment of the context. func (ctx Context) Env() map[string]string { return ctx.opts.Env } // Gerrit returns the Gerrit instance of the context. func (ctx Context) Gerrit(host string) *gerrit.Gerrit { return gerrit.New(ctx.run, host) } type gitOpt interface { gitOpt() } type AuthorDateOpt string type CommitterDateOpt string type RootDirOpt string func (AuthorDateOpt) gitOpt() {} func (CommitterDateOpt) gitOpt() {} func (RootDirOpt) gitOpt() {} // Git returns a new git instance. 
// // This method accepts one optional argument: the repository root to // use for commands issued by the returned instance. If not specified, // commands will use the current directory as the repository root. func (ctx Context) Git(opts ...gitOpt) *gitutil.Git { rootDir := "" gitCtx := &ctx for _, opt := range opts { switch typedOpt := opt.(type) { case AuthorDateOpt: opts := ContextOpts{} opts.Env = ctx.Env() opts.Env["GIT_AUTHOR_DATE"] = string(typedOpt) gitCtx = ctx.Clone(opts) case CommitterDateOpt: opts := ContextOpts{} opts.Env = ctx.Env() opts.Env["GIT_COMMITTER_DATE"] = string(typedOpt) gitCtx = ctx.Clone(opts) case RootDirOpt: rootDir = string(typedOpt) } } return gitutil.New(gitCtx.run, rootDir) } // Jenkins returns a new Jenkins instance that can be used to // communicate with a Jenkins server running at the given host. func (ctx Context) Jenkins(host string) (*jenkins.Jenkins, error) { return jenkins.New(host) } // Manifest returns the manifest of the context. func (ctx Context) Manifest() string { return *ctx.opts.Manifest } // Run returns the run instance of the context. func (ctx Context) Run() *runutil.Run { return ctx.run } // Seq returns the sequence instance of the context. func (ctx Context) Seq() *runutil.Sequence { return ctx.seq } // Start returns the start instance of the context. func (ctx Context) Start() *runutil.Start { return ctx.start } // Stdin returns the standard input of the context. func (ctx Context) Stdin() io.Reader { return ctx.opts.Stdin } // Stdout returns the standard output of the context. func (ctx Context) Stdout() io.Writer { return ctx.opts.Stdout } // Stderr returns the standard error output of the context. func (ctx Context) Stderr() io.Writer { return ctx.opts.Stderr } // Verbose returns the verbosity setting of the context. func (ctx Context) Verbose() bool { return *ctx.opts.Verbose } // Timer returns the timer associated with the context, which may be nil. 
func (ctx Context) Timer() *timing.Timer {
	return ctx.opts.Timer
}

// TimerPush calls ctx.Timer().Push(name), only if the Timer is non-nil.
func (ctx Context) TimerPush(name string) {
	if t := ctx.Timer(); t != nil {
		t.Push(name)
	}
}

// TimerPop calls ctx.Timer().Pop(), only if the Timer is non-nil.
func (ctx Context) TimerPop() {
	if t := ctx.Timer(); t != nil {
		t.Pop()
	}
}
// Package config loads the application's vendor/novel configuration
// from a YAML file into the package-level DefaultConfig.
package config

// Reference: https://github.com/prometheus/prometheus/blob/76cd5f4c7f123041525f101611a2cc04fd3d5382/config/config.go#L138

import (
	"io/ioutil"

	"github.com/pkg/errors"
	"gopkg.in/yaml.v2"
)

var (
	// DefaultConfig is the config with parsing config.yaml. It is
	// populated by the package init; it remains at its zero value when
	// loading fails.
	DefaultConfig = config{}
)

func init() {
	// Best-effort load: the error is deliberately ignored so that a
	// missing or malformed config file leaves DefaultConfig zero
	// instead of crashing at import time. Callers that must
	// distinguish the cases can call loadFile directly.
	loadFile("config/config.yaml")
}

// config is the top-level configuration schema.
type config struct {
	Vendors []*vendorConfig `yaml:"vendors,omitempty"`
}

// vendorConfig describes one vendor site hosting novels.
type vendorConfig struct {
	Name     string   `yaml:"name"`
	Hostname string   `yaml:"hostname"`
	Novels   []*Novel `yaml:"novels"`
}

// Novel identifies a single novel by display name and index-page URL.
type Novel struct {
	Name     string `yaml:"name"`
	IndexURL string `yaml:"indexURL"`
}

// loadFile reads filename and strictly unmarshals it into
// DefaultConfig (unknown YAML fields are rejected), returning a
// pointer to it. On unmarshal error DefaultConfig may be partially
// populated.
func loadFile(filename string) (*config, error) {
	content, err := ioutil.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	cfg := &DefaultConfig
	// content is already a []byte; the previous []byte(content)
	// conversion was redundant.
	if err := yaml.UnmarshalStrict(content, cfg); err != nil {
		return nil, errors.Wrapf(err, "parsing YAML file %s", filename)
	}
	return cfg, nil
}
// Copyright (c) Jeevanandam M. (https://github.com/jeevatkm)
// go-aah/aah source code and usage is governed by a MIT style
// license that can be found in the LICENSE file.

package aah

import (
	"encoding/json"
	"encoding/xml"
	"errors"
	"fmt"
	"html/template"
	"io"
	"os"
	"path/filepath"
	"strings"
	"sync"

	"aahframework.org/essentials.v0"
)

var (
	// JSONMarshal is used to register external JSON library for Marshalling.
	JSONMarshal func(v interface{}) ([]byte, error)

	// JSONMarshalIndent is used to register external JSON library for Marshal indent.
	JSONMarshalIndent func(v interface{}, prefix, indent string) ([]byte, error)

	// xmlHeaderBytes is the standard XML declaration emitted before every XML body.
	xmlHeaderBytes = []byte(xml.Header)

	// Pools recycle render values across requests to reduce per-request allocations.
	rdrHTMLPool = &sync.Pool{New: func() interface{} { return &HTML{} }}
	rdrJSONPool = &sync.Pool{New: func() interface{} { return &JSON{} }}
	rdrXMLPool  = &sync.Pool{New: func() interface{} { return &XML{} }}
)

type (
	// Data type used for convenient data type of map[string]interface{}
	Data map[string]interface{}

	// Render interface to various rendering classifications for HTTP responses.
	Render interface {
		Render(io.Writer) error
	}

	// RenderFunc type is an adapter to allow the use of regular function as
	// custom Render.
	RenderFunc func(w io.Writer) error

	// Text renders the response as plain text
	Text struct {
		Format string
		Values []interface{}
	}

	// JSON renders the response JSON content.
	JSON struct {
		IsJSONP  bool
		Callback string
		Data     interface{}
	}

	// XML renders the response XML content.
	XML struct {
		Data interface{}
	}

	// Binary renders given path or io.Reader into response and closes the file.
	Binary struct {
		Path   string
		Reader io.Reader
	}

	// HTML renders the given HTML into response with given model data.
	HTML struct {
		Template *template.Template
		Layout   string
		Filename string
		ViewArgs Data
	}
)

//‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
// RenderFunc methods
//___________________________________

// Render method is implementation of Render interface in the adapter type.
func (rf RenderFunc) Render(w io.Writer) error {
	return rf(w)
}

//‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
// Plain Text Render methods
//___________________________________

// Render writes the plain-text body into w. Format is treated as a
// fmt format string when Values are present, otherwise it is written
// verbatim.
func (t *Text) Render(w io.Writer) error {
	var err error
	if len(t.Values) == 0 {
		_, err = fmt.Fprint(w, t.Format)
	} else {
		_, err = fmt.Fprintf(w, t.Format, t.Values...)
	}
	return err
}

//‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
// JSON Render methods
//___________________________________

// Render writes the JSON (or JSONP-wrapped) body into w, honoring the
// "render.pretty" application config for indentation.
func (j *JSON) Render(w io.Writer) error {
	var (
		buf []byte
		err error
	)
	if appConfig.BoolDefault("render.pretty", false) {
		buf, err = JSONMarshalIndent(j.Data, "", " ")
	} else {
		buf, err = JSONMarshal(j.Data)
	}
	if err != nil {
		return err
	}

	if j.IsJSONP {
		if _, err = w.Write([]byte(j.Callback + "(")); err != nil {
			return err
		}
	}

	if _, err = w.Write(buf); err != nil {
		return err
	}

	if !j.IsJSONP {
		return nil
	}
	_, err = w.Write([]byte(");"))
	return err
}

// reset returns the JSON value to its zero state before pooling.
func (j *JSON) reset() {
	j.Data = nil
	j.IsJSONP = false
	j.Callback = ""
}

//‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
// XML Render methods
//___________________________________

// Render writes the XML declaration followed by the marshaled body
// into w, honoring the "render.pretty" application config.
func (x *XML) Render(w io.Writer) error {
	var (
		buf []byte
		err error
	)
	if appConfig.BoolDefault("render.pretty", false) {
		buf, err = xml.MarshalIndent(x.Data, "", " ")
	} else {
		buf, err = xml.Marshal(x.Data)
	}
	if err != nil {
		return err
	}

	if _, err = w.Write(xmlHeaderBytes); err != nil {
		return err
	}
	_, err = w.Write(buf)
	return err
}

// reset returns the XML value to its zero state before pooling.
func (x *XML) reset() {
	x.Data = nil
}

// MarshalXML method is to marshal `aah.Data` into XML.
func (d Data) MarshalXML(e *xml.Encoder, start xml.StartElement) error {
	tokens := []xml.Token{start}

	// Each map entry becomes a <Key>value</Key> element. Map iteration
	// order is random in Go, so element order varies between calls.
	for k, v := range d {
		token := xml.StartElement{Name: xml.Name{Local: strings.Title(k)}}
		tokens = append(tokens, token, xml.CharData(fmt.Sprintf("%v", v)), xml.EndElement{Name: token.Name})
	}

	tokens = append(tokens, xml.EndElement{Name: start.Name})

	var err error
	for _, t := range tokens {
		if err = e.EncodeToken(t); err != nil {
			return err
		}
	}

	// flush to ensure tokens are written
	return e.Flush()
}

//‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
// File and Reader Render methods
//___________________________________

// Render method writes File into HTTP response.
//
// When Reader is set it takes precedence over Path and is closed after
// copying. A relative Path is resolved under <app-base>/static, and
// the resolved value is written back into f.Path (the receiver is
// mutated). Directories are rejected with an error.
func (f *Binary) Render(w io.Writer) error {
	if f.Reader != nil {
		defer ess.CloseQuietly(f.Reader)
		_, err := io.Copy(w, f.Reader)
		return err
	}

	if !filepath.IsAbs(f.Path) {
		f.Path = filepath.Join(AppBaseDir(), "static", f.Path)
	}

	file, err := os.Open(f.Path)
	if err != nil {
		return err
	}
	defer ess.CloseQuietly(file)

	fi, err := file.Stat()
	if err != nil {
		return err
	}
	if fi.IsDir() {
		return fmt.Errorf("'%s' is a directory", f.Path)
	}

	_, err = io.Copy(w, file)
	return err
}

//‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
// HTML Render methods
//___________________________________

// Render method renders the HTML template into HTTP response. An
// empty Layout executes the template directly; otherwise the named
// layout template is executed with the same view arguments.
func (h *HTML) Render(w io.Writer) error {
	if h.Template == nil {
		return errors.New("template is nil")
	}

	if ess.IsStrEmpty(h.Layout) {
		return h.Template.Execute(w, h.ViewArgs)
	}

	return h.Template.ExecuteTemplate(w, h.Layout, h.ViewArgs)
}

// reset returns the HTML value to its zero state before pooling.
func (h *HTML) reset() {
	h.Template = nil
	h.Filename = ""
	h.Layout = ""
	h.ViewArgs = make(Data)
}

//‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾‾
// Render Unexported methods
//___________________________________

// doRender method renders and detects the errors earlier. Writes the
// error info if any.
func (e *engine) doRender(ctx *Context) {
	if ctx.Reply().Rdr != nil {
		ctx.Reply().body = acquireBuffer()
		if err := ctx.Reply().Rdr.Render(ctx.Reply().body); err != nil {
			ctx.Log().Error("Render response body error: ", err)

			// panic is appropriate here since it is handled by the
			// centralized error handler. This is the second spot in the
			// entire aah framework where `panic` is used for propagation,
			// other than the panic interceptor.
			panic(err)
		}
	}
}

// acquireHTML fetches a pooled (or fresh) HTML render value.
func acquireHTML() *HTML {
	return rdrHTMLPool.Get().(*HTML)
}

// acquireJSON fetches a pooled (or fresh) JSON render value.
func acquireJSON() *JSON {
	return rdrJSONPool.Get().(*JSON)
}

// acquireXML fetches a pooled (or fresh) XML render value.
func acquireXML() *XML {
	return rdrXMLPool.Get().(*XML)
}

// releaseRender resets a pooled render value and returns it to its
// pool. Render types without a pool (Text, Binary, RenderFunc) are
// simply dropped.
func releaseRender(r Render) {
	if r != nil {
		switch t := r.(type) {
		case *JSON:
			t.reset()
			rdrJSONPool.Put(t)
		case *HTML:
			t.reset()
			rdrHTMLPool.Put(t)
		case *XML:
			t.reset()
			rdrXMLPool.Put(t)
		}
	}
}

func init() {
	// Registering default standard JSON library; applications may
	// overwrite these hooks with an external JSON implementation.
	JSONMarshal = json.Marshal
	JSONMarshalIndent = json.MarshalIndent
}
package v1 import ( "context" "errors" "fmt" "io" "time" "github.com/h2non/filetype" "github.com/h2non/filetype/matchers" "github.com/traPtitech/trap-collection-server/src/domain" "github.com/traPtitech/trap-collection-server/src/domain/values" "github.com/traPtitech/trap-collection-server/src/repository" "github.com/traPtitech/trap-collection-server/src/service" "github.com/traPtitech/trap-collection-server/src/storage" "golang.org/x/sync/errgroup" ) type GameImage struct { db repository.DB gameRepository repository.Game gameImageRepository repository.GameImage gameImageStorage storage.GameImage } func NewGameImage( db repository.DB, gameRepository repository.Game, gameImageRepository repository.GameImage, gameImageStorage storage.GameImage, ) *GameImage { return &GameImage{ db: db, gameRepository: gameRepository, gameImageRepository: gameImageRepository, gameImageStorage: gameImageStorage, } } func (gi *GameImage) SaveGameImage(ctx context.Context, reader io.Reader, gameID values.GameID) error { err := gi.db.Transaction(ctx, nil, func(ctx context.Context) error { _, err := gi.gameRepository.GetGame(ctx, gameID, repository.LockTypeRecord) if errors.Is(err, repository.ErrRecordNotFound) { return service.ErrInvalidGameID } if err != nil { return fmt.Errorf("failed to get game: %w", err) } imageID := values.NewGameImageID() eg, ctx := errgroup.WithContext(ctx) fileTypePr, fileTypePw := io.Pipe() filePr, filePw := io.Pipe() eg.Go(func() error { defer fileTypePr.Close() fType, err := filetype.MatchReader(fileTypePr) if err != nil { return fmt.Errorf("failed to get file type: %w", err) } _, err = io.ReadAll(fileTypePr) if err != nil { return fmt.Errorf("failed to read file type: %w", err) } var imageType values.GameImageType switch fType.Extension { case matchers.TypeJpeg.Extension: imageType = values.GameImageTypeJpeg case matchers.TypePng.Extension: imageType = values.GameImageTypePng case matchers.TypeGif.Extension: imageType = values.GameImageTypeGif default: return 
service.ErrInvalidFormat } image := domain.NewGameImage( imageID, imageType, time.Now(), ) err = gi.gameImageRepository.SaveGameImage(ctx, gameID, image) if err != nil { return fmt.Errorf("failed to save game image: %w", err) } return nil }) eg.Go(func() error { defer filePr.Close() err = gi.gameImageStorage.SaveGameImage(ctx, filePr, imageID) if err != nil { return fmt.Errorf("failed to save game image file: %w", err) } return nil }) eg.Go(func() error { defer filePw.Close() defer fileTypePw.Close() mw := io.MultiWriter(fileTypePw, filePw) _, err = io.Copy(mw, reader) if err != nil { return fmt.Errorf("failed to copy image: %w", err) } return nil }) err = eg.Wait() if err != nil { return fmt.Errorf("failed to save game image: %w", err) } return nil }) if err != nil { return fmt.Errorf("failed in transaction: %w", err) } return nil } func (gi *GameImage) GetGameImage(ctx context.Context, gameID values.GameID) (values.GameImageTmpURL, error) { _, err := gi.gameRepository.GetGame(ctx, gameID, repository.LockTypeNone) if errors.Is(err, repository.ErrRecordNotFound) { return nil, service.ErrInvalidGameID } if err != nil { return nil, fmt.Errorf("failed to get game: %w", err) } image, err := gi.gameImageRepository.GetLatestGameImage(ctx, gameID, repository.LockTypeNone) if errors.Is(err, repository.ErrRecordNotFound) { return nil, service.ErrNoGameImage } if err != nil { return nil, fmt.Errorf("failed to get game image: %w", err) } tmpURL, err := gi.gameImageStorage.GetTempURL(ctx, image, time.Minute) if err != nil { return nil, fmt.Errorf("failed to get game image temp url: %W", err) } return tmpURL, nil }
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io/ioutil"
	"net/http"
	"strings"
	"time"

	"github.com/jxmoore/testFlightBuildAutomation/models"
)

// processNewBuild drives the full TestFlight workflow: obtain an App
// Store Connect token, poll for the new build, wait for beta review
// approval, swap group access from old builds to the new one, and
// optionally post a Slack notification. Flag pointers (buildNumber,
// buildVersion, post, slackChannel, slackHook, iss, kid) are
// package-level and assumed parsed by the caller.
func processNewBuild() error {

	var slackMsg string

	fmt.Printf("Begining to process new build.\n Current flags are : \nBuildNumber :%v\nBuildVersion: %v\nPostToSlack: %v\nSlack Channel : %v\nSlack WebHook : %v\n",
		*buildNumber, *buildVersion, *post, *slackChannel, *slackHook,
	)

	// skipping kid and iss, treating them as secret.
	fmt.Printf("\n\nAttempting to obtain token...\n")
	token, err := models.IOStoken{}.New(*iss, *kid)
	if err != nil {
		return fmt.Errorf("there was an error obtaining the token : %v", err.Error())
	}

	fmt.Printf("Pulling all builds...\n")
	allBuilds, err := getAllBuilds(token)
	if err != nil {
		if strings.Contains(err.Error(), "403") {
			return fmt.Errorf("verify the JWT payload and headers. A 403 error was returned when attempting to pull the builds")
		}
		return fmt.Errorf("error pulling the builds from testflight : %v", err.Error())
	}

	fmt.Printf("Looking through return set for build %v : %v...\n", *buildVersion, *buildNumber)
	x := 0
	build := allBuilds.FindBuild(*buildNumber)

	// loop and poll for the build (up to ~5 minutes: 5 retries at 60s);
	// if the build is not found in that time terminate.
	for {

		// build.ID is the internal build GUID in testflight. If its empty we have a zero struct.
		if build.ID != "" {
			break
		} else if x > 4 {
			return fmt.Errorf("Unable to locate build : %v", *buildNumber)
		}

		fmt.Println("Currently unable to locate the build... This process will sleep for 60 seconds and re-poll the builds...")
		time.Sleep(60 * time.Second)

		// poll all builds again
		allBuilds, err = getAllBuilds(token)
		if err != nil {
			if strings.Contains(err.Error(), "403") {
				return fmt.Errorf("verify the JWT payload and headers. A 403 error was returned when attempting to pull the builds")
			}
			return fmt.Errorf("error pulling the builds from testflight : %v", err.Error())
		}

		// try to find the build in allBuilds
		build = allBuilds.FindBuild(*buildNumber)
		x++
	}

	fmt.Printf("Found build : %v:%v\n", *buildNumber, build.ID)
	fmt.Println("Obtaining approval state...")
	reviewStatus, err := build.GetReviewStatus(token)
	if err != nil {
		if strings.Contains(err.Error(), "403") {
			return fmt.Errorf("verify the JWT payload and headers. A 403 error was returned when getting the initial beta review status")
		}
		return fmt.Errorf("error getting the beta app review status from testflight : %v", err.Error())
	}

	// The assumption is if its a new build with a new version number it will come in as something other than approved initially.
	// New builds on the same version (ie 1.1.4:665 and then 1.1.4:666) will come in as 'approved' out of the gate as the version number
	// has not been increased.
	//
	// NOTE(review): this branch looks inverted — when the state is NOT
	// "APPROVED" the "has been approved" Slack message is set and the
	// wait loop is skipped, while an already-APPROVED state enters the
	// wait loop (which breaks immediately) and never sets slackMsg.
	// Confirm the intended behavior before changing it.
	if reviewStatus.Data.Attributes.BetaReviewState != "APPROVED" {
		slackMsg = fmt.Sprintf("Build %v has been approved for testing TestFlight\n", *buildNumber)
	} else {
		x := 0
		fmt.Printf("The current approval state is: %v\nLooping waiting for approval to update to 'APPROVED'\n ", reviewStatus.Data.Attributes.BetaReviewState)
		for {
			if reviewStatus.Data.Attributes.BetaReviewState == "APPROVED" {
				break
			} else {

				// Check the token, its only good for 20 minutes.
				if token.CheckTokenExp(time.Second * time.Duration(120)) {
					token, err = token.RefreshToken() //refresh if we have < 120 seconds left
					if err != nil {
						return fmt.Errorf("there was an error refreshing the token during the beta loop : %v", err.Error())
					}
				}

				time.Sleep(time.Second * time.Duration(90))
				reviewStatus, err = build.GetReviewStatus(token)
				if err != nil {
					if strings.Contains(err.Error(), "403") {
						// ideally at some point this would retry with a new token.
						return fmt.Errorf("verify the JWT payload and headers. A 403 error was returned when attempting to pull the beta review status")
					}
					return fmt.Errorf("error getting the beta app review status from testflight : %v", err.Error())
				}
			}

			// Intended to print the status every 15 minutes (10 polls x 90s).
			// NOTE(review): once x reaches 10 it is neither incremented nor
			// reset, so the status prints on every subsequent poll and the
			// `x > 10` reset branch is unreachable. Probably the print case
			// should also do `x = 0`.
			if x == 10 {
				fmt.Printf("Waiting for approval... Current status is : %v\n", reviewStatus.Data.Attributes.BetaReviewState)
			} else if x > 10 {
				x = 0
			} else {
				x++
			}
		}
	}

	// Refresh the token if fewer than 240 seconds remain before the
	// group-access calls below.
	if token.CheckTokenExp(time.Second * time.Duration(240)) {
		token, err = token.RefreshToken()
		if err != nil {
			return fmt.Errorf("there was an error refreshing the token post review loop : %v", err.Error())
		}
	}

	// the guids here should map to your actual testflight groups, these are dummy data
	// TODO - these could be pulled from flags or from a dotfile
	groups := models.RealBetaGroups{[]models.GroupData{
		models.GroupData{Typ: "QA", ID: "2f0a256d-e1gf-619d-v352-d4t4qw911we3"},
		models.GroupData{Typ: "DEV", ID: "b2vc15e3-cf6b-917a-6aec-21vgff9aa1az"},
	}}

	fmt.Printf("Processing group(s) : \n%v\n", groups)
	groupBody, err := json.Marshal(groups)
	if err != nil {
		return fmt.Errorf("error marshelling the group struct : %v", err.Error())
	}

	// Revoke the groups' access to every other build, then grant access
	// to the build being processed.
	for _, bld := range allBuilds.Data {
		if bld.BuildInfo.Version != *buildNumber { // we dont remove access to the build being 'processed'
			resp, err := bld.RemoveAccess(token, bytes.NewBuffer(groupBody))
			if err != nil {
				return fmt.Errorf("there was an error removing access to %v : %v", bld.BuildInfo.Version, err.Error())
			}
			fmt.Printf("Removed access to : %v\n", bld.BuildInfo.Version)
			if resp != "" { // resp should be empty unless there was an unforseen error that wasnt caught in the above
				fmt.Println(resp)
			}
		}
	}

	resp, err := build.GrantAccess(token, bytes.NewBuffer(groupBody))
	if err != nil {
		return fmt.Errorf("There was an error granting the groups access to the build!\n%v", err.Error())
	}
	fmt.Printf("Group access added to build %v\n", build.BuildInfo.Version)
	if resp != "" { // resp should be empty unless there was an unforseen error that wasnt caught in the above
		fmt.Println(resp)
	}

	if *post {
		// hoping here that \n works in slack as a new line...
		slackMsg += fmt.Sprintf("Build %v has been assigned to the appropriate groups within TestFlight!\nAccess to the old builds has been removed.", *buildNumber)
		sl := models.SlackPayload{
			Channel:  *slackChannel,
			Username: "TestFlight",
			IconURL:  "http://pngimg.com/uploads/apple_logo/apple_logo_PNG19694.png",
			SlackAttach: []models.SlackAttachments{
				models.SlackAttachments{
					Fallback: slackMsg,
					Color:    "blue",
					Title:    "A new build has been processed",
					Field:    []models.SlackFields{models.SlackFields{Value: slackMsg}},
				},
			},
		}
		// NOTE(review): PostToSlack's return value (if any) is ignored here.
		sl.PostToSlack(*slackHook)
	}

	return nil
}

// getAllBuilds pulls all of the builds using https://developer.apple.com/documentation/appstoreconnectapi/list_builds and returns a pointer to an AllBuilddata struct.
//
// NOTE(review): the http.Client has no Timeout set, so a stalled
// connection can block indefinitely.
func getAllBuilds(key models.IOStoken) (*models.AllBuilddata, error) {

	client := &http.Client{}
	request, err := http.NewRequest("GET", "https://api.appstoreconnect.apple.com/v1/builds", nil)
	if err != nil {
		return &models.AllBuilddata{}, errors.New(err.Error())
	}

	request.Header.Add("Authorization", "Bearer "+key.SignedToken)
	response, err := client.Do(request)
	if err != nil {
		return &models.AllBuilddata{}, fmt.Errorf("there was an error pulling the builds when calling the /v1/builds endpoint :%v", err.Error())
	}
	defer response.Body.Close()

	if response.StatusCode == 403 {
		return &models.AllBuilddata{}, errors.New("403 unauthorized, verify token")
	}

	body, err := ioutil.ReadAll(response.Body)
	if err != nil {
		return &models.AllBuilddata{}, fmt.Errorf("there was an error reading the response body from the api call to /v1/builds :%v", err.Error())
	}

	allBuilds := models.AllBuilddata{}
	err = json.Unmarshal(body, &allBuilds)
	if err != nil {
		return &models.AllBuilddata{}, fmt.Errorf("there was an error pulling unmarshelling the response from the /v1/builds/ endpoint :%v", err.Error())
	}

	return &allBuilds, nil
}
package cmd

import (
	"fmt"
	"strings"

	"github.com/fugue/fugue-client/client/environments"
	"github.com/fugue/fugue-client/format"
	"github.com/fugue/fugue-client/models"
	"github.com/spf13/cobra"
)

// createGoogleEnvironmentOptions holds the flag values for the
// "create google environment" command.
type createGoogleEnvironmentOptions struct {
	Name                string
	ServiceAccountEmail string
	ProjectID           string
	ScanInterval        int64
	ComplianceFamilies  []string
}

// NewCreateGoogleEnvironmentCommand returns a command that creates a
// Google environment and prints the created environment's attributes
// as a table.
func NewCreateGoogleEnvironmentCommand() *cobra.Command {

	var opts createGoogleEnvironmentOptions

	cmd := &cobra.Command{
		Use: "environment",
		// Fixed typo: "an Google" -> "a Google".
		Short:   "Create a Google environment",
		Aliases: []string{"env"},
		Run: func(cmd *cobra.Command, args []string) {

			client, auth := getClient()

			// A zero interval means scheduled scanning is disabled; the
			// interval is only sent to the API when scheduling is on.
			scanScheduleEnabled := opts.ScanInterval != 0
			var scanIntervalPtr *int64
			if scanScheduleEnabled {
				scanIntervalPtr = &opts.ScanInterval
			}

			params := environments.NewCreateEnvironmentParams()
			params.Environment = &models.CreateEnvironmentInput{
				ComplianceFamilies:     opts.ComplianceFamilies,
				Name:                   opts.Name,
				Provider:               "google",
				ScanInterval:           scanIntervalPtr,
				SurveyResourceTypes:    []string{},
				RemediateResourceTypes: []string{},
				ScanScheduleEnabled:    &scanScheduleEnabled,
				ProviderOptions: &models.ProviderOptions{
					Google: &models.ProviderOptionsGoogle{
						ServiceAccountEmail: opts.ServiceAccountEmail,
						ProjectID:           opts.ProjectID,
					},
				},
			}

			resp, err := client.Environments.CreateEnvironment(params, auth)
			CheckErr(err)

			env := resp.Payload
			families := strings.Join(env.ComplianceFamilies, ", ")

			items := []interface{}{
				Item{"ENVIRONMENT_ID", env.ID},
				Item{"NAME", env.Name},
				Item{"PROVIDER", env.Provider},
				Item{"SCAN_INTERVAL", env.ScanInterval},
				Item{"LAST_SCAN_AT", format.Unix(env.LastScanAt)},
				Item{"NEXT_SCAN_AT", format.Unix(env.NextScanAt)},
				Item{"SCAN_STATUS", env.ScanStatus},
				Item{"COMPLIANCE_FAMILIES", families},
				Item{"PROJECT_ID", env.ProviderOptions.Google.ProjectID},
				Item{"SERVICE_ACCOUNT_EMAIL", env.ProviderOptions.Google.ServiceAccountEmail},
			}

			table, err := format.Table(format.TableOpts{
				Rows:         items,
				Columns:      []string{"Attribute", "Value"},
				ShowHeader:   true,
				MaxCellWidth: 70,
			})
			CheckErr(err)

			for _, tableRow := range table {
				fmt.Println(tableRow)
			}
		},
	}

	cmd.Flags().StringVar(&opts.Name, "name", "", "Environment name")
	cmd.Flags().StringVar(&opts.ServiceAccountEmail, "service-account-email", "", "Google Service Account Email")
	// Fixed typo in help text: "acccount" -> "account".
	cmd.Flags().StringVar(&opts.ProjectID, "project-id", "", "Google Project ID (if not given, the project_id is extracted from the service account email)")
	cmd.Flags().Int64Var(&opts.ScanInterval, "scan-interval", 86400, "Scan interval (seconds)")
	cmd.Flags().StringSliceVar(&opts.ComplianceFamilies, "compliance-families", []string{}, "Compliance families")
	cmd.MarkFlagRequired("name")
	cmd.MarkFlagRequired("service-account-email")

	return cmd
}

func init() {
	googleCmd.AddCommand(NewCreateGoogleEnvironmentCommand())
}
package util_test

import (
	"net/http"
	"testing"

	"goscrum/server/util"

	"github.com/pkg/errors"
	. "github.com/smartystreets/goconvey/convey"
)

// TestRedirect exercises the util response helpers (Redirect, Success,
// ClientError, ServerError), asserting that each yields the expected
// HTTP status code and a nil error.
func TestRedirect(t *testing.T) {
	Convey("Response -> Redirect", t, func() {
		res, err := util.Redirect("test.png")

		So(res.StatusCode, ShouldEqual, http.StatusTemporaryRedirect)
		So(err, ShouldBeNil)
	})

	Convey("Response -> Success", t, func() {
		res, err := util.Success("test.png")

		So(res.StatusCode, ShouldEqual, http.StatusOK)
		So(err, ShouldBeNil)
	})

	Convey("Response -> ClientError", t, func() {
		// ClientError echoes back the status code it is given.
		res, err := util.ClientError(http.StatusBadGateway)

		So(res.StatusCode, ShouldEqual, http.StatusBadGateway)
		So(err, ShouldBeNil)
	})

	Convey("Response -> ServerError", t, func() {
		res, err := util.ServerError(errors.New("error"))

		So(res.StatusCode, ShouldEqual, http.StatusInternalServerError)
		So(err, ShouldBeNil)
	})
}
// Copyright 2019 Yunion // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package multicloud import ( "time" "yunion.io/x/pkg/errors" "yunion.io/x/onecloud/pkg/cloudprovider" ) type SRegion struct { SResourceBase STagBase } func (r *SRegion) GetIDiskById(id string) (cloudprovider.ICloudDisk, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIDiskById") } func (r *SRegion) GetIHostById(id string) (cloudprovider.ICloudHost, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIHostById") } func (r *SRegion) GetIHosts() ([]cloudprovider.ICloudHost, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIHosts") } func (r *SRegion) GetISnapshotById(snapshotId string) (cloudprovider.ICloudSnapshot, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetISnapshotById") } func (r *SRegion) GetISnapshots() ([]cloudprovider.ICloudSnapshot, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetISnapshots") } func (r *SRegion) GetIStorageById(id string) (cloudprovider.ICloudStorage, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIStorageById") } func (r *SRegion) GetIStoragecacheById(id string) (cloudprovider.ICloudStoragecache, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIStoragecacheById") } func (r *SRegion) GetIStoragecaches() ([]cloudprovider.ICloudStoragecache, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, 
"GetIStoragecaches") } func (r *SRegion) GetIStorages() ([]cloudprovider.ICloudStorage, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIStorages") } func (r *SRegion) GetIVMById(id string) (cloudprovider.ICloudVM, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIVMById") } func (r *SRegion) CreateSnapshotPolicy(input *cloudprovider.SnapshotPolicyInput) (string, error) { return "", errors.Wrapf(cloudprovider.ErrNotImplemented, "CreateSnapshotPolicy") } func (r *SRegion) UpdateSnapshotPolicy(input *cloudprovider.SnapshotPolicyInput, snapshotPolicyId string) error { return errors.Wrapf(cloudprovider.ErrNotImplemented, "UpdateSnapshotPolicy") } func (r *SRegion) GetISnapshotPolicyById(snapshotPolicyId string) (cloudprovider.ICloudSnapshotPolicy, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetISnapshotPolicyById") } func (self *SRegion) GetISnapshotPolicies() ([]cloudprovider.ICloudSnapshotPolicy, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetISnapshotPolicies") } func (self *SRegion) DeleteSnapshotPolicy(string) error { return errors.Wrapf(cloudprovider.ErrNotImplemented, "DeleteSnapshotPolicy") } func (self *SRegion) ApplySnapshotPolicyToDisks(snapshotPolicyId string, diskId string) error { return errors.Wrapf(cloudprovider.ErrNotImplemented, "ApplySnapshotPolicyToDisks") } func (self *SRegion) CancelSnapshotPolicyToDisks(snapshotPolicyId string, diskId string) error { return errors.Wrapf(cloudprovider.ErrNotImplemented, "CancelSnapshotPolicyToDisks") } func (self *SRegion) GetISkus() ([]cloudprovider.ICloudSku, error) { return nil, errors.Wrapf(cloudprovider.ErrNotSupported, "GetISkus") } func (self *SRegion) CreateISku(opts *cloudprovider.SServerSkuCreateOption) (cloudprovider.ICloudSku, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "CreateISku") } func (self *SRegion) GetINetworkInterfaces() ([]cloudprovider.ICloudNetworkInterface, error) { return 
nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetINetworkInterfaces") } func (self *SRegion) GetIDBInstances() ([]cloudprovider.ICloudDBInstance, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIDBInstances") } func (self *SRegion) GetIDBInstanceById(instanceId string) (cloudprovider.ICloudDBInstance, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIDBInstanceById") } func (self *SRegion) GetIDBInstanceBackups() ([]cloudprovider.ICloudDBInstanceBackup, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIDBInstanceBackups") } func (self *SRegion) GetIDBInstanceBackupById(backupId string) (cloudprovider.ICloudDBInstanceBackup, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIDBInstanceBackupById") } func (self *SRegion) GetIElasticcaches() ([]cloudprovider.ICloudElasticcache, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIElasticcaches") } func (self *SRegion) CreateIDBInstance(desc *cloudprovider.SManagedDBInstanceCreateConfig) (cloudprovider.ICloudDBInstance, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "CreateIDBInstance") } func (self *SRegion) CreateIElasticcaches(ec *cloudprovider.SCloudElasticCacheInput) (cloudprovider.ICloudElasticcache, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "CreateIElasticcaches") } func (self *SRegion) GetIElasticcacheById(id string) (cloudprovider.ICloudElasticcache, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIElasticcacheById") } func (self *SRegion) GetICloudEvents(start time.Time, end time.Time, withReadEvent bool) ([]cloudprovider.ICloudEvent, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudEvents") } func (self *SRegion) GetICloudQuotas() ([]cloudprovider.ICloudQuota, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudQuotas") } func (self *SRegion) CreateInternetGateway() 
(cloudprovider.ICloudInternetGateway, error) { return nil, errors.Wrapf(cloudprovider.ErrNotSupported, "CreateInternetGateway") } func (self *SRegion) GetICloudFileSystems() ([]cloudprovider.ICloudFileSystem, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudFileSystems") } func (self *SRegion) GetICloudFileSystemById(id string) (cloudprovider.ICloudFileSystem, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudFileSystemById") } func (self *SRegion) GetICloudAccessGroups() ([]cloudprovider.ICloudAccessGroup, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudAccessGroups") } func (self *SRegion) GetICloudAccessGroupById(id string) (cloudprovider.ICloudAccessGroup, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudAccessGroupById") } func (self *SRegion) CreateICloudAccessGroup(opts *cloudprovider.SAccessGroup) (cloudprovider.ICloudAccessGroup, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "CreateICloudAccessGroup") } func (self *SRegion) CreateICloudFileSystem(opts *cloudprovider.FileSystemCraeteOptions) (cloudprovider.ICloudFileSystem, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "CreateICloudFileSystem") } func (self *SRegion) GetICloudWafIPSets() ([]cloudprovider.ICloudWafIPSet, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudWafIPSets") } func (self *SRegion) GetICloudWafRegexSets() ([]cloudprovider.ICloudWafRegexSet, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudWafRegexSets") } func (self *SRegion) GetICloudWafInstances() ([]cloudprovider.ICloudWafInstance, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudWafInstances") } func (self *SRegion) GetICloudWafInstanceById(id string) (cloudprovider.ICloudWafInstance, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudWafInstanceById") } func (self 
*SRegion) CreateICloudWafInstance(opts *cloudprovider.WafCreateOptions) (cloudprovider.ICloudWafInstance, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "CreateICloudWafInstance") } func (self *SRegion) GetICloudWafRuleGroups() ([]cloudprovider.ICloudWafRuleGroup, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudWafRuleGroups") } func (self *SRegion) GetICloudMongoDBs() ([]cloudprovider.ICloudMongoDB, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudMongoDBs") } func (self *SRegion) GetICloudMongoDBById(id string) (cloudprovider.ICloudMongoDB, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetICloudMongoDBById") } func (self *SRegion) GetIElasticSearchs() ([]cloudprovider.ICloudElasticSearch, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIElasticSearchs") } func (self *SRegion) GetIElasticSearchById(id string) (cloudprovider.ICloudElasticSearch, error) { return nil, errors.Wrapf(cloudprovider.ErrNotImplemented, "GetIElasticSearchById") }
package proto import ( "packet" ) type MsgSceneRotPos struct { rotX int16 rotY int16 rotZ int16 posX int16 posY int16 posZ int16 } func (this *MsgSceneRotPos) Encode() []byte { pack := packet.NewWriteBuff(64) pack.WriteInt16(this.rotX) pack.WriteInt16(this.rotY) pack.WriteInt16(this.rotZ) pack.WriteInt16(this.posX) pack.WriteInt16(this.posY) pack.WriteInt16(this.posZ) return pack.ReadBytes() } func MsgSceneRotPosDecode(pack *packet.Packet) *MsgSceneRotPos { msgSceneRotPos := &MsgSceneRotPos{} msgSceneRotPos.rotX = pack.ReadInt16() msgSceneRotPos.rotY = pack.ReadInt16() msgSceneRotPos.rotZ = pack.ReadInt16() msgSceneRotPos.posX = pack.ReadInt16() msgSceneRotPos.posY = pack.ReadInt16() msgSceneRotPos.posZ = pack.ReadInt16() return msgSceneRotPos } func (this *MsgSceneRotPos) SetRotX(rotX int16) { this.rotX = rotX } func (this *MsgSceneRotPos) GetRotX() int16 { return this.rotX } func (this *MsgSceneRotPos) SetRotY(rotY int16) { this.rotY = rotY } func (this *MsgSceneRotPos) GetRotY() int16 { return this.rotY } func (this *MsgSceneRotPos) SetRotZ(rotZ int16) { this.rotZ = rotZ } func (this *MsgSceneRotPos) GetRotZ() int16 { return this.rotZ } func (this *MsgSceneRotPos) SetPosX(posX int16) { this.posX = posX } func (this *MsgSceneRotPos) GetPosX() int16 { return this.posX } func (this *MsgSceneRotPos) SetPosY(posY int16) { this.posY = posY } func (this *MsgSceneRotPos) GetPosY() int16 { return this.posY } func (this *MsgSceneRotPos) SetPosZ(posZ int16) { this.posZ = posZ } func (this *MsgSceneRotPos) GetPosZ() int16 { return this.posZ }
package main import ( "encoding/xml" "mime/multipart" "reflect" "strings" "golang.org/x/net/context" "github.com/PuerkitoBio/goquery" "github.com/golang/glog" ) type GoFeedParser struct { FeedReader } func (e GoFeedParser) ParseFeed(ctx context.Context, feedFile multipart.File) { e.waitGroup.Add(1) e.parserState.SetStat("reading-uri", 1) go func() { defer func() { glog.Infoln("Uri reader finished") e.waitGroup.Done() e.parserState.SetStat("reading-uri", -1) feedFile.Close() }() glog.Infoln("Uri reader started") XMLParse(ctx, feedFile, e.productExtractorChan, e.startTokensChan, YML_NAMES_MAP, reflect.TypeOf(GoOffer{})) }() } type GoOffer struct { XMLName xml.Name `xml:"offer"` Id string `xml:"id,attr"` Available string `xml:"available,attr"` Bid string `xml:"bid,attr"` Uri string `xml:"url"` Price string `xml:"price"` CurrencyId string `xml:"currencyId"` CategoryId string `xml:"categoryId"` Picture string `xml:"picture"` Store string `xml:"store"` Pickup string `xml:"pickup"` Delivery string `xml:"delivery"` Name string `xml:"name"` Description string `xml:"description"` Attributes []Attribute } func (o *GoOffer) GetProductInfo() (interface{}, error) { body, err := GetBody(o.Uri) if err != nil { return nil, err } bodyReader := strings.NewReader(body) doc, err := goquery.NewDocumentFromReader(bodyReader) if err != nil { return nil, err } description := doc.Find(".product-description__item .text").First().Text() o.Description = description attributeHandler := func(i int, s *goquery.Selection) { name := s.Find(".properties-table__title").First().Text() value := s.Find(".properties-table__td").Last().Text() if len(value) >= 200 { return } o.Attributes = append(o.Attributes, Attribute{Name: name, Value: value}) } doc.Find(".properties-table tr").Each(attributeHandler) return o, nil }
// Demonstrates branching on an integer value.
package main

import "fmt"

// main prints which of three ranges the constant x falls into:
// exactly 66, at most 65, or anything else.
func main() {
	x := 66
	switch {
	case x == 66:
		fmt.Println("x = 66")
	case x <= 65:
		fmt.Println("x <= 65")
	default:
		fmt.Println("x not 66")
	}
}
package admin import ( "github.com/kataras/iris" "github.com/mojocn/base64Captcha" "iris_test/models" "iris_test/common" ) type CaptchaResponse struct { CaptchaId string `json:"captchaId"` //验证码Id ImageUrl string `json:"imageUrl"` //验证码图片url } //登录 func Login(c iris.Context){ if(c.Method() == "POST"){ idKeyDTemp := common.GetSession(c,"captcha_code") if(idKeyDTemp == nil){ c.JSON(common.JsonData(false, "", "验证码不正确")) return } idKeyD := idKeyDTemp.(string) verifyValue := c.FormValue("captcha") verifyResult := base64Captcha.VerifyCaptcha(idKeyD, verifyValue) if !verifyResult { c.JSON(common.JsonData(false, "", "验证码不正确")) return } username := c.FormValue("name") password := c.FormValue("pwd") if(username == ""){ c.JSON(common.JsonData(false, "", "请输入用户名")) return } if(password == ""){ c.JSON(common.JsonData(false, "", "请输入密码")) return } admin_info := models.GetAdminByUserName(username, password) if(admin_info.Id <= 0 || admin_info.Status != 1){ c.JSON(common.JsonData(false, "", "用户名或密码不正确")) return } common.SetSession(c, "admin_info", admin_info) c.JSON(common.JsonData(true, admin_info, "登录成功")) }else{ c.View("admin/login/login.html") } } //退出登录 func Logout(c iris.Context){ common.DelSession(c, "admin_info") c.Redirect("/admin/login", iris.StatusFound) return } //验证码 func Captcha(c iris.Context){ //config struct for digits //数字验证码配置 var configD = base64Captcha.ConfigDigit{ Height: 60, Width: 120, MaxSkew: 0.7, DotCount: 80, CaptchaLen: 3, } /*//config struct for audio //声音验证码配置 var configA = base64Captcha.ConfigAudio{ CaptchaLen: 6, Language: "zh", } //config struct for Character //字符,公式,验证码配置 var configC = base64Captcha.ConfigCharacter{ Height: 60, Width: 240, //const CaptchaModeNumber:数字,CaptchaModeAlphabet:字母,CaptchaModeArithmetic:算术,CaptchaModeNumberAlphabet:数字字母混合. 
Mode: base64Captcha.CaptchaModeNumber, ComplexOfNoiseText: base64Captcha.CaptchaComplexLower, ComplexOfNoiseDot: base64Captcha.CaptchaComplexLower, IsShowHollowLine: false, IsShowNoiseDot: false, IsShowNoiseText: false, IsShowSlimeLine: false, IsShowSineLine: false, CaptchaLen: 6, }*/ /*//创建声音验证码 //GenerateCaptcha 第一个参数为空字符串,包会自动在服务器一个随机种子给你产生随机uiid. idKeyA, capA := base64Captcha.GenerateCaptcha("", configA) //以base64编码 base64stringA := base64Captcha.CaptchaWriteToBase64Encoding(capA) //创建字符公式验证码. //GenerateCaptcha 第一个参数为空字符串,包会自动在服务器一个随机种子给你产生随机uiid. idKeyC, capC := base64Captcha.GenerateCaptcha("", configC) //以base64编码 base64stringC := base64Captcha.CaptchaWriteToBase64Encoding(capC)*/ //创建数字验证码. //GenerateCaptcha 第一个参数为空字符串,包会自动在服务器一个随机种子给你产生随机uiid. idKeyD, capD := base64Captcha.GenerateCaptcha("", configD) //以base64编码 base64stringD := base64Captcha.CaptchaWriteToBase64Encoding(capD) common.SetSession(c,"captcha_code", idKeyD) c.JSON(common.JsonData(true, base64stringD, "操作成功")) //fmt.Println(idKeyA, base64stringA, "\n") //fmt.Println(idKeyC, base64stringC, "\n") //fmt.Println(idKeyD, base64stringD, "\n") }
package main

import (
	"html/template"
	"log"
	"os"
)

// Page is the data model handed to the templates.
type Page struct {
	Title string
}

// templates holds every template parsed from the templates/ directory;
// template.Must panics at startup if parsing fails.
var templates = template.Must(template.ParseGlob("templates/*"))

// main renders template.html with a sample Page to standard output.
func main() {
	page := Page{Title: "Heading"}
	if err := templates.ExecuteTemplate(os.Stdout, "template.html", page); err != nil {
		log.Fatal("Cannot Get View", err)
	}
}
package mb

import (
	"context"
	"crypto/rand"
	"encoding/base64"
	"fmt"
	"io"
	"net/http"
	"net/http/cookiejar"
	"net/url"
	"strings"

	"github.com/evcc-io/evcc/util"
	"github.com/evcc-io/evcc/util/request"
	cv "github.com/nirasan/go-oauth-pkce-code-verifier"
	"golang.org/x/net/publicsuffix"
	"golang.org/x/oauth2"
)

// https://github.com/TA2k/ioBroker.smart-eq
// https://id.mercedes-benz.com/.well-known/openid-configuration

// OAuthURI is the base URL of the Mercedes-Benz CIAM identity provider.
const OAuthURI = "https://id.mercedes-benz.com"

// Identity performs the OAuth2 authorization-code + PKCE login against the
// Mercedes-Benz identity provider and afterwards serves tokens via the
// embedded oauth2.TokenSource.
type Identity struct {
	*request.Helper
	oc *oauth2.Config
	oauth2.TokenSource
}

// NewIdentity creates Mercedes Benz identity
func NewIdentity(log *util.Logger, oc *oauth2.Config) *Identity {
	return &Identity{
		Helper: request.NewHelper(log),
		oc:     oc,
	}
}

// state returns a random URL-safe string used as the OAuth2 state parameter.
// github.com/uhthomas/tesla
func state() string {
	var b [9]byte
	if _, err := io.ReadFull(rand.Reader, b[:]); err != nil {
		panic(err)
	}
	return base64.RawURLEncoding.EncodeToString(b[:])
}

// Login runs the full browser-less login flow: it fetches the authorization
// URL to obtain a "resume" path, posts the username and then the password to
// the CIAM endpoints, exchanges the resulting code for a token, and installs
// a refreshing TokenSource on success. The flow is written as a chain of
// `if err == nil` steps so the first failure short-circuits everything after
// it while still sharing the single final `return err`.
func (v *Identity) Login(user, password string) error {
	// The login flow relies on session cookies; lazily install a cookie jar.
	if v.Client.Jar == nil {
		var err error
		v.Client.Jar, err = cookiejar.New(&cookiejar.Options{
			PublicSuffixList: publicsuffix.List,
		})
		if err != nil {
			return err
		}
	}

	// PKCE verifier; note this local deliberately shadows the imported
	// `cv` package from here on.
	cv, err := cv.CreateCodeVerifier()
	if err != nil {
		return err
	}

	uri := v.oc.AuthCodeURL(state(), oauth2.AccessTypeOffline,
		oauth2.SetAuthURLParam("code_challenge", cv.CodeChallengeS256()),
		oauth2.SetAuthURLParam("code_challenge_method", "S256"),
	)

	// Step 1: GET the authorize URL and capture the "resume" query parameter
	// from the redirect instead of following it.
	var resume string
	if err == nil {
		var param request.InterceptResult
		v.Client.CheckRedirect, param = request.InterceptRedirect("resume", false)

		if _, err = v.Get(uri); err == nil {
			resume, err = param()
		}
		v.Client.CheckRedirect = nil
	}

	// Credentials payload; the password is only filled in for step 3.
	data := struct {
		Username   string `json:"username"`
		Password   string `json:"password,omitempty"`
		RememberMe bool   `json:"rememberMe,omitempty"`
	}{
		Username: user,
	}

	// res is reused for both the username and password responses.
	var res struct {
		Result, Token string
		Errors        []struct{ Key string }
	}

	// Step 2: submit the username.
	var req *http.Request
	if err == nil {
		uri = fmt.Sprintf("%s/ciam/auth/login/user", OAuthURI)
		req, err = request.New(http.MethodPost, uri, request.MarshalJSON(data), request.JSONEncoding)
		if err == nil {
			if err = v.DoJSON(req, &res); err != nil && len(res.Errors) > 0 {
				// Prefer the server-provided error key, wrapping the transport error.
				err = fmt.Errorf("%s: %w", string(res.Errors[0].Key), err)
			}
		}
	}

	// Step 3: submit the password for the same session.
	if err == nil {
		data.Password = password
		data.RememberMe = true

		uri = fmt.Sprintf("%s/ciam/auth/login/pass", OAuthURI)
		req, err = request.New(http.MethodPost, uri, request.MarshalJSON(data), request.JSONEncoding)
		if err == nil {
			if err = v.DoJSON(req, &res); err != nil && len(res.Errors) > 0 {
				err = fmt.Errorf("%s: %w", string(res.Errors[0].Key), err)
			}
		}
	}

	// A non-empty Result without a Token indicates an extra step (e.g. consent)
	// that this flow cannot complete.
	if err == nil && res.Token == "" && res.Result != "" {
		err = fmt.Errorf("missing token: %s", res.Token)
	}

	// Step 4: resume the authorization with the login token and intercept the
	// authorization "code" from the final redirect.
	var code string
	if err == nil {
		params := url.Values{
			"token": {res.Token},
		}

		var param request.InterceptResult
		v.Client.CheckRedirect, param = request.InterceptRedirect("code", true)

		uri := OAuthURI + resume
		if _, err = v.Post(uri, request.FormContent, strings.NewReader(params.Encode())); err == nil {
			code, err = param()
		}
		v.Client.CheckRedirect = nil
	}

	// Step 5: exchange the code for tokens using the PKCE verifier.
	// (CodeChallengePlain returns the raw verifier string, matching the S256
	// challenge sent above — presumably intentional in the upstream library.)
	var token *oauth2.Token
	if err == nil {
		ctx, cancel := context.WithTimeout(
			context.WithValue(context.Background(), oauth2.HTTPClient, v.Client),
			request.Timeout)
		defer cancel()

		token, err = v.oc.Exchange(ctx, code,
			oauth2.SetAuthURLParam("code_verifier", cv.CodeChallengePlain()),
		)
	}

	if err == nil {
		v.TokenSource = v.oc.TokenSource(context.Background(), token)
	}

	return err
}
package Controller import ( "1/Model" _struct "1/struct" "fmt" "github.com/gin-gonic/gin" "net/http" "os" "strconv" ) func CreatVideo(context *gin.Context) { var video _struct.Video err := context.ShouldBind(&video);if err!=nil{ context.String(5000,"绑定失败") return } //读取上传的视频 v,err :=context.FormFile("video") if err!=nil{ context.String(5000,"文件读取失败") return } videoName := v.Filename //读取上传的封面 cover,err :=context.FormFile("cover") if err!=nil{ context.String(5000,"文件读取失败") return } coverName := cover.Filename //获取当前用户cookie username,err := context.Request.Cookie("uid") if err!=nil{ context.String(5001,"获取cookie失败") return } uid, err := strconv.Atoi(username.Value);if err!=nil{ context.String(http.StatusBadRequest, "Error:%s", err.Error()) return } video.UID=uid //保存视频 url := "./"+video.Part+"/"+username.Value+"/"+videoName fmt.Println(url) //存入数据库 video.VideoURL=url+"/"+videoName video.CoverURL=url+"/"+coverName err = Model.CreateVideo(video);if err!=nil{ context.String(http.StatusBadRequest, "数据写入失败 Error:%s", err.Error()) return } //继续保存视频 err=os.MkdirAll(url,0777);if err!=nil{ context.String(http.StatusBadRequest, "文件夹创建失败 Error:%s", err.Error()) } if err := context.SaveUploadedFile(v, url+"/"+videoName); err != nil { context.String(http.StatusBadRequest, "视频保存失败 Error:%s", err.Error()) return } if err := context.SaveUploadedFile(cover, url+"/"+coverName); err != nil { context.String(http.StatusBadRequest, "封面保存失败 Error:%s", err.Error()) return } context.JSON(http.StatusOK, "上传文件成功") }
/*
Copyright 2023 Gravitational, Inc.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

	http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package main

import (
	"context"
	"net/http"
	"net/url"
	"os"
	"os/exec"
	"path"
	"strings"
	"testing"

	"github.com/google/uuid"
	"github.com/stretchr/testify/require"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/rest"
	clientcmdapi "k8s.io/client-go/tools/clientcmd/api"

	"github.com/gravitational/teleport/api/types"
	"github.com/gravitational/teleport/api/utils/keypaths"
	"github.com/gravitational/teleport/lib/kube/kubeconfig"
	"github.com/gravitational/teleport/lib/srv/alpnproxy/common"
)

// testProxyKube exercises "tsh proxy kube" both with an explicit kube cluster
// argument and without one (relying on prior "tsh kube login"s). In each case
// the spawned command is intercepted via setCmdRunner and its KUBECONFIG is
// validated by sending a real request through the local proxy.
func (p *kubeTestPack) testProxyKube(t *testing.T) {
	// Set default kubeconfig to a non-exist file to avoid loading other things.
	t.Setenv("KUBECONFIG", path.Join(os.Getenv(types.HomeEnvVar), uuid.NewString()))

	// Test "tsh proxy kube root-cluster1".
	t.Run("with kube cluster arg", func(t *testing.T) {
		ctx, cancel := context.WithCancel(context.Background())
		defer cancel()
		t.Cleanup(cancel)

		// Instead of executing the subcommand, inspect the environment it
		// would have been launched with.
		validateCmd := func(cmd *exec.Cmd) error {
			config := kubeConfigFromCmdEnv(t, cmd)
			checkKubeLocalProxyConfig(t, p.suite, config, p.rootClusterName, p.rootKubeCluster1)
			return nil
		}
		err := Run(
			ctx,
			[]string{"proxy", "kube", p.rootKubeCluster1, "--insecure"},
			setCmdRunner(validateCmd),
		)
		require.NoError(t, err)
	})

	// Test "tsh proxy kube" after "tsh login"s.
	t.Run("without kube cluster arg", func(t *testing.T) {
		ctx, cancel := context.WithCancel(context.Background())
		defer cancel()
		t.Cleanup(cancel)

		// Log into one root and one leaf kube cluster first; the proxy should
		// then pick up both without an explicit cluster argument.
		require.NoError(t, Run(ctx, []string{"kube", "login", p.rootKubeCluster2, "--insecure"}))
		require.NoError(t, Run(ctx, []string{"kube", "login", p.leafKubeCluster, "-c", p.leafClusterName, "--insecure"}))

		validateCmd := func(cmd *exec.Cmd) error {
			config := kubeConfigFromCmdEnv(t, cmd)
			checkKubeLocalProxyConfig(t, p.suite, config, p.rootClusterName, p.rootKubeCluster2)
			checkKubeLocalProxyConfig(t, p.suite, config, p.leafClusterName, p.leafKubeCluster)
			return nil
		}
		err := Run(
			ctx,
			[]string{"proxy", "kube", "--insecure"},
			setCmdRunner(validateCmd),
		)
		require.NoError(t, err)
	})
}

// kubeConfigFromCmdEnv extracts the KUBECONFIG path from the command's
// environment, asserts it is a tsh profile kubeconfig path, and loads it.
// Fails the test if no KUBECONFIG entry is present.
func kubeConfigFromCmdEnv(t *testing.T, cmd *exec.Cmd) *clientcmdapi.Config {
	t.Helper()

	for _, env := range cmd.Env {
		if !strings.HasPrefix(env, "KUBECONFIG=") {
			continue
		}
		path := strings.TrimPrefix(env, "KUBECONFIG=")
		isProfilePath, err := keypaths.IsProfileKubeConfigPath(path)
		require.NoError(t, err)
		require.True(t, isProfilePath)

		config, err := kubeconfig.Load(path)
		require.NoError(t, err)
		return config
	}

	require.Fail(t, "no KUBECONFIG found")
	return nil
}

// checkKubeLocalProxyConfig validates one teleport/kube cluster entry of the
// generated kubeconfig by issuing a request through the local proxy.
func checkKubeLocalProxyConfig(t *testing.T, s *suite, config *clientcmdapi.Config, teleportCluster, kubeCluster string) {
	t.Helper()

	sendRequestToKubeLocalProxy(t, config, teleportCluster, kubeCluster)
}

// sendRequestToKubeLocalProxy builds a kubernetes client from the kubeconfig
// entry for (teleportCluster, kubeCluster) — client certs, SNI, and proxy URL
// all come from the generated config — and lists pods in "default" to prove
// the local proxy path works end to end.
func sendRequestToKubeLocalProxy(t *testing.T, config *clientcmdapi.Config, teleportCluster, kubeCluster string) {
	t.Helper()

	contextName := kubeconfig.ContextName(teleportCluster, kubeCluster)

	proxyURL, err := url.Parse(config.Clusters[contextName].ProxyURL)
	require.NoError(t, err)

	tlsClientConfig := rest.TLSClientConfig{
		CAData:     config.Clusters[contextName].CertificateAuthorityData,
		CertData:   config.AuthInfos[contextName].ClientCertificateData,
		KeyData:    config.AuthInfos[contextName].ClientKeyData,
		ServerName: common.KubeLocalProxySNI(teleportCluster, kubeCluster),
	}
	client, err := kubernetes.NewForConfig(&rest.Config{
		Host:            "https://" + teleportCluster,
		TLSClientConfig: tlsClientConfig,
		Proxy:           http.ProxyURL(proxyURL),
	})
	require.NoError(t, err)

	// NOTE(review): require.Nil on an error — require.NoError would give a
	// better failure message; kept as-is here.
	resp, err := client.CoreV1().Pods("default").List(context.Background(), metav1.ListOptions{})
	require.Nil(t, err)
	require.GreaterOrEqual(t, len(resp.Items), 1)
}
// Copyright 2017 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package dotslash re-exports a value from a package referenced via a
// relative ("./") import path. Relative imports are only valid outside
// modules (e.g. compiler test data), which is presumably the purpose here.
package dotslash

import (
	"./simple"
)

var (
	// A mirrors simple.A from the relatively-imported package.
	A = simple.A
)
package ghost import ( "bytes" "encoding/json" "fmt" "github.com/shihtzu-systems/bright/pkg/bungo" "github.com/shihtzu-systems/bright/pkg/tower" log "github.com/sirupsen/logrus" "strings" ) func NewGhost(id, sessionId string) Ghost { return Ghost{ Id: id, SessionId: sessionId, } } func (g *Ghost) Materialize(redis tower.Redis) { redis.Connect() defer redis.Disconnect() prettyJson := redis.Get(g.Key()) if prettyJson == "" { log.Debug("no ghost found: ", g.Key()) return } if err := json.Unmarshal([]byte(prettyJson), &g); err != nil { log.Fatal(err) } log.Debugf("ghost materialized: id=%s session_id=%s", g.Id, g.SessionId) } func (s *Soul) Embody(gamer bungo.User) { s.Gamer = gamer if len(gamer.Characters) >= 1 { s.Possessed = gamer.Characters[0].Id s.One = gamer.Characters[0] } if len(gamer.Characters) >= 2 { s.Two = gamer.Characters[1] } if len(gamer.Characters) == 3 { s.Three = gamer.Characters[2] } } func (s *Soul) Possess(id string) { s.Possessed = id } func (s *Soul) Charge(guardian bungo.Character) { switch guardian.Id { case s.One.Id: s.One = guardian case s.Two.Id: s.Two = guardian case s.Three.Id: s.Three = guardian } } func (g Ghost) Key(pieces ...string) string { key := "unknown" if len(pieces) > 0 { key = fmt.Sprintf("%s:%s:ghost:%s", g.Id, g.SessionId, strings.Join(pieces, ":")) } else { key = fmt.Sprintf("%s:%s:ghost", g.Id, g.SessionId) } log.Trace("ghost key: ", key) return key } func (s Soul) Summon() (out bungo.Character) { switch s.Possessed { case s.One.Id: out = s.One case s.Two.Id: out = s.Two case s.Three.Id: out = s.Three } return out } func (s Soul) SummonOthers() (out []bungo.Character) { if s.Possessed != s.One.Id { out = append(out, s.One) } if s.Possessed != s.Two.Id { out = append(out, s.Two) } if s.Possessed != s.Three.Id { out = append(out, s.Three) } return out } func (s Soul) Call(id string) (out bungo.Character, exists bool) { guardians, exists := s.Callx(id) if exists { out = guardians[0] exists = true } else if len(guardians) > 1 
{ log.Fatal("found too many guardians for id: ", id) } return out, exists } func (s Soul) Callx(ids ...string) (out []bungo.Character, exists bool) { for _, id := range ids { switch id { case s.One.Id: out = append(out, s.One) exists = true case s.Two.Id: out = append(out, s.Two) exists = true case s.Three.Id: out = append(out, s.Three) exists = true } } return out, exists } func (g Ghost) Save(redis tower.Redis) { prettyJson := g.PrettyJson() redis.Connect() defer redis.Disconnect() redis.Set(g.Key(), prettyJson) log.Debugf("ghost save: id=%s session_id=%s", g.Id, g.SessionId) } func (g Ghost) PrettyJson() (out []byte) { gout, _ := json.Marshal(g) var prettyJSON bytes.Buffer _ = json.Indent(&prettyJSON, gout, "", "\t") return prettyJSON.Bytes() }
package main

import (
	"flag"
	"fmt"
	"os"
)

// init runs once when the package is loaded. It replaces flag.Usage — a
// plain func() variable that already has a default — with a custom usage
// message for this command.
func init() {
	flag.Usage = func() {
		fmt.Fprintf(os.Stderr, "Usage of %s:\n", "question")
		// Print the description of every registered flag.
		flag.PrintDefaults()
	}
}

func main() {
	// Register a string flag: flag name, default value, and description.
	name := flag.String("name", "default name", "请输入名称:")
	// Parse the command line and populate *name.
	flag.Parse()
	fmt.Println("Hello " + *name)
}

/*
Example runs:

	$ go run main.go --help
	Usage of question:
	  -name string
	        请输入名称: (default "default name")
	exit status 2

	$ go run main.go -name="duwanjiang"
	Hello duwanjiang
*/
package v2

import (
	"bytes"
	"context"
	"fmt"

	"github.com/grafana/tempo/pkg/sort"

	"github.com/cespare/xxhash"
	"github.com/grafana/tempo/tempodb/backend"
	"github.com/grafana/tempo/tempodb/encoding/common"
	"github.com/opentracing/opentracing-go"
)

// indexReader reads fixed-size records out of a paged, checksummed index
// stored behind a backend.ContextReader, caching each page after first read.
type indexReader struct {
	r        backend.ContextReader
	recordRW common.RecordReaderWriter

	pageSizeBytes int
	totalRecords  int

	pageCache map[int]*page // indexReader is not concurrency safe, but since it is currently used within one request it is fine.
}

// NewIndexReader returns an index reader for a byte slice of marshalled
// ordered records.
// The index has not changed between v0 and v1.
func NewIndexReader(r backend.ContextReader, pageSizeBytes int, totalRecords int) (common.IndexReader, error) {
	return &indexReader{
		r:        r,
		recordRW: NewRecordReaderWriter(),

		pageSizeBytes: pageSizeBytes,
		totalRecords:  totalRecords,

		pageCache: map[int]*page{},
	}, nil
}

// At implements common.indexReader
// It translates the flat record index i into a (page, offset-in-page) pair,
// loads the page (through the cache), and unmarshals the record. Out-of-range
// indices return (nil, nil) rather than an error.
func (r *indexReader) At(ctx context.Context, i int) (*common.Record, error) {
	if i < 0 || i >= r.totalRecords {
		return nil, nil
	}

	recordLength := r.recordRW.RecordLength()

	recordsPerPage := objectsPerPage(recordLength, r.pageSizeBytes, IndexHeaderLength)
	if recordsPerPage == 0 {
		return nil, fmt.Errorf("page %d is too small for one record", r.pageSizeBytes)
	}
	pageIdx := i / recordsPerPage
	recordIdx := i % recordsPerPage

	page, err := r.getPage(ctx, pageIdx)
	if err != nil {
		return nil, err
	}

	// Guard against a final page that holds fewer records than a full page.
	if recordIdx >= len(page.data)/recordLength {
		return nil, fmt.Errorf("unexpected out of bounds index %d, %d, %d, %d", i, pageIdx, recordIdx, len(page.data))
	}

	recordBytes := page.data[recordIdx*recordLength : (recordIdx+1)*recordLength]

	// double check the record is not all 0s. this could occur if we read empty
	// buffer space past the final record in the final page
	allZeros := true
	for _, b := range recordBytes {
		if b != 0 {
			allZeros = false
			break
		}
	}
	if allZeros {
		return nil, fmt.Errorf("unexpected zero value record %d, %d, %d, %d", i, pageIdx, recordIdx, len(page.data))
	}

	record := r.recordRW.UnmarshalRecord(recordBytes)
	return &record, nil
}

// Find implements common.indexReader
// It binary-searches the ordered records for the first record whose ID is
// >= id and returns that record with its index, or (nil, -1, nil) when the
// search runs past the last record.
// NOTE(review): the returned record's ID is not compared for equality with
// id — presumably callers want the containing/next block, not an exact match.
func (r *indexReader) Find(ctx context.Context, id common.ID) (*common.Record, int, error) {
	// with a linear distribution of trace ids we can actually do much better than a normal
	// binary search. unfortunately there are edge cases which make this perform far worse.
	// for instance consider a set of trace ids what with 90% 64 bit ids and 10% 128 bit ids.
	span, ctx := opentracing.StartSpanFromContext(ctx, "indexReader.Find")
	defer span.Finish()

	i, err := sort.SearchWithErrors(r.totalRecords, func(i int) (bool, error) {
		record, err := r.At(ctx, i)
		if err != nil {
			return true, err
		}
		return bytes.Compare(record.ID, id) >= 0, nil
	})
	if err != nil {
		return nil, -1, err
	}

	var record *common.Record
	if i >= 0 && i < r.totalRecords {
		record, err = r.At(ctx, i)
		if err != nil {
			return nil, -1, err
		}
		return record, i, nil
	}
	return nil, -1, nil
}

// getPage returns the page at pageIdx, reading it from the backend on a cache
// miss, verifying its xxhash checksum, and caching it. The cache grows
// unboundedly, which is acceptable for the single-request lifetime noted on
// pageCache.
func (r *indexReader) getPage(ctx context.Context, pageIdx int) (*page, error) {
	page, ok := r.pageCache[pageIdx]
	if ok {
		return page, nil
	}

	pageBuffer := make([]byte, r.pageSizeBytes)
	_, err := r.r.ReadAt(ctx, pageBuffer, int64(pageIdx*r.pageSizeBytes))
	if err != nil {
		return nil, err
	}

	page, err = unmarshalPageFromBytes(pageBuffer, &indexHeader{})
	if err != nil {
		return nil, err
	}

	// checksum: the page header stores an xxhash64 of the page payload.
	h := xxhash.New()
	_, _ = h.Write(page.data)
	if page.header.(*indexHeader).checksum != h.Sum64() {
		return nil, fmt.Errorf("mismatched checksum: %d", pageIdx)
	}

	r.pageCache[pageIdx] = page

	return page, nil
}