query
stringlengths
7
3.85k
document
stringlengths
11
430k
metadata
dict
negatives
listlengths
0
101
negative_scores
listlengths
0
101
document_score
stringlengths
3
10
document_rank
stringclasses
102 values
makeLimitStatment renders the statement as below if r.limit>0: LIMIT
func (op *OpQuery) makeLimitStatment() string { if op.limit > 0 { return fmt.Sprintf("LIMIT %d", op.limit) } return "" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *Condition) Limit(l int) *Condition {\n\tc.limit = \"LIMIT \" + strconv.Itoa(l)\n\treturn c\n}", "func buildLimit(conditions map[string][]string) int {\n\tres := 20\n\tif len(conditions[\"limit\"]) > 0 {\n\t\tres, _ = strconv.Atoi(conditions[\"limit\"][0])\n\t\tif res > 300 {\n\t\t\tres = 300\n\t\t}\n\t}...
[ "0.6710295", "0.6347146", "0.6295109", "0.6226007", "0.62210786", "0.6170679", "0.6131475", "0.6119971", "0.6116671", "0.6077754", "0.6076456", "0.6064844", "0.6052961", "0.6028746", "0.6022812", "0.5987972", "0.5964589", "0.59535724", "0.5941685", "0.58898014", "0.5885067", ...
0.7328498
0
NewOpQuery returns an OpQuery instance with the given sobjectName.
// NewOpQuery returns an OpQuery instance bound to the given sobject name.
func NewOpQuery(sobjectName string) *OpQuery {
	q := &OpQuery{sobjectName: sobjectName}
	return q
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewOplogQuery(dataset map[string]interface{}) (Query, error) {\n\tnamespace, ok := dataset[\"ns\"].(string)\n\tif namespace == \"\" || !ok {\n\t\treturn nil, errors.New(\"namespace not given\")\n\t}\n\n\tp := strings.Index(namespace, \".\")\n\tif p == -1 {\n\t\treturn nil, errors.New(\"Invalid namespace given...
[ "0.6429491", "0.6319228", "0.6145101", "0.6085631", "0.60563207", "0.60545856", "0.5890437", "0.58600295", "0.5855261", "0.5772147", "0.5764213", "0.57355213", "0.57117575", "0.5668293", "0.56658304", "0.56325763", "0.5594233", "0.55472946", "0.5546636", "0.5518794", "0.54784...
0.8440395
0
Handler to get the program build information (GET /).
func (h *handler) getBuild(w http.ResponseWriter, r *http.Request) { _, responseContentType, err := contenttype.APICheck(r.Header) w.Header().Set("Content-Type", responseContentType) errResponder := httperr.NewResponder(responseContentType, h.logger) if err != nil { errResponder.Respond(w, http.StatusNotAcceptable, err.Error()) return } err = json.NewEncoder(w).Encode(h.build) if err != nil { h.logger.Error(err.Error()) errResponder.Respond(w, http.StatusInternalServerError, "") return } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetBuildInfoPerASIC() {\n\n url := \"http://10.67.69.71:8080/job/amd-staging-dkms-sustaining-dGPU/job/sanity-test/job/bare-metal-pro-gfx/api/xml?tree=allBuilds[description,fullDisplayName,id,timestamp]&exclude=//allBuild[not(contains(description,%22navi10%22))]\"\n\tfilepath := \"./navi10_buildinfo\" + \"....
[ "0.64320457", "0.6398367", "0.6037579", "0.5929717", "0.58946925", "0.58839583", "0.5821352", "0.5800616", "0.57839584", "0.5774395", "0.57712066", "0.5636525", "0.5602506", "0.55906236", "0.55887514", "0.55836105", "0.5559641", "0.5559641", "0.555559", "0.5506469", "0.548768...
0.6375576
2
After proto.Merge called two lists are merged and we cannot be sure of order of the elements and if the element is from the Mesh or from the Dataplane resource.
func (d *DataplaneResource) mergeLists( meshCfg *mesh_proto.PrometheusMetricsBackendConfig, dpCfg *mesh_proto.PrometheusMetricsBackendConfig, ) { aggregate := make(map[string]*mesh_proto.PrometheusAggregateMetricsConfig) for _, conf := range meshCfg.Aggregate { aggregate[conf.Name] = conf } // override Mesh aggregate configuration with Dataplane for _, conf := range dpCfg.Aggregate { aggregate[conf.Name] = conf } // contains all the elements for Dataplane configuration var unduplicatedConfig []*mesh_proto.PrometheusAggregateMetricsConfig for _, value := range aggregate { unduplicatedConfig = append(unduplicatedConfig, value) } // we cannot set the same values because they are going to be appended meshCfg.Aggregate = []*mesh_proto.PrometheusAggregateMetricsConfig{} dpCfg.Aggregate = unduplicatedConfig }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m MessageDescriptorMap) Merge(other MessageDescriptorMap) {\n\tfor id, other := range other {\n\t\tif m[id] == nil {\n\t\t\tm[id] = other\n\t\t} else {\n\t\t\tif other.Translations[defaultLanguage] != m[id].Translations[defaultLanguage] {\n\t\t\t\tm[id].updated = true\n\t\t\t}\n\t\t\tfor language, translatio...
[ "0.5636764", "0.5608513", "0.55030245", "0.5457892", "0.5433847", "0.5427023", "0.5413673", "0.5328313", "0.52650255", "0.52569515", "0.524656", "0.5213097", "0.5209504", "0.5205742", "0.5193463", "0.5180105", "0.51653993", "0.5135421", "0.51296806", "0.5109867", "0.51043874"...
0.61825055
0
Generate a new block based on the old block and new payload
func generateNewBlock(oldBlock Block, dataPayload string) (Block, error) { var newBlock Block timeNow := time.Now() newBlock.Index = oldBlock.Index + 1 newBlock.Timestamp = timeNow.String() newEvent, err := dataPayloadtoServiceEvent(dataPayload) if err != nil { log.Println("ERROR: Unable to convert data payload into ServiceEvent for new block generation.") } newBlock.Event = newEvent newBlock.PrevHash = oldBlock.Hash newBlock.Hash = calculateHash(newBlock) return newBlock, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *BlockchainService) generateBlock(oldBlock *Block, payload int) Block {\n\tvar newBlock Block\n\n\tt := time.Now()\n\n\tnewBlock.Index = oldBlock.Index + 1\n\tnewBlock.Timestamp = t.String()\n\tnewBlock.Payload = payload\n\tnewBlock.PrevHash = oldBlock.Hash\n\tnewBlock.Hash = calculateHash(&newBlock)\n\n\t...
[ "0.7882912", "0.75677776", "0.7454618", "0.740441", "0.7075559", "0.6992254", "0.695611", "0.6840896", "0.646973", "0.645678", "0.6430594", "0.6280684", "0.6150822", "0.6101972", "0.604691", "0.60240793", "0.60015756", "0.59656155", "0.5949742", "0.5923937", "0.5896824", "0...
0.79917556
0
Load Garages, Vehicles and Events in order to provide baseline data sets
func loadBaseData() error { err := fileToInterface(validGarageDataFile, &ValidGarages) if err != nil { log.Println("ERROR: Unable to load valid garage data") return err } err = fileToInterface(validVehicleDataFile, &ValidVehicles) if err != nil { log.Println("ERROR: Unable to load valid vehicles data") return err } err = fileToInterface(validEventDataFile, &ValidEvents) if err != nil { log.Println("ERROR: Unable to load valid events data") return err } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func loadData() {\r\n\tloadImages()\r\n\tsortPredictDetails()\t \r\n}", "func (al *AggregateLoader) Load() (retErr error) {\n\tlog.Println(\"loader started\")\n\tconfig := al.config\n\tignoreNew = config.IgnoreNew\n\tdg, err := dgraph.NewDgraphConnection(&dgraph.Config{\n\t\tHosts: config.Alpha,\n\t})\n\tif err ...
[ "0.6516506", "0.6347956", "0.61831176", "0.5677068", "0.5658918", "0.5573051", "0.5475211", "0.5449381", "0.5438672", "0.54160637", "0.54029816", "0.53700936", "0.5336872", "0.5336363", "0.5317149", "0.53148144", "0.5312098", "0.52779335", "0.5253924", "0.523492", "0.5209779"...
0.65898174
0
Convert a data payload from the web handler into a ServiceEvent
func dataPayloadtoServiceEvent(dataPayload string) (ServiceEvent, error) { var newServiceEvent ServiceEvent var newEventDescription EventDescription var newEventDescriptionType EventType var vehicle Vehicle var garage Garage log.Print(string(newServiceEvent.Identifier) + string(newEventDescription.VehicleMilage) + string(newEventDescriptionType.EventId) + string(vehicle.V5c) + string(garage.GarageId)) return newServiceEvent, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func EventFromPayload(eventId int, data interface{}) *swf.HistoryEvent {\n\tevent := &swf.HistoryEvent{}\n\tevent.EventId = I(eventId)\n\tswitch t := data.(type) {\n\tcase *swf.ActivityTaskCancelRequestedEventAttributes:\n\t\tevent.ActivityTaskCancelRequestedEventAttributes = t\n\t\tevent.EventType = S(swf.EventTy...
[ "0.6438354", "0.60353184", "0.6000109", "0.59321505", "0.5768451", "0.5756071", "0.57459855", "0.5693179", "0.56273997", "0.5607155", "0.55602217", "0.5532839", "0.55267197", "0.5525084", "0.55238265", "0.5508955", "0.55087966", "0.5476726", "0.54726946", "0.5445884", "0.5440...
0.74734116
0
Hash function to take key block data and return a SHA256 hash as a string
// calculateHash returns the hex-encoded SHA-256 hash of the block's key
// fields: index, timestamp, vehicle identifier (v5c) and previous hash.
//
// NOTE(review): string(block.Index) is Go's int-to-rune conversion, not a
// decimal rendering — e.g. index 65 contributes "A" to the record. The
// result is still deterministic, but strconv.Itoa was probably intended;
// changing it now would invalidate every previously computed hash, so it
// is left as-is. TODO confirm.
func calculateHash(block Block) string {
	// Time and vehicle identifier (v5c) are the key block items to generate the hash
	record := string(string(block.Index) + block.Timestamp + block.Event.PerformedOnVehicle.V5c + block.PrevHash)
	h := sha256.New()
	h.Write([]byte(record))
	hashed := h.Sum(nil)
	return hex.EncodeToString(hashed)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func calculateHash (block Block) string{\n h := sha256.New()\n unique := block.Data + block.PrevHash + block.TimeStamp + strconv.Itoa(block.Nonce)\n h.Write([]byte(unique))\n \n return hex.EncodeToString(h.Sum(nil))\n}", "func getSHA256Hash(data []byte) string {\n\treturn hex.EncodeToString(getSHA...
[ "0.74991125", "0.72448534", "0.71867824", "0.7152402", "0.70672005", "0.7050035", "0.7008327", "0.694254", "0.6933465", "0.6855276", "0.6829969", "0.6821539", "0.68141586", "0.6776146", "0.67583466", "0.67563283", "0.67516357", "0.6748018", "0.67467165", "0.67405146", "0.6731...
0.72586614
1
generateGenesisBlock will create the first block
// generateGenesisBlock creates block #1 of the chain, populated with fixed
// seed data for the garage, vehicle, event type and event description.
// The block links to itself via Hash = PrevHash = "0".
func generateGenesisBlock() Block {
	var genesisBlock Block
	var genesisRecord ServiceEvent
	var genesisRecordEventDescription EventDescription
	var genesisRecordEventDescriptionType EventType
	var genesisRecordVehicle Vehicle
	var genesisRecordGarage Garage

	// Seed values for Garage, Vehicle, EventType and EventDescription
	genesisRecordGarage.GarageId = 0
	genesisRecordGarage.Location = "genesis location"
	genesisRecordGarage.Name = "genesis inc."
	genesisRecordGarage.Owner = "genesis and co."
	genesisRecordGarage.Type = "main dealer"

	genesisRecordVehicle.V5c = "63ne515"
	genesisRecordVehicle.VehicleColour = append(genesisRecordVehicle.VehicleColour, "starting colour")
	genesisRecordVehicle.VehicleMake = "genesis make"
	genesisRecordVehicle.VehicleModel = "genesis model"
	genesisRecordVehicle.VehicleRegistration = append(genesisRecordVehicle.VehicleRegistration, "GEN 351 S")

	genesisRecordEventDescriptionType.EventId = 0
	genesisRecordEventDescriptionType.EventDescription = "genesis event"
	genesisRecordEventDescription.EventItem = append(genesisRecordEventDescription.EventItem, genesisRecordEventDescriptionType)
	genesisRecordEventDescription.VehicleMilage = 10000000

	// Pull all the objects into ServiceEvent
	genesisRecord.EventAuthorisor = "Created by serviceChain as the Genesis Block"
	genesisRecord.EventDetails = genesisRecordEventDescription
	genesisRecord.Identifier = 1
	genesisRecord.PerformedBy = genesisRecordGarage
	genesisRecord.PerformedOnVehicle = genesisRecordVehicle

	// Set the values for the Block
	genesisBlock.Index = 1
	genesisBlock.Hash = "0"
	genesisBlock.PrevHash = "0"
	genesisBlock.Timestamp = time.Now().String()
	genesisBlock.Event = genesisRecord

	// Best-effort pretty print of the block; a marshalling failure only
	// affects the log line below, not the returned block.
	blockString, err := json.MarshalIndent(genesisBlock, "", "\t")
	if err != nil {
		log.Println("INFO: serviceChain.createGenesisBlock(): Problem creating the JSON output of the genesis block. \n Continuing...")
	}
	log.Println("INFO: serviceChain.generateGenesisBlock(): Created block with contents: " + string(blockString))

	return genesisBlock
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (b *Block) CreateGenesisBlock() {\n\n header := Header{0, int64(time.Now().Unix()), \"GenesisBlock\", \"\", 0, \"\"}\n b.Mpt = p1.GetMPTrie()\n b.Header = header\n}", "func CreateGenesisBlock(creator *wallet.Wallet) (bl Block, tx Transaction) {\n\tbl.Header.PrevBlockHash = [constants.HASHSIZE]byte{...
[ "0.8338062", "0.79232156", "0.7768899", "0.73699194", "0.7312927", "0.7312893", "0.72981054", "0.7217498", "0.717386", "0.7148623", "0.7148288", "0.7131218", "0.71268797", "0.7094627", "0.7069991", "0.69981176", "0.69953555", "0.698607", "0.6977885", "0.6955806", "0.6951625",...
0.8215045
1
Perform tests to validate if a specific block is valid by comparing it to its own values
// isBlockValid performs tests to validate a specific block by comparing it
// to the prior block's values.
//
// NOTE(review): currently a stub — every block is accepted unconditionally.
// A real implementation would presumably compare current.Index,
// current.PrevHash and a recomputed hash against prior; TODO confirm and
// implement before relying on chain integrity.
func isBlockValid(prior Block, current Block) bool {
	return true
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func TestBaseGradedBlock_Valid(t *testing.T) {\n\tt.Run(\"V1\", func(t *testing.T) {\n\t\ttestBaseGradedBlock_valid(t, 1)\n\t})\n\tt.Run(\"V2\", func(t *testing.T) {\n\t\ttestBaseGradedBlock_valid(t, 2)\n\t})\n\tt.Run(\"V3\", func(t *testing.T) {\n\t\ttestBaseGradedBlock_valid(t, 3)\n\t})\n\tt.Run(\"V4\", func(t *...
[ "0.7005355", "0.67256606", "0.6663936", "0.6658995", "0.65298724", "0.65101755", "0.6502856", "0.6467316", "0.64659816", "0.646173", "0.6443889", "0.6409351", "0.63813853", "0.63750744", "0.63302433", "0.62938285", "0.62859845", "0.62698686", "0.6257908", "0.6239302", "0.6231...
0.691765
1
Replace the current chain by adding a new block
func replaceChain(newBlock Block) bool { // make this thread safe blockchainwritelock.Lock() defer blockchainwritelock.Unlock() // Is the block valid and if so then append it to the chain if isBlockValid(newBlock, Blockchain[len(Blockchain)-1]) { Blockchain = append(Blockchain, newBlock) BlockchainLength = len(Blockchain) var registration string // Update vehicle lookups log.Printf("INFO: replaceChain(): Adding vehicle to vehicle and blocklookup map.") lastregindex := len(newBlock.Event.PerformedOnVehicle.VehicleRegistration) registration = newBlock.Event.PerformedOnVehicle.VehicleRegistration[lastregindex-1] registration = strings.ToUpper(registration) registration = strings.Replace(registration, " ", "", -1) blocklist := vehicleMap[registration] log.Printf("INFO: replaceChain(): REG %s, BLOCKLIST SIZE %s", registration, strconv.Itoa(len(blocklist))) log.Printf("INFO: replaceChain(): Captured registration: %s with %s previous entries", registration, strconv.Itoa(len(blocklist))) if (len(blocklist)) > 0 { log.Printf("INFO: replaceChain(): Vehicle been added before. 
Appending new block id with value %s", strconv.Itoa(newBlock.Index)) vehicleMap[registration] = append(blocklist, newBlock.Index) } else { newBlockSlice := []int{newBlock.Index} log.Printf("INFO: replaceChain(): created new list of blocks for registration %s, size is %s", registration, strconv.Itoa(len(newBlockSlice))) // vehicleMap not initialised so set it up if len(vehicleMap) < 1 { log.Printf("INFO: replaceChain(): vehicleMap is not initialised, size is %s", strconv.Itoa(len(vehicleMap))) vehicleMap = make(map[string][]int) } // Add the new vehicle to the map log.Printf("INFO: replaceChain(): Adding vehicle %s to new vehicleMap", registration) vehicleMap[registration] = newBlockSlice } log.Printf("INFO: replaceChain(): Added vehicle reg %s to block lookup table, blockid %s", registration, strconv.Itoa(newBlock.Index)) log.Printf("INFO: replaceChain(): Appended new block, writing to disk with ID %s", strconv.Itoa(BlockchainLength)) err := interfaceToFile("./saved_chains/md5589_blockchain_"+strconv.Itoa(BlockchainLength), Blockchain) if err != nil { log.Printf("ERROR: Unable to write blockchain to disk: %s", strconv.Itoa(BlockchainLength)) } return true } return false }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (this *Blockchain) addBlock(block Block) {\n mutex.Lock()\n this.chain = append(this.chain, block)\n mutex.Unlock()\n // reset pending Transactions\n this.pendingTransactions = nil\n}", "func AddToMyChain(data BlockData) error {\r\n\t// Create the new block (get parent, create block)\r\n\tpre...
[ "0.70676774", "0.6995445", "0.68848646", "0.68397206", "0.67871076", "0.6565399", "0.63590604", "0.6292728", "0.6123014", "0.60018307", "0.594084", "0.5887955", "0.58096766", "0.58064395", "0.5777828", "0.5749241", "0.56876445", "0.56867546", "0.5637102", "0.56358117", "0.563...
0.66153383
5
ServerStart starts the web server on the specified TCP port. Blank will default to 8000.
func ServerStart(port string) (string, error) { // List of view handlers handlerStrings = append(handlerStrings, "/", "/blockchain/view/<ID>", "/garage/view/<ID>", "serviceevent/add/", "/vehicle/view/<ID>") http.HandleFunc("/", defaultHandler) // Each call to "/" will invoke defaultHandler http.HandleFunc("/blockchain/view/", blockchainViewHandler) http.HandleFunc("/garage/view/", garageViewHandler) http.HandleFunc("/serviceevent/add/", writeServiceEventHandler) http.HandleFunc("/vehicle/view/", vehicleViewHandler) //log.Fatal(http.ListenAndServe("localhost:"+port, nil)) return "Started on: " + port, http.ListenAndServe("localhost:"+port, nil) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func StartServer() {\n\tif server == nil {\n\t\tGetInstance()\n\t}\n\n\tlog.Println(\"starting server on http://localhost\" + defaultPort)\n\tserver.Run(defaultPort)\n}", "func (o *HttpServer) Start() error {\n\turi := fmt.Sprintf(\"%s:%d\", o.Host, o.Port)\n\tlog.Printf(\"[HTTP] Server listen on %s\\n\", uri)\n...
[ "0.7371509", "0.73683625", "0.7337591", "0.73110557", "0.7226168", "0.7212318", "0.7199945", "0.71704507", "0.7159049", "0.7156385", "0.7153561", "0.7147691", "0.7074961", "0.7063656", "0.7048088", "0.70398515", "0.7033529", "0.70296556", "0.70292014", "0.7026401", "0.6990443...
0.79115456
0
Default handler to catchall
// defaultHandler is the catch-all for unmatched routes; it tells the caller
// which endpoints are available instead of serving content.
func defaultHandler(w http.ResponseWriter, r *http.Request) {
	const format = "INFO: Default Handler called from %s. Please try alternative methods such as \n %s"
	fmt.Fprintf(w, format, r.RemoteAddr, handlerStrings)
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CatchAllHandler(w http.ResponseWriter, r *http.Request) {\n\t// 1. Get the redirect URL out of the config\n\tif !viper.IsSet(\"default_url\") {\n\t\t// The reason for using StatusNotFound here instead of StatusInternalServerError\n\t\t// is because this is a catch-all function. You could come here via various...
[ "0.74647164", "0.71886647", "0.67791533", "0.65686786", "0.65400976", "0.64790803", "0.63966566", "0.62958306", "0.6246133", "0.623425", "0.62189925", "0.621134", "0.6204758", "0.61410385", "0.61060303", "0.60277355", "0.60198945", "0.60198945", "0.60198945", "0.5942256", "0....
0.6887508
2
Handler to manage requests to /blockchain/ subchain
func blockchainViewHandler(w http.ResponseWriter, r *http.Request) { // Take the URL beyond /blockchain/ and split into request and value strings requestAction := strings.Split(r.URL.String(), "/") requestItem, err := strconv.Atoi(requestAction[3]) requestItem = requestItem - 1 if err != nil { log.Println("INFO: Unable to convert argument to integer, assume this is a request for entire chain") } if requestItem == -1 { //Request item is invalid so display that blockID only blockString, err := json.MarshalIndent(Blockchain, "", "\t") if err != nil { log.Println("ERROR: blockchainViewHandler(): Cannot print Blockchain") } fmt.Fprintf(w, "\n %s", blockString) } else { blockItemString, _ := json.MarshalIndent(Blockchain[requestItem], "", "\t") // Do nothing if index too high fmt.Fprintf(w, "\n %s.", blockItemString) } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func handleGetBlockchain(w http.ResponseWriter, r *http.Request) {\n\n\t// create a json representation of the current blockchain with indentations\n\tbytes, err := json.MarshalIndent(model.Blockchain, \"\", \" \")\n\n\tif err != nil {\n\t\thttp.Error(w, err.Error(), http.StatusInternalServerError)\n\t\treturn\n\...
[ "0.5844577", "0.56329745", "0.5578209", "0.5482666", "0.54420656", "0.5347861", "0.5323568", "0.5304798", "0.5262302", "0.52571183", "0.52263904", "0.5204237", "0.51906383", "0.518504", "0.51638216", "0.5162992", "0.51466084", "0.5141968", "0.510756", "0.5105206", "0.5061075"...
0.62619346
0
Handler to manage requests to /garage/ subchain
func garageViewHandler(w http.ResponseWriter, r *http.Request) { // Take the URL beyond /garage/ and split into request and value strings requestAction := strings.Split(r.URL.String(), "/") requestItem, err := strconv.Atoi(requestAction[3]) if err != nil { log.Println("INFO: garageViewHandler(): No garage id requested, setting to -1") requestItem = -1 } if requestItem < 0 { //no value so display them all garageString, err := json.MarshalIndent(ValidGarages, "", "\t") if err != nil { log.Println("ERROR: garageViewHandler(): Cannot print Garages JSON data") } fmt.Fprintf(w, "\n %s", garageString) } else { garageString, _ := json.MarshalIndent(ValidGarages[requestItem], "", "\t") // Do nothing if index too high fmt.Fprintf(w, "\n %s.", garageString) } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (f *Frontend) triageHandler(w http.ResponseWriter, r *http.Request) {\n\tctx := r.Context()\n\tw.Header().Set(\"Content-Type\", \"application/json\")\n\ttr := &TriageRequest{}\n\tif err := json.NewDecoder(r.Body).Decode(tr); err != nil {\n\t\thttputils.ReportError(w, err, \"Failed to decode JSON.\", http.Stat...
[ "0.53979844", "0.5357035", "0.5331468", "0.52953714", "0.5275639", "0.5275639", "0.5273326", "0.5237795", "0.51973367", "0.51813775", "0.51783746", "0.5174104", "0.5167441", "0.5147928", "0.5087115", "0.5081987", "0.5057369", "0.5057227", "0.5027063", "0.50116813", "0.5004467...
0.64442056
0
Save will convert the input interface (v) into a JSON formatted object on disk
func interfaceToFile(path string, structIn interface{}) error { // Create a lock and then defer the unlock until function exit filewritelock.Lock() defer filewritelock.Unlock() //Create os.File and defer close file, err := os.Create(string(path)) if err != nil { return err } defer file.Close() // Create a reader (via marshal) that we can use to copy into the writer reader, err := Marshal(structIn) if err != nil { return err } _, err = io.Copy(file, reader) return err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Save(w io.Writer, v interface{}) error {\n\tdata, err := json.Marshal(v)\n\tif err != nil {\n\t\treturn err\n\t}\n\t_, err = io.WriteString(w, string(data))\n\treturn err\n}", "func Save(file string, v interface{}) error {\n\tf, err := gzipf.Create(file)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer f.Cl...
[ "0.79349977", "0.7603756", "0.7265592", "0.70981365", "0.6894807", "0.6894807", "0.6894807", "0.68806446", "0.6834804", "0.6746869", "0.67125416", "0.65770334", "0.65708554", "0.65669554", "0.6514774", "0.6461022", "0.64585143", "0.643517", "0.64074165", "0.63989145", "0.6370...
0.0
-1
Load is used to convert a JSON (marshall output formatted) file to a struct (interface)
func fileToInterface(path string, structOut interface{}) error { // Lock and defer the unlock until function exit filewritelock.Lock() defer filewritelock.Unlock() log.Println("INFO: Loading " + path) fileOut, err := os.Open(path) if err != nil { log.Println("ERROR: fileToInterface() " + err.Error() + " while openning file " + path) return err } defer fileOut.Close() Unmarshal(fileOut, structOut) return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (*JSONStruct) Load(filename string, v interface{}) {\n\tdata, err := io.ReadFile(filename)\n\tif err != nil {\n\t\treturn\n\t}\n\tjsonBytes := []byte(data)\n\n\terr = json.Unmarshal(jsonBytes, v)\n\tif err != nil {\n\t\treturn\n\t}\n}", "func Load(path string, object interface{}) error {\n\tfile, err := os....
[ "0.8062228", "0.7621891", "0.7608677", "0.7536772", "0.7311489", "0.7303192", "0.7233047", "0.71877", "0.71543425", "0.70984703", "0.70858276", "0.7059995", "0.7041795", "0.7030563", "0.70268375", "0.7012453", "0.7006991", "0.6945984", "0.68736225", "0.6873584", "0.6835881", ...
0.6021396
74
LoadTestEnv will load .env.test
func LoadTestEnv() error { _, b, _, _ := runtime.Caller(0) d := path.Join(path.Dir(b)) err := godotenv.Load(fmt.Sprintf("%s/.env.test", filepath.Dir(d))) if err != nil { log.Fatal("failed to load test env config: ", err) } return err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func LoadEnvFile(t *testing.T) error {\r\n\tenvFileName := os.Getenv(TestEnvFilePath)\r\n\terr := godotenv.Load(envFileName)\r\n\tif err != nil {\r\n\t\treturn fmt.Errorf(\"Can not read .env file: %s\", envFileName)\r\n\t}\r\n\treturn nil\r\n}", "func loadTestConfig() {\n\terr := godotenv.Load(\"../.env.testing\...
[ "0.7706625", "0.76327944", "0.7433559", "0.7390404", "0.6782704", "0.673599", "0.671696", "0.67095554", "0.67051566", "0.66891676", "0.6651732", "0.6646073", "0.6565774", "0.656104", "0.6542334", "0.6491168", "0.64808255", "0.64760107", "0.6458198", "0.63961804", "0.63782704"...
0.8625285
0
InitTestDB will migrate db tables
// InitTestDB loads the test environment, connects to the database described
// by configs and runs the auto-migrations, returning the gorm handle.
// It panics if the test environment cannot be loaded.
//
// NOTE(review): `conn, ok := db.DB()` names the error value "ok", and when
// db.DB() fails `conn` may be nil — making the deferred Close a potential
// nil dereference. The defer also closes the underlying connection when
// THIS function returns, i.e. before the caller can use the returned
// *gorm.DB. Verify both behaviors are intended.
func InitTestDB() (*gorm.DB, error) {
	if err := LoadTestEnv(); err != nil {
		panic(err)
	}
	config := configs.New()
	db := database.New(&config.Database, false)
	database.AutoMigrate(db)
	if conn, ok := db.DB(); ok != nil {
		defer conn.Close()
	}
	return db, nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (db *DbCtxt) InitDatabase() error {\n\tvar models []interface{}\n\tmodels = append(models,\n\t\t&Hotel{},\n\t\t&Room{},\n\t\t&RatePlan{},\n\t)\n\tfor _, model := range models {\n\t\terr := db.client.AutoMigrate(model)\n\t\tif err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\treturn nil\n}", "func InitTestDB() e...
[ "0.73692185", "0.724161", "0.721584", "0.7181012", "0.71460074", "0.7139826", "0.71323043", "0.7104169", "0.7086372", "0.7060455", "0.7036444", "0.70175046", "0.70077455", "0.6931672", "0.6924954", "0.68977255", "0.68855184", "0.6883087", "0.68763554", "0.68635714", "0.678895...
0.71609265
4
Hijack implements the http.Hijacker interface.
func (w *responseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) { if w.size < 0 { w.size = 0 } return w.ResponseWriter.(http.Hijacker).Hijack() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *ResponseWriter) Hijack() {\n\tr.ResponseWriter.Hijack()\n\treturn\n}", "func (response *Response) Hijack() (net.Conn, *bufio.ReadWriter, error) {\n\treturn response.Writer.(http.Hijacker).Hijack()\n}", "func (l *logWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {\n\treturn l.ResponseWriter.(htt...
[ "0.7620157", "0.754383", "0.7506798", "0.7460087", "0.74350834", "0.7414825", "0.738739", "0.7385394", "0.7369718", "0.7350782", "0.7296897", "0.72345155", "0.7159378", "0.7158735", "0.7012246", "0.70109314", "0.69657886", "0.68642426", "0.68232936", "0.67975926", "0.6614268"...
0.7054844
14
CloseNotify implements the http.CloseNotify interface.
// CloseNotify implements the (deprecated) http.CloseNotifier interface by
// delegating to the wrapped ResponseWriter.
func (w *responseWriter) CloseNotify() <-chan bool {
	notifier := w.ResponseWriter.(http.CloseNotifier)
	return notifier.CloseNotify()
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (resp *response) CloseNotify() <-chan bool {\n\treturn resp.ResponseWriter.(http.CloseNotifier).CloseNotify()\n}", "func (r *responseWriterWithCloseNotify) CloseNotify() <-chan bool {\n\treturn r.rw.(http.CloseNotifier).CloseNotify()\n}", "func (r *Response) CloseNotify() <-chan bool {\n\treturn r.Respons...
[ "0.71369374", "0.69510823", "0.6842617", "0.67997015", "0.67796373", "0.6760421", "0.6758504", "0.666461", "0.641242", "0.63343847", "0.5994585", "0.5943047", "0.59304035", "0.5919809", "0.5905144", "0.5892296", "0.5802287", "0.56952953", "0.5690403", "0.56514496", "0.5640458...
0.6749032
9
Flush implements the http.Flush interface.
func (w *responseWriter) Flush() { w.WriteHeaderNow() w.ResponseWriter.(http.Flusher).Flush() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *mockHTTPWriter) Flush() {}", "func (b *BodyWriter) Flush() {}", "func (b *httpBatch) Flush() error {\n\treturn nil\n}", "func (p *ProxyWriter) Flush() {\n\tif f, ok := p.W.(http.Flusher); ok {\n\t\tf.Flush()\n\t}\n}", "func (resp *response) Flush() {\n\tresp.ResponseWriter.(http.Flusher).Flush()\n...
[ "0.7804822", "0.77415204", "0.75806135", "0.7484612", "0.7451575", "0.7442949", "0.7434191", "0.7376928", "0.7280186", "0.7275494", "0.72015464", "0.72015464", "0.71789104", "0.71663654", "0.7159713", "0.7103418", "0.70785534", "0.70785403", "0.70659363", "0.69853115", "0.683...
0.71478975
15
bigramWordByCopy concatenates adjacent pairs of words by reusing the input string and suppressing allocation.
// bigramWordByCopy returns the adjacent word pairs of str. Each pair is a
// slice of the original input string, so no per-pair concatenation
// allocation occurs.
func bigramWordByCopy(str string) []string {
	words := strings.Split(str, " ")

	// "" => []string{""} and "foobar" => []string{"foobar"} have no pairs;
	// hand back the split result unchanged.
	if len(words) < 2 {
		return words
	}

	pairs := make([]string, len(words)-1)
	rest := str
	for i, w := range words[:len(words)-1] {
		// Slice "<word> <next>" out of the remaining input…
		pairs[i] = rest[:len(w)+1+len(words[i+1])]
		// …then drop "<word> " and continue with the tail.
		rest = rest[len(w)+1:]
	}
	return pairs
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func bigramWordByConcat(str string) []string {\n\tss := strings.Split(str, \" \")\n\n\t// Handle unexpected string input:\n\t// ss = \"\" => []string{\"\"}\n\t// ss = \"foobar\" => []string{\"foobar\"}\n\tif len(ss) <= 1 {\n\t\treturn ss\n\t}\n\n\tbigram := make([]string, len(ss)-1)\n\tfor i := 0; i < len(ss...
[ "0.5800434", "0.5727833", "0.54212874", "0.52668625", "0.5137672", "0.5091352", "0.50865716", "0.47963905", "0.47856638", "0.4722009", "0.46957615", "0.46068934", "0.4593441", "0.45838994", "0.45664155", "0.45664144", "0.45522085", "0.45507455", "0.45397383", "0.45167142", "0...
0.69708395
0
bigramWordByConcat concatenates adjacent pairs of words just by using the plus operator.
// bigramWordByConcat returns the adjacent word pairs of str, joining each
// pair with the + operator.
func bigramWordByConcat(str string) []string {
	words := strings.Split(str, " ")

	// "" => []string{""} and "foobar" => []string{"foobar"} have no pairs.
	if len(words) < 2 {
		return words
	}

	pairs := make([]string, 0, len(words)-1)
	for i := 1; i < len(words); i++ {
		pairs = append(pairs, words[i-1]+" "+words[i])
	}
	return pairs
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func bigramWordByAppend(str string) []string {\n\tss := strings.Split(str, \" \")\n\n\t// Handle unexpected string input:\n\t// ss = \"\" => []string{\"\"}\n\t// ss = \"foobar\" => []string{\"foobar\"}\n\tif len(ss) <= 1 {\n\t\treturn ss\n\t}\n\n\tvar tmp string\n\tvar bigram []string\n\tfor i, s := range ss...
[ "0.6186775", "0.617156", "0.6004593", "0.58910674", "0.5868411", "0.5799477", "0.56004256", "0.5538303", "0.55113024", "0.54897165", "0.54454404", "0.54269725", "0.5422925", "0.5394519", "0.5383755", "0.53743744", "0.537224", "0.5281358", "0.5256899", "0.52460384", "0.5219058...
0.7001356
0
bigramWordByJoin concatenates adjacent pairs of strings by using the strings.Join function.
func bigramWordByJoin(str string) []string { ss := strings.Split(str, " ") // Handle unexpected string input: // ss = "" => []string{""} // ss = "foobar" => []string{"foobar"} if len(ss) <= 1 { return ss } bigram := make([]string, len(ss)-1) for i := 0; i < len(ss)-1; i++ { bigram[i] = strings.Join([]string{ss[i], ss[i+1]}, " ") } return bigram }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func bigramWordByAppend(str string) []string {\n\tss := strings.Split(str, \" \")\n\n\t// Handle unexpected string input:\n\t// ss = \"\" => []string{\"\"}\n\t// ss = \"foobar\" => []string{\"foobar\"}\n\tif len(ss) <= 1 {\n\t\treturn ss\n\t}\n\n\tvar tmp string\n\tvar bigram []string\n\tfor i, s := range ss...
[ "0.66665906", "0.66610605", "0.61865735", "0.61223364", "0.60511523", "0.60123986", "0.59851575", "0.5955917", "0.58678406", "0.5808041", "0.57088685", "0.5674272", "0.56346524", "0.5621613", "0.55867535", "0.55749595", "0.5506919", "0.5492988", "0.5458885", "0.54253215", "0....
0.7665968
0
bigramWordByAppend concatenates adjecent pairs of string by using variablelength array and append. Append creates new instance at each time when called, so it is high cost...
func bigramWordByAppend(str string) []string { ss := strings.Split(str, " ") // Handle unexpected string input: // ss = "" => []string{""} // ss = "foobar" => []string{"foobar"} if len(ss) <= 1 { return ss } var tmp string var bigram []string for i, s := range ss { if i != 0 { bigram = append(bigram, strings.Join([]string{tmp, s}, " ")) } tmp = s } return bigram }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func bigramWordByConcat(str string) []string {\n\tss := strings.Split(str, \" \")\n\n\t// Handle unexpected string input:\n\t// ss = \"\" => []string{\"\"}\n\t// ss = \"foobar\" => []string{\"foobar\"}\n\tif len(ss) <= 1 {\n\t\treturn ss\n\t}\n\n\tbigram := make([]string, len(ss)-1)\n\tfor i := 0; i < len(ss...
[ "0.5854114", "0.5709732", "0.5358691", "0.53485084", "0.5276471", "0.50733846", "0.5061012", "0.49326667", "0.48938566", "0.48491368", "0.48405713", "0.4838271", "0.48338476", "0.4833691", "0.48082826", "0.4801425", "0.47989488", "0.47850963", "0.47785482", "0.47461033", "0.4...
0.6780024
0
Get accounts by the provided names
func (api *API) GetAccounts(ctx context.Context, names ...string) ([]*Account, error) { var resp []*Account err := api.call(ctx, "get_accounts", []interface{}{names}, &resp) return resp, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (accounts *Accounts) ByName(name string) *Account {\n\tfor _, acc := range accounts.Map {\n\t\tif acc.Name == name {\n\t\t\treturn acc\n\t\t}\n\t}\n\treturn nil\n}", "func (c *GethClient) Accounts(ctx context.Context) ([]string, error) {\n\tvar result []string\n\terr := c.rpcCli.CallContext(ctx, &result, \"...
[ "0.637644", "0.63298076", "0.6257646", "0.6228807", "0.6209985", "0.61972433", "0.6075114", "0.606668", "0.59395134", "0.5928551", "0.589412", "0.5893891", "0.5889837", "0.58318645", "0.58226526", "0.5752496", "0.57408327", "0.5739339", "0.5705747", "0.5685761", "0.5654846", ...
0.58654314
13
GetAccountsCount returns account count
func (api *API) GetAccountsCount(ctx context.Context) (int, error) { var resp int err := api.call(ctx, "get_account_count", caller.EmptyParams, &resp) return resp, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_FCToken *FCTokenCaller) GetAccountCount(opts *bind.CallOpts) (*big.Int, error) {\n\tvar out []interface{}\n\terr := _FCToken.contract.Call(opts, &out, \"getAccountCount\")\n\n\tif err != nil {\n\t\treturn *new(*big.Int), err\n\t}\n\n\tout0 := *abi.ConvertType(out[0], new(*big.Int)).(**big.Int)\n\n\treturn o...
[ "0.7629392", "0.7616788", "0.7604881", "0.76005614", "0.75523543", "0.7378767", "0.73631734", "0.68072057", "0.67254966", "0.6607631", "0.6525248", "0.64642614", "0.64617866", "0.642899", "0.6358162", "0.63580036", "0.6307512", "0.6102952", "0.6097889", "0.6087178", "0.602654...
0.8077577
0
LookupAccounts get names and IDs for registered accounts. lowerBoundName Lower bound of the first name to return. limit Maximum number of results to return must not exceed 1000
func (api *API) LookupAccounts(ctx context.Context, lowerBoundName string, limit uint16) ([]string, error) { var resp []string err := api.call(ctx, "lookup_accounts", []interface{}{lowerBoundName, limit}, &resp) return resp, err }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c *GethClient) Accounts(ctx context.Context) ([]string, error) {\n\tvar result []string\n\terr := c.rpcCli.CallContext(ctx, &result, \"personal_listAccounts\")\n\treturn result, err\n}", "func getAccount(addrs []string) ([]*account, []string, error) {\n\tstartWorker()\n\n\ttotalTask := len(addrs)\n\tresult...
[ "0.5750897", "0.56853", "0.56486696", "0.5646235", "0.5644906", "0.5556404", "0.5544633", "0.5519726", "0.55192685", "0.54581314", "0.54003", "0.5372517", "0.5332015", "0.5319677", "0.5290591", "0.52856636", "0.5274148", "0.52607983", "0.5248461", "0.5248349", "0.5233114", ...
0.8132988
0
Set callback to invoke as soon as a new block is applied
func (api *API) SetBlockAppliedCallback(notice func(header *types.BlockHeader, error error)) (err error) { err = api.setCallback("set_block_applied_callback", func(raw json.RawMessage) { var header []types.BlockHeader if err := json.Unmarshal(raw, &header); err != nil { notice(nil, err) } for _, b := range header { notice(&b, nil) } }) return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (t *SelfTester) SetOnNewPoliciesReadyCb(cb func()) {\n}", "func (v *Vox) SetCb(cb audio.OnDataCb) {\n\tv.Lock()\n\tdefer v.Unlock()\n\tv.cb = cb\n}", "func (q *Queue) SetCallback(cb Callback) error {\n\tq.cb = cb\n\treturn nil\n}", "func TestCallbackInvokedWhenSetLate(t *testing.T) {\n\twg := &sync.Wait...
[ "0.5863456", "0.5697866", "0.568345", "0.5491504", "0.5381555", "0.5343357", "0.52973217", "0.52345043", "0.5169272", "0.5145329", "0.51420856", "0.51409525", "0.50777704", "0.5075929", "0.5064654", "0.50526845", "0.5041527", "0.50297105", "0.50177705", "0.50153047", "0.50027...
0.6094151
0
NewCmdConfigUseContext returns a Command instance for 'config usecontext' sub command
func NewCmdConfigUseContext(out io.Writer, configAccess clientcmd.ConfigAccess) *cobra.Command { options := &useContextOptions{configAccess: configAccess} cmd := &cobra.Command{ Use: "use-context CONTEXT_NAME", DisableFlagsInUseLine: true, Short: i18n.T("Set the current-context in a kubeconfig file"), Aliases: []string{"use"}, Long: `Set the current-context in a kubeconfig file.`, Example: useContextExample, ValidArgsFunction: completion.ContextCompletionFunc, Run: func(cmd *cobra.Command, args []string) { cmdutil.CheckErr(options.complete(cmd)) cmdutil.CheckErr(options.run()) fmt.Fprintf(out, "Switched to context %q.\n", options.contextName) }, } return cmd }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewCommandConfig(options *app.Options) *cobra.Command {\n\to := &ConfigOptions{\n\t\tOptions: options,\n\t}\n\n\t// root\n\tcmds := &cobra.Command{\n\t\tUse: \"config\",\n\t\tShort: \"Configuration command\",\n\t\tLong: \"This is a configuration command\",\n\t\tRun: func(c *cobra.Command, args []string) {\...
[ "0.74391395", "0.6349216", "0.624942", "0.6107694", "0.6100607", "0.6080159", "0.60459125", "0.60392904", "0.59372085", "0.58892083", "0.5855555", "0.58185875", "0.57851416", "0.57795674", "0.57705146", "0.5729567", "0.5684045", "0.5587479", "0.5549792", "0.55303764", "0.5526...
0.8202349
0
Validate validates this tweet
func (m *Tweet) Validate(formats strfmt.Registry) error { var res []error if err := m.validateCreatedAt(formats); err != nil { res = append(res, err) } if err := m.validateFullText(formats); err != nil { res = append(res, err) } if err := m.validateID(formats); err != nil { res = append(res, err) } if err := m.validateTwitterID(formats); err != nil { res = append(res, err) } if err := m.validateTwitterUserID(formats); err != nil { res = append(res, err) } if err := m.validateTwitterUsername(formats); err != nil { res = append(res, err) } if len(res) > 0 { return errors.CompositeValidationError(res...) } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (mt *Bottle) Validate() (err error) {\n\tif mt.ID == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"id\"))\n\t}\n\tif mt.Name == \"\" {\n\t\terr = goa.MergeErrors(err, goa.MissingAttributeError(`response`, \"name\"))\n\t}\n\n\tif utf8.RuneCountInString(mt.Name) < 1 {\n\t\terr =...
[ "0.5874286", "0.58268666", "0.57588404", "0.57587236", "0.56676954", "0.5583745", "0.55723965", "0.5527022", "0.5522042", "0.55206895", "0.55169415", "0.5466347", "0.5446972", "0.544323", "0.5401981", "0.5369382", "0.534803", "0.5335379", "0.53225595", "0.5300802", "0.5298521...
0.78896123
0
DeleteVpdGrantRule invokes the eflo.DeleteVpdGrantRule API synchronously
func (client *Client) DeleteVpdGrantRule(request *DeleteVpdGrantRuleRequest) (response *DeleteVpdGrantRuleResponse, err error) { response = CreateDeleteVpdGrantRuleResponse() err = client.DoAction(request, response) return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (client *Client) DeleteVpdGrantRuleWithCallback(request *DeleteVpdGrantRuleRequest, callback func(response *DeleteVpdGrantRuleResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *DeleteVpdGrantRuleResponse\n\t\tvar err error\n\t\tdefer clo...
[ "0.7489635", "0.70623237", "0.628145", "0.61440516", "0.5646188", "0.55109155", "0.54183745", "0.5369351", "0.5364473", "0.53176093", "0.5313999", "0.5264016", "0.5197791", "0.5184923", "0.5144244", "0.514319", "0.5125727", "0.51196355", "0.5111975", "0.5102731", "0.5095808",...
0.71831626
1
DeleteVpdGrantRuleWithChan invokes the eflo.DeleteVpdGrantRule API asynchronously
func (client *Client) DeleteVpdGrantRuleWithChan(request *DeleteVpdGrantRuleRequest) (<-chan *DeleteVpdGrantRuleResponse, <-chan error) { responseChan := make(chan *DeleteVpdGrantRuleResponse, 1) errChan := make(chan error, 1) err := client.AddAsyncTask(func() { defer close(responseChan) defer close(errChan) response, err := client.DeleteVpdGrantRule(request) if err != nil { errChan <- err } else { responseChan <- response } }) if err != nil { errChan <- err close(responseChan) close(errChan) } return responseChan, errChan }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (client *Client) DeleteVpdGrantRuleWithCallback(request *DeleteVpdGrantRuleRequest, callback func(response *DeleteVpdGrantRuleResponse, err error)) <-chan int {\n\tresult := make(chan int, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tvar response *DeleteVpdGrantRuleResponse\n\t\tvar err error\n\t\tdefer clo...
[ "0.7585028", "0.63448346", "0.5797064", "0.5475028", "0.54170793", "0.53912616", "0.5266579", "0.5227858", "0.5227255", "0.5184082", "0.49702325", "0.4959252", "0.48021874", "0.47766876", "0.47660303", "0.4763217", "0.4746066", "0.47404018", "0.47138152", "0.46389297", "0.463...
0.8069158
0
DeleteVpdGrantRuleWithCallback invokes the eflo.DeleteVpdGrantRule API asynchronously
func (client *Client) DeleteVpdGrantRuleWithCallback(request *DeleteVpdGrantRuleRequest, callback func(response *DeleteVpdGrantRuleResponse, err error)) <-chan int { result := make(chan int, 1) err := client.AddAsyncTask(func() { var response *DeleteVpdGrantRuleResponse var err error defer close(result) response, err = client.DeleteVpdGrantRule(request) callback(response, err) result <- 1 }) if err != nil { defer close(result) callback(nil, err) result <- 0 } return result }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (client *Client) DeleteVpdGrantRuleWithChan(request *DeleteVpdGrantRuleRequest) (<-chan *DeleteVpdGrantRuleResponse, <-chan error) {\n\tresponseChan := make(chan *DeleteVpdGrantRuleResponse, 1)\n\terrChan := make(chan error, 1)\n\terr := client.AddAsyncTask(func() {\n\t\tdefer close(responseChan)\n\t\tdefer c...
[ "0.68763554", "0.6586179", "0.6053671", "0.5836633", "0.5591262", "0.5553131", "0.55327445", "0.51898825", "0.5092163", "0.49991354", "0.49571776", "0.48992956", "0.48691872", "0.48664564", "0.48612252", "0.48151034", "0.48070365", "0.47596213", "0.47545245", "0.4747927", "0....
0.8300928
0
CreateDeleteVpdGrantRuleRequest creates a request to invoke DeleteVpdGrantRule API
func CreateDeleteVpdGrantRuleRequest() (request *DeleteVpdGrantRuleRequest) { request = &DeleteVpdGrantRuleRequest{ RpcRequest: &requests.RpcRequest{}, } request.InitWithApiInfo("eflo", "2022-05-30", "DeleteVpdGrantRule", "eflo", "openAPI") request.Method = requests.POST return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CreateDeleteVpdGrantRuleResponse() (response *DeleteVpdGrantRuleResponse) {\n\tresponse = &DeleteVpdGrantRuleResponse{\n\t\tBaseResponse: &responses.BaseResponse{},\n\t}\n\treturn\n}", "func CreateCreateVpdGrantRuleRequest() (request *CreateVpdGrantRuleRequest) {\n\trequest = &CreateVpdGrantRuleRequest{\n\t...
[ "0.6789691", "0.6448388", "0.6266018", "0.61834526", "0.6073148", "0.60366744", "0.6007101", "0.57567215", "0.5755069", "0.57518935", "0.5743048", "0.5707084", "0.5651492", "0.5617365", "0.55842", "0.55457777", "0.5540249", "0.5528424", "0.5467309", "0.54369605", "0.53962755"...
0.8559896
0
CreateDeleteVpdGrantRuleResponse creates a response to parse from DeleteVpdGrantRule response
func CreateDeleteVpdGrantRuleResponse() (response *DeleteVpdGrantRuleResponse) { response = &DeleteVpdGrantRuleResponse{ BaseResponse: &responses.BaseResponse{}, } return }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func CreateDeleteVpdGrantRuleRequest() (request *DeleteVpdGrantRuleRequest) {\n\trequest = &DeleteVpdGrantRuleRequest{\n\t\tRpcRequest: &requests.RpcRequest{},\n\t}\n\trequest.InitWithApiInfo(\"eflo\", \"2022-05-30\", \"DeleteVpdGrantRule\", \"eflo\", \"openAPI\")\n\trequest.Method = requests.POST\n\treturn\n}", ...
[ "0.722374", "0.68933475", "0.614391", "0.61086893", "0.5957358", "0.59270376", "0.57114804", "0.56331295", "0.56186", "0.56060326", "0.55410606", "0.53387797", "0.5270599", "0.52538157", "0.52283925", "0.5223366", "0.5216195", "0.5184312", "0.51793605", "0.51685613", "0.51684...
0.8677458
0
use isCountryOpts instead of areOpts because it saves code in the test and tests the areOpts function with a relatively large dataset tests all isXxxOpt apart from isSortByOpt as well
func TestAreOpts(t *testing.T) { cases := []struct { c string isOpt bool }{ {"se", true}, {"bg", true}, {"il", true}, {"za", true}, {"ba", false}, {"12", false}, {"", false}, {"hugh", false}, } for _, i := range cases { var isCountry bool = isOptOf(i.c, countryOpts) if isCountry != i.isOpt { t.Errorf("Expected %v but got %v when case=%v", i.isOpt, isCountry, i.c) } } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func getOpts(opt ...wrapping.Option) (*options, error) {\n\t// First, separate out options into local and global\n\topts := getDefaultOptions()\n\tvar wrappingOptions []wrapping.Option\n\tvar localOptions []OptionFunc\n\tfor _, o := range opt {\n\t\tif o == nil {\n\t\t\tcontinue\n\t\t}\n\t\tiface := o()\n\t\tswitc...
[ "0.5510299", "0.5391247", "0.5304554", "0.51175886", "0.50980186", "0.5081817", "0.5009267", "0.49521753", "0.49211538", "0.48807758", "0.48724702", "0.48723876", "0.48486164", "0.4811651", "0.47531763", "0.47263643", "0.47097528", "0.47027934", "0.4702349", "0.4642556", "0.4...
0.7578039
0
AggregateId provides a mock function with given fields:
func (_m *MockAggregate) AggregateId() AggregateId { ret := _m.Called() var r0 AggregateId if rf, ok := ret.Get(0).(func() AggregateId); ok { r0 = rf() } else { if ret.Get(0) != nil { r0 = ret.Get(0).(AggregateId) } } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func MockAggregateSealProof(proofType abi.RegisteredSealProof, minerAddr address.Address, count int) ([]byte, error) {\n\tproof := make([]byte, aggProofLen(count))\n\ti := copy(proof, mockAggregateSealProofPrefix)\n\tbinary.BigEndian.PutUint64(proof[i:], uint64(proofType))\n\ti += 8\n\tbinary.BigEndian.PutUint64(p...
[ "0.5495845", "0.5490403", "0.54627246", "0.5439933", "0.54380864", "0.5367581", "0.5240518", "0.5239894", "0.5228337", "0.5196607", "0.5162903", "0.5103768", "0.5070772", "0.5063013", "0.50611025", "0.5040642", "0.50355613", "0.5028443", "0.50251395", "0.50246865", "0.5022824...
0.6844021
0
AggregateName provides a mock function with given fields:
func (_m *MockAggregate) AggregateName() string { ret := _m.Called() var r0 string if rf, ok := ret.Get(0).(func() string); ok { r0 = rf() } else { r0 = ret.Get(0).(string) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockORM) Group(query string) ORM {\n\tret := _m.Called(query)\n\n\tvar r0 ORM\n\tif rf, ok := ret.Get(0).(func(string) ORM); ok {\n\t\tr0 = rf(query)\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).(ORM)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockProduct) GetAggregationByName(arg0 con...
[ "0.54706424", "0.5427147", "0.5411635", "0.53259814", "0.52480215", "0.5112713", "0.5103304", "0.50429875", "0.50058126", "0.49664485", "0.49663937", "0.49218696", "0.4901599", "0.48994717", "0.48761216", "0.4872714", "0.4859629", "0.48289967", "0.4812752", "0.47973967", "0.4...
0.6614694
0
Apply provides a mock function with given fields: _a0
func (_m *MockAggregate) Apply(_a0 Event) error { ret := _m.Called(_a0) var r0 error if rf, ok := ret.Get(0).(func(Event) error); ok { r0 = rf(_a0) } else { r0 = ret.Error(0) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *mockUpdater) ApplyUpdate(_a0 context.Context, _a1 updater.Update) error {\n\tret := _m.Called(_a0, _a1)\n\n\tvar r0 error\n\tif rf, ok := ret.Get(0).(func(context.Context, updater.Update) error); ok {\n\t\tr0 = rf(_a0, _a1)\n\t} else {\n\t\tr0 = ret.Error(0)\n\t}\n\n\treturn r0\n}", "func (_m *MockDisc...
[ "0.6667897", "0.6231973", "0.60797775", "0.57886577", "0.569986", "0.5681185", "0.56517804", "0.5511168", "0.5505724", "0.54718584", "0.5459437", "0.54411364", "0.5426445", "0.54227555", "0.53494734", "0.53305525", "0.5329849", "0.52974755", "0.5293721", "0.52798426", "0.5278...
0.7186859
0
OriginalVersion provides a mock function with given fields:
func (_m *MockAggregate) OriginalVersion() int { ret := _m.Called() var r0 int if rf, ok := ret.Get(0).(func() int); ok { r0 = rf() } else { r0 = ret.Get(0).(int) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func newVersionCheckerMock(version string, tags []string) *VersionChecker {\n\n\tfixedAppVersion := fixVersion(version)\n\n\treturn &VersionChecker{\n\t\tfixedAppVersion: fixedAppVersion,\n\t\tversionSource: &versionCheckerMock{\n\t\t\ttags: tags,\n\t\t\tfixVersionStrFunc: fixVersion,\n\t\t\ttagFilter...
[ "0.6774614", "0.6536822", "0.65328676", "0.65046424", "0.6437681", "0.6410609", "0.61703193", "0.61611354", "0.6090569", "0.60708857", "0.60435575", "0.6039088", "0.599811", "0.59314656", "0.59307015", "0.59270644", "0.5898324", "0.57774776", "0.57339287", "0.56818753", "0.56...
0.7191302
0
StoreEvent provides a mock function with given fields: _a0
func (_m *MockAggregate) StoreEvent(_a0 EventData) { _m.Called(_a0) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *MockEventRepository) Store(arg0 *sweeper.Event) error {\n\tm.ctrl.T.Helper()\n\tret := m.ctrl.Call(m, \"Store\", arg0)\n\tret0, _ := ret[0].(error)\n\treturn ret0\n}", "func MockConsensusEvent(hash []byte, round uint64, step uint8, keys []key.ConsensusKeys, p *user.Provisioners, i ...int) consensus.Even...
[ "0.6289277", "0.62114894", "0.62100565", "0.6184719", "0.6097846", "0.5966063", "0.59371525", "0.5930946", "0.5930256", "0.58361834", "0.57825756", "0.5665977", "0.56246656", "0.5573866", "0.5529164", "0.55186653", "0.55073816", "0.5465433", "0.54647565", "0.54395753", "0.540...
0.78557014
0
Version provides a mock function with given fields:
func (_m *MockAggregate) Version() int { ret := _m.Called() var r0 int if rf, ok := ret.Get(0).(func() int); ok { r0 = rf() } else { r0 = ret.Get(0).(int) } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *U2FDevice) Version() (string, error) {\n\tret := _m.Called()\n\n\tvar r0 string\n\tif rf, ok := ret.Get(0).(func() string); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(string)\n\t}\n\n\tvar r1 error\n\tif rf, ok := ret.Get(1).(func() error); ok {\n\t\tr1 = rf()\n\t} else {\n\t\tr1 = ret.Error(1...
[ "0.6758605", "0.67251813", "0.67000055", "0.6609332", "0.64761645", "0.6366889", "0.63209385", "0.6299836", "0.6254888", "0.6251358", "0.62467533", "0.6206996", "0.61766696", "0.61749876", "0.6163177", "0.61334395", "0.61285645", "0.6045214", "0.60311085", "0.59991527", "0.59...
0.6841355
0
clearUncommittedEvents provides a mock function with given fields:
func (_m *MockAggregate) clearUncommittedEvents() { _m.Called() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockAggregate) getUncommittedEvents() []Event {\n\tret := _m.Called()\n\n\tvar r0 []Event\n\tif rf, ok := ret.Get(0).(func() []Event); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tif ret.Get(0) != nil {\n\t\t\tr0 = ret.Get(0).([]Event)\n\t\t}\n\t}\n\n\treturn r0\n}", "func (m *MockEventDao) UnfinishedEvents(ta...
[ "0.70760024", "0.6043022", "0.5928685", "0.5716477", "0.55757356", "0.5526177", "0.54580176", "0.54382396", "0.53673023", "0.53628206", "0.53532636", "0.5341673", "0.53009766", "0.5300278", "0.52957547", "0.52558625", "0.5241087", "0.5238653", "0.5225781", "0.51996076", "0.51...
0.78784215
0
getUncommittedEvents provides a mock function with given fields:
func (_m *MockAggregate) getUncommittedEvents() []Event { ret := _m.Called() var r0 []Event if rf, ok := ret.Get(0).(func() []Event); ok { r0 = rf() } else { if ret.Get(0) != nil { r0 = ret.Get(0).([]Event) } } return r0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockAggregate) clearUncommittedEvents() {\n\t_m.Called()\n}", "func (_m *LogPollerWrapper) LatestEvents() ([]types.OracleRequest, []types.OracleResponse, error) {\n\tret := _m.Called()\n\n\tvar r0 []types.OracleRequest\n\tvar r1 []types.OracleResponse\n\tvar r2 error\n\tif rf, ok := ret.Get(0).(func() ...
[ "0.67549837", "0.6047971", "0.60382354", "0.5892229", "0.5887458", "0.588709", "0.5847266", "0.58460593", "0.58450013", "0.5806093", "0.5781316", "0.5730033", "0.5698806", "0.56369007", "0.56117475", "0.5567934", "0.55019575", "0.54096115", "0.5402011", "0.5381635", "0.537648...
0.78118217
0
incrementVersion provides a mock function with given fields:
func (_m *MockAggregate) incrementVersion() { _m.Called() }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockAggregate) setVersion(_a0 int) {\n\t_m.Called(_a0)\n}", "func (m *MockManager) UpdateVersion() {\n\tm.ctrl.Call(m, \"UpdateVersion\")\n}", "func (_m *MockAggregate) Version() int {\n\tret := _m.Called()\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func() int); ok {\n\t\tr0 = rf()\n\t} else {\n\t\t...
[ "0.6878722", "0.6719195", "0.6694321", "0.66044044", "0.63764864", "0.6363949", "0.620103", "0.615884", "0.61135453", "0.6080862", "0.6059925", "0.60365295", "0.60349417", "0.5980796", "0.5962855", "0.5956764", "0.5921386", "0.591762", "0.58893186", "0.587819", "0.57154053", ...
0.8021044
0
setVersion provides a mock function with given fields: _a0
func (_m *MockAggregate) setVersion(_a0 int) { _m.Called(_a0) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (_m *MockAggregate) incrementVersion() {\n\t_m.Called()\n}", "func (_m *MockAggregate) Version() int {\n\tret := _m.Called()\n\n\tvar r0 int\n\tif rf, ok := ret.Get(0).(func() int); ok {\n\t\tr0 = rf()\n\t} else {\n\t\tr0 = ret.Get(0).(int)\n\t}\n\n\treturn r0\n}", "func newVersionCheckerMock(version stri...
[ "0.7078154", "0.70451313", "0.66910595", "0.6581204", "0.6534845", "0.65255547", "0.629458", "0.62848294", "0.62847006", "0.62608105", "0.6256023", "0.62408495", "0.620789", "0.62014997", "0.61569256", "0.6112755", "0.608206", "0.60665065", "0.60335004", "0.60245526", "0.5984...
0.7880125
0
New returns a new instance of Exp struct
func New(s string) *Exp { exp := &Exp{context: s} exp.init() return exp }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewExpCache(ctx context.Context, ttl time.Duration) *ExpCache {\n\tc := &ExpCache{\n\t\tttl: ttl,\n\t}\n\tgo c.runClean(ctx)\n\treturn c\n}", "func New(value int64, exp int32) Decimal {\n\treturn Decimal{\n\t\tvalue: big.NewInt(value),\n\t\texp: exp,\n\t}\n}", "func New(value int64, exp int32) Decimal {...
[ "0.6859243", "0.6837793", "0.6766397", "0.66804975", "0.6581954", "0.65122634", "0.627142", "0.6140074", "0.60797876", "0.60538423", "0.5930504", "0.5929171", "0.58859235", "0.58518046", "0.5833372", "0.5824712", "0.5821287", "0.5820043", "0.58156526", "0.57652354", "0.576247...
0.76039636
0
Eval returns calculated result of the expression
func (e *Exp) Eval() float64 { e.init() result, _ := e.eval(e.opTree) return result }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func ExampleEval() {\n\tfmt.Println(Eval(\"5\"))\n\tfmt.Println(Eval(\"1 + 2\"))\n\tfmt.Println(Eval(\"1 - 2 + 3\"))\n\tfmt.Println(Eval(\"3 * ( 3 + 1 * 3 ) / 2\"))\n\tfmt.Println(Eval(\"3 * ( ( 3 + 1 ) * 3 ) / 2\"))\n\t//OutPut:\n\t//5\n\t//3\n\t//2\n\t//9\n\t//18\n}", "func Eval(ctx context.Context, e Expr, vs...
[ "0.78225446", "0.7148306", "0.70671", "0.7050722", "0.6980047", "0.6950422", "0.69288963", "0.685653", "0.6832329", "0.66447026", "0.6639212", "0.6600153", "0.6545889", "0.65193206", "0.65044826", "0.6502576", "0.6476158", "0.64584225", "0.6431236", "0.6425402", "0.6418105", ...
0.7565433
1
tokenize converts expression context to tokens
func (e *Exp) tokenize() []string { siz := len(e.context) tokens := make([]string, 0, 1) for i := 0; i < siz; i++ { ch := e.context[i] switch { case e.isSpace(ch): continue case ch == '(': tokens = append(tokens, "(") case ch == ')': tokens = append(tokens, ")") default: isOperator, op := e.checkOperatorAt(i) if !isOperator { op = e.getOperandAt(i) } tokens = append(tokens, op) i = i + len(op) - 1 } } return tokens }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func tokenize(text string) ([]Token, error) {\n\t// So we can split on whitespace, TODO - be better, maybe use strings.FieldsFunc() ?\n\ttext = strings.ReplaceAll(text, \"(\", \" ( \")\n\ttext = strings.ReplaceAll(text, \")\", \" ) \")\n\n\twords := strings.Fields(text)\n\ttokens := make([]Token, 0, len(words))\n\...
[ "0.6739857", "0.6652426", "0.6560778", "0.6519955", "0.6415867", "0.6381054", "0.6336418", "0.6074973", "0.6074295", "0.60185516", "0.59861237", "0.59165454", "0.58904195", "0.5833101", "0.58267784", "0.5766809", "0.57505447", "0.5721691", "0.5713765", "0.56996644", "0.567599...
0.78800166
0
In go a main function is automatically called.
func main() { /**This is the shortform syntax for declaring a variable. It infers the type automatically, but still adheres to the precepts of static typing; NO implicit type casting. NOTE: This only works INSIDE functions, you cannot define global scope variables this way*/ name := "John Doe" greet(name, greeting) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func main() {\n\tif err := run(); err != nil {\n\t\tlog.Fatalln(err)\n\t}\n}", "func Main(args ...interface{}) {\n\n\t// ...\n}", "func main() {\n\tgimain.Main(func() {\n\t\tmainrun()\n\t})\n}", "func main() {\n\tgimain.Main(func() {\n\t\tmainrun()\n\t})\n}", "func main() {\n\tgimain.Main(func() {\n\t\tmai...
[ "0.83128273", "0.81733865", "0.8166593", "0.8166593", "0.8166593", "0.8166593", "0.8166593", "0.8113298", "0.8093467", "0.8060706", "0.8060706", "0.8047963", "0.80479354", "0.79819757", "0.79602563", "0.7953302", "0.7953302", "0.7948158", "0.79363006", "0.7931229", "0.7930159...
0.0
-1
A simple function declaration that takes two parameters and prints them
func greet(name string, greeting string) { fmt.Println(greeting, name) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func functionName2(param1 string, param2 int) {\n\tfmt.Println(param1, param2)\n}", "func functionName3(param1, param2 int) {\n\tfmt.Print(param1, param2)\n}", "func main() {\n\tfmt.Println(first, second)\n}", "func printTwoString(message, messageTwo string){\n\tfmt.Println(message,messageTwo)\n}", "func S...
[ "0.7193864", "0.7145628", "0.659819", "0.6474157", "0.64014083", "0.6371635", "0.63422257", "0.6295856", "0.6172505", "0.6172505", "0.6147449", "0.61148703", "0.61049265", "0.60778385", "0.6029526", "0.60243577", "0.60058254", "0.5994484", "0.5975014", "0.5965018", "0.595032"...
0.59283566
23
NewInterRegionTrafficQosQueue registers a new resource with the given unique name, arguments, and options.
func NewInterRegionTrafficQosQueue(ctx *pulumi.Context, name string, args *InterRegionTrafficQosQueueArgs, opts ...pulumi.ResourceOption) (*InterRegionTrafficQosQueue, error) { if args == nil { return nil, errors.New("missing one or more required arguments") } if args.Dscps == nil { return nil, errors.New("invalid value for required argument 'Dscps'") } if args.RemainBandwidthPercent == nil { return nil, errors.New("invalid value for required argument 'RemainBandwidthPercent'") } if args.TrafficQosPolicyId == nil { return nil, errors.New("invalid value for required argument 'TrafficQosPolicyId'") } opts = internal.PkgResourceDefaultOpts(opts) var resource InterRegionTrafficQosQueue err := ctx.RegisterResource("alicloud:cen/interRegionTrafficQosQueue:InterRegionTrafficQosQueue", name, args, &resource, opts...) if err != nil { return nil, err } return &resource, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewQueue(ctx *pulumi.Context,\n\tname string, args *QueueArgs, opts ...pulumi.ResourceOption) (*Queue, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.HoursOfOperationArn == nil {\n\t\treturn nil, errors.New(\"invalid value for required ar...
[ "0.61182225", "0.5922367", "0.5234103", "0.5177013", "0.50431615", "0.49678585", "0.48655295", "0.4863346", "0.48462552", "0.4834142", "0.4833258", "0.4809173", "0.4791956", "0.47916126", "0.4789563", "0.4777478", "0.47687298", "0.4752891", "0.474635", "0.4737543", "0.4733976...
0.7100234
0
GetInterRegionTrafficQosQueue gets an existing InterRegionTrafficQosQueue resource's state with the given name, ID, and optional state properties that are used to uniquely qualify the lookup (nil if not required).
func GetInterRegionTrafficQosQueue(ctx *pulumi.Context, name string, id pulumi.IDInput, state *InterRegionTrafficQosQueueState, opts ...pulumi.ResourceOption) (*InterRegionTrafficQosQueue, error) { var resource InterRegionTrafficQosQueue err := ctx.ReadResource("alicloud:cen/interRegionTrafficQosQueue:InterRegionTrafficQosQueue", name, id, state, &resource, opts...) if err != nil { return nil, err } return &resource, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewInterRegionTrafficQosQueue(ctx *pulumi.Context,\n\tname string, args *InterRegionTrafficQosQueueArgs, opts ...pulumi.ResourceOption) (*InterRegionTrafficQosQueue, error) {\n\tif args == nil {\n\t\treturn nil, errors.New(\"missing one or more required arguments\")\n\t}\n\n\tif args.Dscps == nil {\n\t\tretur...
[ "0.6420932", "0.5977712", "0.5420494", "0.53960836", "0.5246199", "0.48895094", "0.47974697", "0.4722064", "0.4676575", "0.45463547", "0.45444638", "0.45172334", "0.4422663", "0.43709758", "0.43634933", "0.43407527", "0.43357632", "0.4238608", "0.42307967", "0.4191139", "0.41...
0.77743757
0
The DSCP value of the traffic packet to be matched in the current queue, ranging from 0 to 63.
func (o InterRegionTrafficQosQueueOutput) Dscps() pulumi.StringArrayOutput { return o.ApplyT(func(v *InterRegionTrafficQosQueue) pulumi.StringArrayOutput { return v.Dscps }).(pulumi.StringArrayOutput) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (p UDPPort) Value() int32 {\n\treturn p.port\n}", "func getCNTFRQ() TSCValue", "func (p TCPPort) Value() int32 {\n\treturn p.port\n}", "func (p FlowProtocol) Value() int32 {\n\treturn int32(p)\n}", "func (this *VlqBase128Be) Value() (v int, err error) {\n\tif (this._f_value) {\n\t\treturn this.value, ...
[ "0.5506935", "0.5470176", "0.53617424", "0.5298042", "0.52958035", "0.5238294", "0.51907325", "0.5134276", "0.49141097", "0.4898104", "0.4896561", "0.4892348", "0.48907828", "0.48768947", "0.4873316", "0.48707613", "0.4837199", "0.4807118", "0.47696432", "0.47665495", "0.4751...
0.47516108
21
The description information of the traffic scheduling policy.
func (o InterRegionTrafficQosQueueOutput) InterRegionTrafficQosQueueDescription() pulumi.StringPtrOutput { return o.ApplyT(func(v *InterRegionTrafficQosQueue) pulumi.StringPtrOutput { return v.InterRegionTrafficQosQueueDescription }).(pulumi.StringPtrOutput) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (o OrganizationSecurityPolicyOutput) Description() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *OrganizationSecurityPolicy) pulumi.StringPtrOutput { return v.Description }).(pulumi.StringPtrOutput)\n}", "func (s *SecurityRule) Description() string {\n\treturn s.Description_\n}", "func (o ServerPoli...
[ "0.6332255", "0.62671643", "0.62489957", "0.62482285", "0.62482285", "0.62149894", "0.6198616", "0.61557883", "0.60916317", "0.60522413", "0.6021759", "0.60082877", "0.5975633", "0.59616953", "0.59614545", "0.59455854", "0.5939765", "0.58849066", "0.58648825", "0.58433276", "...
0.0
-1
The name of the traffic scheduling policy.
func (o InterRegionTrafficQosQueueOutput) InterRegionTrafficQosQueueName() pulumi.StringPtrOutput { return o.ApplyT(func(v *InterRegionTrafficQosQueue) pulumi.StringPtrOutput { return v.InterRegionTrafficQosQueueName }).(pulumi.StringPtrOutput) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (p scheduleOnHost) name() policyName {\n\treturn scheduleOnHostAnnotationPolicy\n}", "func (p scheduleWithOverProvisioningAwareness) name() policyName {\n\treturn overProvisioningPolicy\n}", "func (p preferScheduleOnHost) name() policyName {\n\treturn preferScheduleOnHostAnnotationPolicy\n}", "func (o R...
[ "0.74736947", "0.72675604", "0.7266452", "0.67986524", "0.6659939", "0.6645844", "0.66170263", "0.661601", "0.6613454", "0.65992755", "0.65884906", "0.65819204", "0.65560925", "0.6417246", "0.6395096", "0.638605", "0.6384816", "0.638258", "0.6348977", "0.63074774", "0.6294279...
0.0
-1
The percentage of crossregion bandwidth that the current queue can use.
func (o InterRegionTrafficQosQueueOutput) RemainBandwidthPercent() pulumi.IntOutput { return o.ApplyT(func(v *InterRegionTrafficQosQueue) pulumi.IntOutput { return v.RemainBandwidthPercent }).(pulumi.IntOutput) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (br *BandwidthMeter) Bandwidth() (bytesPerSec float64) {\n deltaSecs := br.lastRead.Sub(br.start).Seconds()\n bytesPerSec = float64(br.bytesRead) / deltaSecs\n return\n}", "func percentAvailable(statfs *syscall.Statfs_t) uint8 {\n\tif statfs.Blocks == 0 {\n\t\treturn uint8(0)\n\t}\n\treturn uint8(f...
[ "0.64463323", "0.56960267", "0.5637167", "0.5596584", "0.5571499", "0.5541268", "0.5538112", "0.54866683", "0.5401307", "0.5400026", "0.5393798", "0.53594464", "0.5357982", "0.53403497", "0.5308735", "0.5300413", "0.5234758", "0.52156454", "0.51747286", "0.5167365", "0.511746...
0.633253
1
The status of the traffic scheduling policy. Creating: The function is being created.Active: available.Modifying: is being modified.Deleting: Deleted.Deleted: Deleted.
func (o InterRegionTrafficQosQueueOutput) Status() pulumi.StringOutput { return o.ApplyT(func(v *InterRegionTrafficQosQueue) pulumi.StringOutput { return v.Status }).(pulumi.StringOutput) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (o GetSecurityPoliciesPolicyOutput) Status() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetSecurityPoliciesPolicy) string { return v.Status }).(pulumi.StringOutput)\n}", "func (o ServerPolicyOutput) Status() pulumi.StringPtrOutput {\n\treturn o.ApplyT(func(v *ServerPolicy) pulumi.StringPtrOutput { retu...
[ "0.637768", "0.61041886", "0.60253423", "0.6005666", "0.58875674", "0.58757114", "0.58757114", "0.5827052", "0.5817711", "0.5783979", "0.57203084", "0.5682372", "0.56083393", "0.5584729", "0.5578477", "0.55780166", "0.55646974", "0.55562615", "0.5540438", "0.5537126", "0.5507...
0.564104
12
The ID of the traffic scheduling policy.
func (o InterRegionTrafficQosQueueOutput) TrafficQosPolicyId() pulumi.StringOutput { return o.ApplyT(func(v *InterRegionTrafficQosQueue) pulumi.StringOutput { return v.TrafficQosPolicyId }).(pulumi.StringOutput) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (r *Policy) ID() pulumi.IDOutput {\n\treturn r.s.ID()\n}", "func (o OrganizationSecurityPolicyOutput) PolicyId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v *OrganizationSecurityPolicy) pulumi.StringOutput { return v.PolicyId }).(pulumi.StringOutput)\n}", "func (v *AddDecisionTaskRequest) GetScheduleI...
[ "0.6260987", "0.6210173", "0.61886704", "0.6113732", "0.6055413", "0.60344696", "0.59402114", "0.58527046", "0.5847763", "0.58081985", "0.5807119", "0.5807077", "0.57898337", "0.5768478", "0.5742225", "0.56864387", "0.5664078", "0.5663449", "0.565793", "0.5621334", "0.5612016...
0.56604415
18
getHTTPRecoder creates a new httpClient that records all HTTP requests in a cassette. This cassette is then replayed whenever tests are executed again. This means that once the requests are recorded in the cassette, no more real HTTP request must be made to run the tests. It is important to call add a `defer cleanup()` so the given cassette files are correctly closed and saved after the requests.
func getHTTPRecoder(t *testing.T, update bool) (client *http.Client, cleanup func(), err error) { recorderMode := recorder.ModeReplaying if update { recorderMode = recorder.ModeRecording } // Setup recorder and scw client r, err := recorder.NewAsMode(getTestFilePath(t, ".cassette"), recorderMode, &SocketPassthroughTransport{}) if err != nil { return nil, nil, err } // Add a filter which removes Authorization headers from all requests: r.AddFilter(cassetteRequestFilter) // Remove secrets from response r.AddSaveFilter(cassetteResponseFilter) r.SetMatcher(cassetteMatcher) return &http.Client{Transport: &retryableHTTPTransport{transport: r}}, func() { assert.NoError(t, r.Stop()) // Make sure recorder is stopped once done with it }, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func GetHTTPClient() *http.Client { return httpClientPool.Get().(*http.Client) }", "func TracedHTTPClient(timeout time.Duration) *http.Client {\n\tot := project.DefaultHTTPTransport()\n\treturn &http.Client{\n\t\tTimeout: timeout,\n\t\tTransport: &withInjectedDataRoundTripper{ot},\n\t}\n}", "func httpGet(ctx...
[ "0.50657016", "0.50615436", "0.50130624", "0.49709573", "0.49220985", "0.49121484", "0.49045214", "0.4903276", "0.4830333", "0.47424307", "0.47421747", "0.47300133", "0.47285676", "0.4725351", "0.47202286", "0.47041163", "0.4699457", "0.4673111", "0.4672279", "0.46473005", "0...
0.7810919
0
AddSSLKeys adds the given ssl keys to the given delivery service.
func AddSSLKeys(w http.ResponseWriter, r *http.Request) { inf, userErr, sysErr, errCode := api.NewInfo(r, nil, nil) if userErr != nil || sysErr != nil { api.HandleErr(w, r, inf.Tx.Tx, errCode, userErr, sysErr) return } defer inf.Close() if !inf.Config.RiakEnabled { api.HandleErr(w, r, inf.Tx.Tx, http.StatusInternalServerError, nil, errors.New("adding SSL keys to Riak for delivery service: Riak is not configured")) return } req := tc.DeliveryServiceAddSSLKeysReq{} if err := api.Parse(r.Body, inf.Tx.Tx, &req); err != nil { api.HandleErr(w, r, inf.Tx.Tx, http.StatusBadRequest, errors.New("parsing request: "+err.Error()), nil) return } if userErr, sysErr, errCode := tenant.Check(inf.User, *req.DeliveryService, inf.Tx.Tx); userErr != nil || sysErr != nil { api.HandleErr(w, r, inf.Tx.Tx, errCode, userErr, sysErr) return } certChain, isUnknownAuth, err := verifyCertificate(req.Certificate.Crt, "") if err != nil { api.HandleErr(w, r, inf.Tx.Tx, http.StatusBadRequest, errors.New("verifying certificate: "+err.Error()), nil) return } req.Certificate.Crt = certChain base64EncodeCertificate(req.Certificate) dsSSLKeys := tc.DeliveryServiceSSLKeys{ CDN: *req.CDN, DeliveryService: *req.DeliveryService, Hostname: *req.HostName, Key: *req.Key, Version: *req.Version, Certificate: *req.Certificate, } if err := riaksvc.PutDeliveryServiceSSLKeysObj(dsSSLKeys, inf.Tx.Tx, inf.Config.RiakAuthOptions); err != nil { api.HandleErr(w, r, inf.Tx.Tx, http.StatusInternalServerError, nil, errors.New("putting SSL keys in Riak for delivery service '"+*req.DeliveryService+"': "+err.Error())) return } if err := updateSSLKeyVersion(*req.DeliveryService, req.Version.ToInt64(), inf.Tx.Tx); err != nil { api.HandleErr(w, r, inf.Tx.Tx, http.StatusInternalServerError, nil, errors.New("adding SSL keys to delivery service '"+*req.DeliveryService+"': "+err.Error())) return } if isUnknownAuth { api.WriteRespAlert(w, r, tc.WarnLevel, "WARNING: SSL keys were successfully added for '"+*req.DeliveryService+"', but 
the certificate is signed by an unknown authority and may be invalid") return } api.WriteResp(w, r, "Successfully added ssl keys for "+*req.DeliveryService) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *KeyStore) AddKeys(role string, threshold uint, expiry string, keys map[string]*KeyInfo) error {\n\tif threshold == 0 {\n\t\treturn errors.Errorf(\"invalid threshold (0)\")\n\t}\n\n\trk := roleKeys{threshold: threshold, expiry: expiry, keys: &sync.Map{}}\n\n\tfor id, info := range keys {\n\t\tpub, err := i...
[ "0.53601336", "0.523952", "0.5065318", "0.5049098", "0.501079", "0.49524736", "0.4897458", "0.48651546", "0.486056", "0.48080438", "0.47436917", "0.47358012", "0.4728572", "0.47049695", "0.46828505", "0.46395725", "0.4624217", "0.46203995", "0.45901638", "0.45896345", "0.4586...
0.8105353
0
GetSSLKeysByHostName fetches the ssl keys for a deliveryservice specified by the fully qualified hostname
func GetSSLKeysByHostName(w http.ResponseWriter, r *http.Request) { inf, userErr, sysErr, errCode := api.NewInfo(r, []string{"hostname"}, nil) if userErr != nil || sysErr != nil { api.HandleErr(w, r, inf.Tx.Tx, errCode, userErr, sysErr) return } defer inf.Close() if inf.Config.RiakEnabled == false { api.HandleErr(w, r, inf.Tx.Tx, http.StatusServiceUnavailable, errors.New("the Riak service is unavailable"), errors.New("getting SSL keys from Riak by host name: Riak is not configured")) return } hostName := inf.Params["hostname"] domainName := "" hostRegex := "" strArr := strings.Split(hostName, ".") ln := len(strArr) if ln > 1 { for i := 2; i < ln-1; i++ { domainName += strArr[i] + "." } domainName += strArr[ln-1] hostRegex = `.*\.` + strArr[1] + `\..*` } // lookup the cdnID cdnID, ok, err := getCDNIDByDomainname(domainName, inf.Tx.Tx) if err != nil { api.HandleErr(w, r, inf.Tx.Tx, http.StatusInternalServerError, nil, errors.New("getting cdn id by domain name: "+err.Error())) return } if !ok { api.WriteRespAlert(w, r, tc.InfoLevel, " - a cdn does not exist for the domain: "+domainName+" parsed from hostname: "+hostName) return } // now lookup the deliveryservice xmlID xmlID, ok, err := getXMLID(cdnID, hostRegex, inf.Tx.Tx) if err != nil { api.HandleErr(w, r, inf.Tx.Tx, http.StatusInternalServerError, nil, errors.New("getting xml id: "+err.Error())) return } if !ok { api.WriteRespAlert(w, r, tc.InfoLevel, " - a delivery service does not exist for a host with hostname of "+hostName) return } getSSLKeysByXMLIDHelper(xmlID, inf, w, r) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (client IotHubResourceClient) GetKeysForKeyNameSender(req *http.Request) (*http.Response, error) {\n\treturn client.Send(req, azure.DoRetryWithRegistration(client.Client))\n}", "func (*CaHostKeyCerts) GetPath() string { return \"/api/objects/ca/host_key_cert/\" }", "func GetSSLKeysByXMLID(w http.ResponseW...
[ "0.5425132", "0.5336563", "0.5303181", "0.5217682", "0.51962817", "0.518711", "0.51473093", "0.51252294", "0.5092041", "0.50768316", "0.50609934", "0.50054747", "0.49826443", "0.4936314", "0.493125", "0.49036396", "0.4879752", "0.48282203", "0.4825235", "0.48070812", "0.48033...
0.82809246
0
GetSSLKeysByXMLID fetches the deliveryservice ssl keys by the specified xmlID.
func GetSSLKeysByXMLID(w http.ResponseWriter, r *http.Request) { inf, userErr, sysErr, errCode := api.NewInfo(r, []string{"xmlid"}, nil) if userErr != nil || sysErr != nil { api.HandleErr(w, r, inf.Tx.Tx, errCode, userErr, sysErr) return } defer inf.Close() if inf.Config.RiakEnabled == false { api.HandleErr(w, r, inf.Tx.Tx, http.StatusServiceUnavailable, errors.New("the Riak service is unavailable"), errors.New("getting SSL keys from Riak by xml id: Riak is not configured")) return } xmlID := inf.Params["xmlid"] getSSLKeysByXMLIDHelper(xmlID, inf, w, r) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (to *Session) GetDeliveryServiceRequestByXMLID(XMLID string) ([]tc.DeliveryServiceRequest, toclientlib.ReqInf, error) {\n\treturn to.GetDeliveryServiceRequestByXMLIDWithHdr(XMLID, nil)\n}", "func GetSSLKeysByHostName(w http.ResponseWriter, r *http.Request) {\n\tinf, userErr, sysErr, errCode := api.NewInfo(r...
[ "0.61564887", "0.5915994", "0.5859082", "0.5506592", "0.5346934", "0.5332263", "0.52300006", "0.506717", "0.50242484", "0.4996197", "0.48781332", "0.47579432", "0.47013146", "0.46787006", "0.46667585", "0.46625802", "0.46300685", "0.46164986", "0.46141317", "0.46038184", "0.4...
0.8322349
0
returns the cdn_id found by domainname.
func getCDNIDByDomainname(domainName string, tx *sql.Tx) (int64, bool, error) { cdnID := int64(0) if err := tx.QueryRow(`SELECT id from cdn WHERE domain_name = $1`, domainName).Scan(&cdnID); err != nil { if err == sql.ErrNoRows { return 0, false, nil } return 0, false, err } return cdnID, true, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func lookupDomainName(domainName string) string {\n\tif du, ok := domainUuid[domainName]; ok {\n\t\treturn du\n\t}\n\treturn \"\"\n}", "func (o GetGroupResultOutput) DomainId() pulumi.StringOutput {\n\treturn o.ApplyT(func(v GetGroupResult) string { return v.DomainId }).(pulumi.StringOutput)\n}", "func domainC...
[ "0.6280613", "0.59721607", "0.5955924", "0.59257716", "0.5804063", "0.5770586", "0.57447374", "0.5641713", "0.5604987", "0.5594733", "0.5567953", "0.5559334", "0.5520176", "0.5501989", "0.54701555", "0.5468714", "0.5436906", "0.5434725", "0.5403778", "0.5378927", "0.53753287"...
0.82583183
0
returns a delivery service xmlId for a cdn by host regex.
func getXMLID(cdnID int64, hostRegex string, tx *sql.Tx) (string, bool, error) { q := ` SELECT ds.xml_id from deliveryservice ds JOIN deliveryservice_regex dr on ds.id = dr.deliveryservice AND ds.cdn_id = $1 JOIN regex r on r.id = dr.regex WHERE r.pattern = $2 ` xmlID := "" if err := tx.QueryRow(q, cdnID, hostRegex).Scan(&xmlID); err != nil { if err == sql.ErrNoRows { return "", false, nil } return "", false, errors.New("querying xml id: " + err.Error()) } return xmlID, true, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (resolver nameResolver) ExtractServiceId(host string) string {\n\tresourceName := strings.Split(host, \".\")[0]\n\treturn strings.TrimPrefix(resourceName, resolver.resourceNamePrefix)\n}", "func (i SourceInfo) ExternalServiceID() int64 {\n\tps := strings.SplitN(i.ID, \":\", 3)\n\tif len(ps) != 3 {\n\t\tretu...
[ "0.5841075", "0.4965493", "0.48057923", "0.47056535", "0.46926", "0.46767744", "0.46705613", "0.46302775", "0.46255037", "0.45803738", "0.4568915", "0.4561361", "0.4536049", "0.4521004", "0.44993153", "0.44881353", "0.44595507", "0.4455809", "0.44513792", "0.44446835", "0.441...
0.69668454
0
matchPattern matches regex against entity's path to find project name.
func matchPattern(fp string, patterns []MapPattern) (string, bool, error) { fp, err := realpath.Realpath(fp) if err != nil { return "", false, Err(fmt.Errorf("failed to get the real path: %w", err).Error()) } for _, pattern := range patterns { if pattern.Regex.MatchString(fp) { matches := pattern.Regex.FindStringSubmatch(fp) if len(matches) > 0 { params := make([]interface{}, len(matches[1:])) for i, v := range matches[1:] { params[i] = v } result, err := pyfmt.Fmt(pattern.Name, params...) if err != nil { log.Errorf("error formatting %q: %s", pattern.Name, err) continue } return result, true, nil } } } return "", false, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func ProjectRegexp() *regexp.Regexp {\n\treturn regexp.MustCompile(\"^projects/\" + NameRegex + \"$\")\n}", "func getDesiredPattern(pattern string) string {\n\twant := []string{}\n\tfor _, token := range strings.Split(pattern, \"/\") {\n\t\tif strings.HasPrefix(token, \"{\") && strings.HasSuffix(token, \"}\") {\...
[ "0.5743209", "0.5709654", "0.5678647", "0.56146234", "0.55893093", "0.5514387", "0.53998846", "0.5398883", "0.5398883", "0.5346574", "0.5334595", "0.5325938", "0.524541", "0.5237653", "0.52365726", "0.5223555", "0.52056265", "0.5200951", "0.51546204", "0.5128055", "0.51171446...
0.5454481
6
String returns its name.
func (m Map) String() string { return "project-map-detector" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (n name) String() string {\n\treturn fmt.Sprintf(n.Name)\n}", "func (n Name) String() string {\n\treturn string(n)\n}", "func (n Name) String() string {\n\treturn string(n)\n}", "func (n Named) String() string {\n\tif n.Path == \"\" {\n\t\treturn n.Name\n\t}\n\treturn fmt.Sprintf(\"%s.%s\", n.Path, n.Na...
[ "0.76239175", "0.7596077", "0.7596077", "0.73250514", "0.73232955", "0.7240509", "0.7014141", "0.6975502", "0.69344705", "0.6914544", "0.6905238", "0.6836352", "0.6781786", "0.67532694", "0.67160845", "0.66846424", "0.66834974", "0.6683491", "0.66771424", "0.66757715", "0.667...
0.0
-1
Disrupt returns true if the correct string is provided.
func (d *DependencyDisableAutoOnline) Disrupt(s string) bool { return s == "DisableGatewayAutoOnline" }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (d *HostLowerDeposit) Disrupt(s string) bool {\n\treturn s == \"lowerDeposit\"\n}", "func (d *HostMDMProgramDelayedWrite) Disrupt(s string) bool {\n\treturn s == \"MDMProgramOutputDelayWrite\"\n}", "func (d *DependencyUnsyncedConsensus) Disrupt(s string) bool {\n\treturn s == \"UnsyncedConsensus\"\n}", ...
[ "0.7409636", "0.7348035", "0.7164227", "0.7164227", "0.69773644", "0.6653572", "0.66276544", "0.6580599", "0.6532585", "0.63150513", "0.6164744", "0.50763667", "0.5054735", "0.4899544", "0.48503143", "0.48239964", "0.4809021", "0.4808761", "0.47824186", "0.4775752", "0.475206...
0.66241425
7
NewRobertaLMHead creates new RobertaLMHead.
func NewRobertaLMHead(p *nn.Path, config *bert.BertConfig) *RobertaLMHead { dense := nn.NewLinear(p.Sub("dense"), config.HiddenSize, config.HiddenSize, nn.DefaultLinearConfig()) layerNormConfig := nn.DefaultLayerNormConfig() layerNormConfig.Eps = 1e-12 layerNorm := nn.NewLayerNorm(p.Sub("layer_norm"), []int64{config.HiddenSize}, layerNormConfig) decoder := util.NewLinearNoBias(p.Sub("decoder"), config.HiddenSize, config.VocabSize, util.DefaultLinearNoBiasConfig()) bias := p.NewVar("bias", []int64{config.VocabSize}, nn.NewKaimingUniformInit()) return &RobertaLMHead{ dense: dense, decoder: decoder, layerNorm: layerNorm, bias: bias, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewRobertaClassificationHead(p *nn.Path, config *bert.BertConfig) *RobertaClassificationHead {\n\tdense := nn.NewLinear(p.Sub(\"dense\"), config.HiddenSize, config.HiddenSize, nn.DefaultLinearConfig())\n\tnumLabels := int64(len(config.Id2Label))\n\toutProj := nn.NewLinear(p.Sub(\"out_proj\"), config.HiddenSiz...
[ "0.6216581", "0.5670481", "0.55518067", "0.5520945", "0.540021", "0.524219", "0.51972264", "0.51203287", "0.510461", "0.50970316", "0.5024263", "0.50216347", "0.5017086", "0.5012589", "0.49524257", "0.4937669", "0.49000117", "0.4897512", "0.48824078", "0.4841005", "0.4789799"...
0.76238585
0
Foward forwards pass through RobertaLMHead model.
func (rh *RobertaLMHead) Forward(hiddenStates *ts.Tensor) *ts.Tensor { gelu := util.NewGelu() appliedDense := hiddenStates.Apply(rh.dense) geluFwd := gelu.Fwd(appliedDense) appliedLN := geluFwd.Apply(rh.layerNorm) appliedDecoder := appliedLN.Apply(rh.decoder) appliedBias := appliedDecoder.MustAdd(rh.bias, true) geluFwd.MustDrop() appliedDense.MustDrop() appliedLN.MustDrop() return appliedBias }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (mlm *RobertaForMaskedLM) Forward(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds, encoderHiddenStates, encoderMask *ts.Tensor, train bool) (output *ts.Tensor, hiddenStates, attentions []*ts.Tensor, err error) {\n\n\thiddenState, _, allHiddenStates, allAttentions, err := mlm.roberta.ForwardT(inputIds, ...
[ "0.6255751", "0.60953754", "0.60477453", "0.60463643", "0.6005756", "0.5827889", "0.5759985", "0.57281214", "0.56785464", "0.5660496", "0.5638164", "0.5615751", "0.55979073", "0.55658805", "0.55035555", "0.54785925", "0.54708135", "0.5467028", "0.5461007", "0.54442316", "0.54...
0.6625633
0
NewRobertaForMaskedLM builds a new RobertaForMaskedLM.
func NewRobertaForMaskedLM(p *nn.Path, config *bert.BertConfig) *RobertaForMaskedLM { roberta := bert.NewBertModel(p.Sub("roberta"), config) lmHead := NewRobertaLMHead(p.Sub("lm_head"), config) return &RobertaForMaskedLM{ roberta: roberta, lmHead: lmHead, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewRobertaLMHead(p *nn.Path, config *bert.BertConfig) *RobertaLMHead {\n\tdense := nn.NewLinear(p.Sub(\"dense\"), config.HiddenSize, config.HiddenSize, nn.DefaultLinearConfig())\n\n\tlayerNormConfig := nn.DefaultLayerNormConfig()\n\tlayerNormConfig.Eps = 1e-12\n\tlayerNorm := nn.NewLayerNorm(p.Sub(\"layer_nor...
[ "0.56021535", "0.51360244", "0.5057678", "0.47741595", "0.46441257", "0.46423316", "0.4629943", "0.45565084", "0.4553068", "0.45473307", "0.4484458", "0.44752312", "0.44710892", "0.44612712", "0.4439623", "0.44129133", "0.43823466", "0.43730444", "0.43224025", "0.43211555", "...
0.81801337
0
Load loads model from file or model name. It also updates default configuration parameters if provided. This method implements `PretrainedModel` interface.
func (mlm *RobertaForMaskedLM) Load(modelNameOrPath string, config interface{ pretrained.Config }, params map[string]interface{}, device gotch.Device) error { var urlOrFilename string // If modelName, infer to default configuration filename: if modelFile, ok := pretrained.RobertaModels[modelNameOrPath]; ok { urlOrFilename = modelFile } else { // Otherwise, just take the input urlOrFilename = modelNameOrPath } cachedFile, err := util.CachedPath(urlOrFilename) if err != nil { return err } vs := nn.NewVarStore(device) p := vs.Root() mlm.roberta = bert.NewBertModel(p.Sub("roberta"), config.(*bert.BertConfig)) mlm.lmHead = NewRobertaLMHead(p.Sub("lm_head"), config.(*bert.BertConfig)) err = vs.Load(cachedFile) if err != nil { return err } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Load(fileName string, src interface{}) error {\n\tfile, err := os.Open(fileName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tif err == nil {\n\t\tdecoder := gob.NewDecoder(file)\n\t\tif err = decoder.Decode(src); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// Restore parameters\n\tswitc...
[ "0.70856375", "0.6931012", "0.691719", "0.67702013", "0.6714875", "0.6647788", "0.6632438", "0.6570047", "0.6427433", "0.6367626", "0.63450754", "0.6210927", "0.62089187", "0.6141275", "0.6126932", "0.5932069", "0.59089965", "0.5824516", "0.5803706", "0.57926947", "0.56931376...
0.6942267
1
Forwad forwads pass through the model. Params: + `inputIds`: Optional input tensor of shape (batch size, sequence length). If None, precomputed embeddings must be provided (see inputEmbeds). + `mask`: Optional mask of shape (batch size, sequence length). Masked position have value 0, nonmasked value 1. If None set to 1. + `tokenTypeIds`: Optional segment id of shape (batch size, sequence length). Convention is value of 0 for the first sentence (incl. ) and 1 for the second sentence. If None set to 0. + `positionIds`: Optional position ids of shape (batch size, sequence length). If None, will be incremented from 0. + `inputEmbeds`: Optional precomputed input embeddings of shape (batch size, sequence length, hidden size). If None, input ids must be provided (see inputIds). + `encoderHiddenStates`: Optional encoder hidden state of shape (batch size, encoder sequence length, hidden size). If the model is defined as a decoder and the encoder hidden states is not None, used in the crossattention layer as keys and values (query from the decoder). + `encoderMask`: Optional encoder attention mask of shape (batch size, encoder sequence length). If the model is defined as a decoder and the encoder_hidden_states is not None, used to mask encoder values. Positions with value 0 will be masked. + `train`: boolean flag to turn on/off the dropout layers in the model. Should be set to false for inference. Returns: + `output`: tensor of shape (batch size, numLabels, vocab size) + `hiddenStates`: optional slice of tensors of length numHiddenLayers with shape (batch size, sequence length, hidden size). + `attentions`: optional slice of tensors of length num hidden layers with shape (batch size, sequence length, hidden size). + `err`: error
func (mlm *RobertaForMaskedLM) Forward(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds, encoderHiddenStates, encoderMask *ts.Tensor, train bool) (output *ts.Tensor, hiddenStates, attentions []*ts.Tensor, err error) { hiddenState, _, allHiddenStates, allAttentions, err := mlm.roberta.ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds, encoderHiddenStates, encoderMask, train) if err != nil { return ts.None, nil, nil, err } predictionScores := mlm.lmHead.Forward(hiddenState) return predictionScores, allHiddenStates, allAttentions, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (tc *RobertaForTokenClassification) ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds *ts.Tensor, train bool) (output *ts.Tensor, hiddenStates, attentions []*ts.Tensor, err error) {\n\thiddenState, _, hiddenStates, attentions, err := tc.roberta.ForwardT(inputIds, mask, tokenTypeIds, positionIds,...
[ "0.43194455", "0.41344866", "0.41018224", "0.40161535", "0.3945738", "0.391739", "0.37510443", "0.3740854", "0.36417305", "0.36374965", "0.35823515", "0.350722", "0.35027307", "0.3458461", "0.34578133", "0.3431309", "0.34074798", "0.33991477", "0.33961838", "0.33838093", "0.3...
0.43332857
0
NewRobertaClassificationHead create a new RobertaClassificationHead.
func NewRobertaClassificationHead(p *nn.Path, config *bert.BertConfig) *RobertaClassificationHead { dense := nn.NewLinear(p.Sub("dense"), config.HiddenSize, config.HiddenSize, nn.DefaultLinearConfig()) numLabels := int64(len(config.Id2Label)) outProj := nn.NewLinear(p.Sub("out_proj"), config.HiddenSize, numLabels, nn.DefaultLinearConfig()) dropout := util.NewDropout(config.HiddenDropoutProb) return &RobertaClassificationHead{ dense: dense, dropout: dropout, outProj: outProj, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewRobertaLMHead(p *nn.Path, config *bert.BertConfig) *RobertaLMHead {\n\tdense := nn.NewLinear(p.Sub(\"dense\"), config.HiddenSize, config.HiddenSize, nn.DefaultLinearConfig())\n\n\tlayerNormConfig := nn.DefaultLayerNormConfig()\n\tlayerNormConfig.Eps = 1e-12\n\tlayerNorm := nn.NewLayerNorm(p.Sub(\"layer_nor...
[ "0.6364758", "0.5719456", "0.54601485", "0.51877445", "0.51676345", "0.5114384", "0.5104612", "0.5007022", "0.49942425", "0.49633253", "0.485414", "0.48249248", "0.4803933", "0.48032835", "0.4742509", "0.47365102", "0.47250208", "0.47056574", "0.46969962", "0.46928912", "0.46...
0.8124905
0
ForwardT forwards pass through model.
func (ch *RobertaClassificationHead) ForwardT(hiddenStates *ts.Tensor, train bool) *ts.Tensor { appliedDO1 := hiddenStates.MustSelect(1, 0, false).ApplyT(ch.dropout, train) appliedDense := appliedDO1.Apply(ch.dense) tanhTs := appliedDense.MustTanh(false) appliedDO2 := tanhTs.ApplyT(ch.dropout, train) retVal := appliedDO2.Apply(ch.outProj) appliedDO1.MustDrop() appliedDense.MustDrop() tanhTs.MustDrop() appliedDO2.MustDrop() return retVal }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (mod *backendModule) Forward(f *gatepb.Forward) error {\n\treturn mod.send(proto.Type(f.Typ), f)\n}", "func (m *Model) Forward(qkv attention.QKV) attention.Output {\n\tprojAtt := attention.QKV{\n\t\tQueries: m.Query.Forward(qkv.Queries...),\n\t\tKeys: m.Key.Forward(qkv.Keys...),\n\t\tValues: m.Value.For...
[ "0.7021994", "0.6554831", "0.6494534", "0.6460856", "0.64138633", "0.62935233", "0.6266006", "0.6205411", "0.61404204", "0.6131823", "0.61229944", "0.6081643", "0.6071474", "0.60542464", "0.5995417", "0.59735703", "0.5953793", "0.5952972", "0.59456825", "0.5916433", "0.584080...
0.5994497
15
NewRobertaForSequenceClassification creates a new RobertaForSequenceClassification model.
func NewRobertaForSequenceClassification(p *nn.Path, config *bert.BertConfig) *RobertaForSequenceClassification { roberta := bert.NewBertModel(p.Sub("roberta"), config) classifier := NewRobertaClassificationHead(p.Sub("classifier"), config) return &RobertaForSequenceClassification{ roberta: roberta, classifier: classifier, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewRobertaClassificationHead(p *nn.Path, config *bert.BertConfig) *RobertaClassificationHead {\n\tdense := nn.NewLinear(p.Sub(\"dense\"), config.HiddenSize, config.HiddenSize, nn.DefaultLinearConfig())\n\tnumLabels := int64(len(config.Id2Label))\n\toutProj := nn.NewLinear(p.Sub(\"out_proj\"), config.HiddenSiz...
[ "0.63030326", "0.6088201", "0.5573898", "0.5011184", "0.48697948", "0.48697948", "0.47858444", "0.47579253", "0.47444493", "0.46542457", "0.46195018", "0.4588383", "0.4547116", "0.4539084", "0.45093924", "0.44767594", "0.4411735", "0.43808696", "0.42963836", "0.4287047", "0.4...
0.78834176
0
Load loads model from file or model name. It also updates default configuration parameters if provided. This method implements `PretrainedModel` interface.
func (sc *RobertaForSequenceClassification) Load(modelNameOrPath string, config interface{ pretrained.Config }, params map[string]interface{}, device gotch.Device) error { var urlOrFilename string // If modelName, infer to default configuration filename: if modelFile, ok := pretrained.RobertaModels[modelNameOrPath]; ok { urlOrFilename = modelFile } else { // Otherwise, just take the input urlOrFilename = modelNameOrPath } cachedFile, err := util.CachedPath(urlOrFilename) if err != nil { return err } vs := nn.NewVarStore(device) p := vs.Root() sc.roberta = bert.NewBertModel(p.Sub("roberta"), config.(*bert.BertConfig)) sc.classifier = NewRobertaClassificationHead(p.Sub("classifier"), config.(*bert.BertConfig)) err = vs.Load(cachedFile) if err != nil { return err } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Load(fileName string, src interface{}) error {\n\tfile, err := os.Open(fileName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tif err == nil {\n\t\tdecoder := gob.NewDecoder(file)\n\t\tif err = decoder.Decode(src); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// Restore parameters\n\tswitc...
[ "0.70846575", "0.6941715", "0.6930484", "0.67688364", "0.6714155", "0.6647504", "0.6631434", "0.65701777", "0.64257467", "0.63652515", "0.6344658", "0.620972", "0.6207725", "0.6139964", "0.61265564", "0.5930335", "0.59086376", "0.5823113", "0.5802144", "0.5792252", "0.5693667...
0.6917168
3
ForwardT runs a forward pass through the model.
func (sc *RobertaForSequenceClassification) ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds *ts.Tensor, train bool) (labels *ts.Tensor, hiddenStates, attentions []*ts.Tensor, err error) { hiddenState, _, hiddenStates, attentions, err := sc.roberta.ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds, ts.None, ts.None, train) if err != nil { return ts.None, nil, nil, err } labels = sc.classifier.ForwardT(hiddenState, train) hiddenState.MustDrop() return labels, hiddenStates, attentions, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *Model) Forward(x *Node, states States) (rv *Node, err error) {\n\trv = x\n\tfor _, l := range m.Layers {\n\t\tif rv, err = l.Forward(rv, states); err != nil {\n\t\t\treturn nil, errors.Wrap(err, l.Name())\n\t\t}\n\t}\n\treturn rv, nil\n}", "func (mod *backendModule) Forward(f *gatepb.Forward) error {\n\...
[ "0.7115619", "0.70571136", "0.69965667", "0.69702876", "0.6634736", "0.6614", "0.65342593", "0.6481366", "0.6453834", "0.64295715", "0.6368864", "0.6234271", "0.6223834", "0.62171066", "0.6211164", "0.61765057", "0.61700433", "0.6166981", "0.61651725", "0.6146318", "0.6138534...
0.0
-1
NewRobertaForMultipleChoice creates a new RobertaForMultipleChoice model.
func NewRobertaForMultipleChoice(p *nn.Path, config *bert.BertConfig) *RobertaForMultipleChoice { roberta := bert.NewBertModel(p.Sub("roberta"), config) dropout := util.NewDropout(config.HiddenDropoutProb) classifier := nn.NewLinear(p.Sub("classifier"), config.HiddenSize, 1, nn.DefaultLinearConfig()) return &RobertaForMultipleChoice{ roberta: roberta, dropout: dropout, classifier: classifier, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewChoice(allowedValues ...string) Choice {\n\treturn Choice{AllowedValues: allowedValues}\n}", "func NewMultipleResponsesClient() MultipleResponsesClient {\n return NewMultipleResponsesClientWithBaseURI(DefaultBaseURI, )\n}", "func (r Response) MultipleChoices(code string, payload Payload, header ...R...
[ "0.48868722", "0.4619283", "0.4487543", "0.4456285", "0.42859465", "0.41999", "0.41992977", "0.4195675", "0.4194431", "0.415553", "0.41500634", "0.41447544", "0.41297522", "0.41187063", "0.40561128", "0.40476024", "0.40391248", "0.40369943", "0.40235433", "0.4007433", "0.3986...
0.755682
0
Load loads model from file or model name. It also updates default configuration parameters if provided. This method implements `PretrainedModel` interface.
func (mc *RobertaForMultipleChoice) Load(modelNameOrPath string, config interface{ pretrained.Config }, params map[string]interface{}, device gotch.Device) error { var urlOrFilename string // If modelName, infer to default configuration filename: if modelFile, ok := pretrained.RobertaModels[modelNameOrPath]; ok { urlOrFilename = modelFile } else { // Otherwise, just take the input urlOrFilename = modelNameOrPath } cachedFile, err := util.CachedPath(urlOrFilename) if err != nil { return err } vs := nn.NewVarStore(device) p := vs.Root() mc.roberta = bert.NewBertModel(p.Sub("roberta"), config.(*bert.BertConfig)) mc.dropout = util.NewDropout(config.(*bert.BertConfig).HiddenDropoutProb) classifier := nn.NewLinear(p.Sub("classifier"), config.(*bert.BertConfig).HiddenSize, 1, nn.DefaultLinearConfig()) mc.classifier = classifier err = vs.Load(cachedFile) if err != nil { return err } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Load(fileName string, src interface{}) error {\n\tfile, err := os.Open(fileName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tif err == nil {\n\t\tdecoder := gob.NewDecoder(file)\n\t\tif err = decoder.Decode(src); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// Restore parameters\n\tswitc...
[ "0.70856375", "0.6942267", "0.691719", "0.67702013", "0.6714875", "0.6647788", "0.6632438", "0.6570047", "0.6427433", "0.6367626", "0.63450754", "0.6210927", "0.62089187", "0.6141275", "0.6126932", "0.5932069", "0.59089965", "0.5824516", "0.5803706", "0.57926947", "0.56931376...
0.6931012
2
ForwardT forwards pass through the model.
func (mc *RobertaForMultipleChoice) ForwardT(inputIds, mask, tokenTypeIds, positionIds *ts.Tensor, train bool) (output *ts.Tensor, hiddenStates, attentions []*ts.Tensor, err error) { numChoices := inputIds.MustSize()[1] inputIdsSize := inputIds.MustSize() flatInputIds := inputIds.MustView([]int64{-1, inputIdsSize[len(inputIdsSize)-1]}, false) flatPositionIds := ts.None if positionIds.MustDefined() { positionIdsSize := positionIds.MustSize() flatPositionIds = positionIds.MustView([]int64{-1, positionIdsSize[len(positionIdsSize)-1]}, false) } flatTokenTypeIds := ts.None if tokenTypeIds.MustDefined() { tokenTypeIdsSize := tokenTypeIds.MustSize() flatTokenTypeIds = tokenTypeIds.MustView([]int64{-1, tokenTypeIdsSize[len(tokenTypeIdsSize)-1]}, false) } flatMask := ts.None if mask.MustDefined() { flatMaskSize := flatMask.MustSize() flatMask = mask.MustView([]int64{-1, flatMaskSize[len(flatMaskSize)-1]}, false) } var pooledOutput *ts.Tensor _, pooledOutput, hiddenStates, attentions, err = mc.roberta.ForwardT(flatInputIds, flatMask, flatTokenTypeIds, flatPositionIds, ts.None, ts.None, ts.None, train) if err != nil { return ts.None, nil, nil, err } appliedDO := pooledOutput.ApplyT(mc.dropout, train) appliedCls := appliedDO.Apply(mc.classifier) output = appliedCls.MustView([]int64{-1, numChoices}, true) appliedDO.MustDrop() return output, hiddenStates, attentions, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (mod *backendModule) Forward(f *gatepb.Forward) error {\n\treturn mod.send(proto.Type(f.Typ), f)\n}", "func (m *Model) Forward(qkv attention.QKV) attention.Output {\n\tprojAtt := attention.QKV{\n\t\tQueries: m.Query.Forward(qkv.Queries...),\n\t\tKeys: m.Key.Forward(qkv.Keys...),\n\t\tValues: m.Value.For...
[ "0.69482577", "0.6621834", "0.6508942", "0.6488017", "0.6365582", "0.6333117", "0.61553794", "0.613421", "0.61081684", "0.6089787", "0.6033418", "0.6019624", "0.6017017", "0.59916246", "0.5969902", "0.59648085", "0.59556293", "0.5928323", "0.5927592", "0.5907717", "0.5900434"...
0.562365
41
NewRobertaForTokenClassification creates a new RobertaForTokenClassification model.
func NewRobertaForTokenClassification(p *nn.Path, config *bert.BertConfig) *RobertaForTokenClassification { roberta := bert.NewBertModel(p.Sub("roberta"), config) dropout := util.NewDropout(config.HiddenDropoutProb) numLabels := int64(len(config.Id2Label)) classifier := nn.NewLinear(p.Sub("classifier"), config.HiddenSize, numLabels, nn.DefaultLinearConfig()) return &RobertaForTokenClassification{ roberta: roberta, dropout: dropout, classifier: classifier, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewRobertaForSequenceClassification(p *nn.Path, config *bert.BertConfig) *RobertaForSequenceClassification {\n\troberta := bert.NewBertModel(p.Sub(\"roberta\"), config)\n\tclassifier := NewRobertaClassificationHead(p.Sub(\"classifier\"), config)\n\n\treturn &RobertaForSequenceClassification{\n\t\troberta: ...
[ "0.61778885", "0.5740375", "0.5316356", "0.51405746", "0.5083584", "0.49698022", "0.49512434", "0.49267012", "0.48441488", "0.48201382", "0.4790092", "0.47371662", "0.46439788", "0.46233004", "0.4608998", "0.45439917", "0.4526471", "0.45263392", "0.4505784", "0.44961753", "0....
0.817786
0
Load loads model from file or model name. It also updates default configuration parameters if provided. This method implements `PretrainedModel` interface.
func (tc *RobertaForTokenClassification) Load(modelNameOrPath string, config interface{ pretrained.Config }, params map[string]interface{}, device gotch.Device) error { var urlOrFilename string // If modelName, infer to default configuration filename: if modelFile, ok := pretrained.RobertaModels[modelNameOrPath]; ok { urlOrFilename = modelFile } else { // Otherwise, just take the input urlOrFilename = modelNameOrPath } cachedFile, err := util.CachedPath(urlOrFilename) if err != nil { return err } vs := nn.NewVarStore(device) p := vs.Root() roberta := bert.NewBertModel(p.Sub("roberta"), config.(*bert.BertConfig)) dropout := util.NewDropout(config.(*bert.BertConfig).HiddenDropoutProb) numLabels := int64(len(config.(*bert.BertConfig).Id2Label)) classifier := nn.NewLinear(p.Sub("classifier"), config.(*bert.BertConfig).HiddenSize, numLabels, nn.DefaultLinearConfig()) tc.roberta = roberta tc.dropout = dropout tc.classifier = classifier err = vs.Load(cachedFile) if err != nil { return err } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Load(fileName string, src interface{}) error {\n\tfile, err := os.Open(fileName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tif err == nil {\n\t\tdecoder := gob.NewDecoder(file)\n\t\tif err = decoder.Decode(src); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// Restore parameters\n\tswitc...
[ "0.7085643", "0.69431984", "0.6932099", "0.6917992", "0.6769626", "0.6714895", "0.66320646", "0.65712345", "0.6427181", "0.63659275", "0.6345356", "0.621093", "0.6208571", "0.6141206", "0.61269116", "0.5931221", "0.59096044", "0.5825058", "0.5803748", "0.57919365", "0.5693847...
0.6648425
6
ForwardT forwards pass through the model.
func (tc *RobertaForTokenClassification) ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds *ts.Tensor, train bool) (output *ts.Tensor, hiddenStates, attentions []*ts.Tensor, err error) { hiddenState, _, hiddenStates, attentions, err := tc.roberta.ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds, ts.None, ts.None, train) if err != nil { return ts.None, nil, nil, err } appliedDO := hiddenState.ApplyT(tc.dropout, train) output = appliedDO.Apply(tc.classifier) appliedDO.MustDrop() return output, hiddenStates, attentions, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (mod *backendModule) Forward(f *gatepb.Forward) error {\n\treturn mod.send(proto.Type(f.Typ), f)\n}", "func (m *Model) Forward(qkv attention.QKV) attention.Output {\n\tprojAtt := attention.QKV{\n\t\tQueries: m.Query.Forward(qkv.Queries...),\n\t\tKeys: m.Key.Forward(qkv.Keys...),\n\t\tValues: m.Value.For...
[ "0.6948116", "0.6622131", "0.6508866", "0.64886606", "0.6365477", "0.63336223", "0.61528224", "0.6133815", "0.6107234", "0.60896546", "0.60325277", "0.60204875", "0.60162234", "0.59912664", "0.59696406", "0.5964232", "0.5954961", "0.59260416", "0.59076136", "0.5899988", "0.58...
0.59257644
18
NewRobertaForQuestionAnswering creates a new RobertaForQuestionAnswering model.
func NewRobertaForQuestionAnswering(p *nn.Path, config *bert.BertConfig) *RobertaForQuestionAnswering { roberta := bert.NewBertModel(p.Sub("roberta"), config) numLabels := int64(2) qaOutputs := nn.NewLinear(p.Sub("qa_outputs"), config.HiddenSize, numLabels, nn.DefaultLinearConfig()) return &RobertaForQuestionAnswering{ roberta: roberta, qaOutputs: qaOutputs, } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewQuestion() {\n\tNumAnswers = make(map[string]int)\n\n\tsetRandomQuestion()\n\tvar NewQuestionMessage = fmt.Sprintf(\"A new question, you have %d seconds to answer it!\", TimeToAnswer)\n\tsendMessageToActiveChannels(NewQuestionMessage)\n\tQuestionTimer = time.NewTimer(time.Second * TimeToAnswer)\n\tgo func(...
[ "0.5639432", "0.56050396", "0.5415833", "0.5342135", "0.5241635", "0.5227963", "0.5099866", "0.4905231", "0.4904413", "0.47154996", "0.47037742", "0.46589753", "0.45973787", "0.45820704", "0.4568143", "0.45237657", "0.45206556", "0.4516852", "0.45102775", "0.45020384", "0.444...
0.71885735
0
Load loads model from file or model name. It also updates default configuration parameters if provided. This method implements `PretrainedModel` interface.
func (qa *RobertaForQuestionAnswering) Load(modelNameOrPath string, config interface{ pretrained.Config }, params map[string]interface{}, device gotch.Device) error { var urlOrFilename string // If modelName, infer to default configuration filename: if modelFile, ok := pretrained.RobertaModels[modelNameOrPath]; ok { urlOrFilename = modelFile } else { // Otherwise, just take the input urlOrFilename = modelNameOrPath } cachedFile, err := util.CachedPath(urlOrFilename) if err != nil { return err } vs := nn.NewVarStore(device) p := vs.Root() roberta := bert.NewBertModel(p.Sub("roberta"), config.(*bert.BertConfig)) numLabels := int64(2) qaOutputs := nn.NewLinear(p.Sub("qa_outputs"), config.(*bert.BertConfig).HiddenSize, numLabels, nn.DefaultLinearConfig()) qa.roberta = roberta qa.qaOutputs = qaOutputs err = vs.Load(cachedFile) if err != nil { return err } return nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Load(fileName string, src interface{}) error {\n\tfile, err := os.Open(fileName)\n\tif err != nil {\n\t\treturn err\n\t}\n\tdefer file.Close()\n\tif err == nil {\n\t\tdecoder := gob.NewDecoder(file)\n\t\tif err = decoder.Decode(src); err != nil {\n\t\t\treturn err\n\t\t}\n\t}\n\t// Restore parameters\n\tswitc...
[ "0.70864344", "0.6942464", "0.69314396", "0.69175935", "0.6770165", "0.6715859", "0.6647983", "0.66319776", "0.64276546", "0.6366989", "0.63446957", "0.6211604", "0.620865", "0.6141415", "0.6127916", "0.59314406", "0.5909854", "0.58246374", "0.5804325", "0.57926095", "0.56939...
0.6570828
8
ForwardT runs a forward pass through the model.
func (qa *RobertaForQuestionAnswering) ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds *ts.Tensor, train bool) (startScores, endScores *ts.Tensor, hiddenStates, attentions []*ts.Tensor, err error) { hiddenState, _, hiddenStates, attentions, err := qa.roberta.ForwardT(inputIds, mask, tokenTypeIds, positionIds, inputEmbeds, ts.None, ts.None, train) if err != nil { return ts.None, ts.None, nil, nil, err } sequenceOutput := hiddenState.Apply(qa.qaOutputs) logits := sequenceOutput.MustSplit(1, -1, true) startScores = logits[0].MustSqueeze1(-1, false) endScores = logits[1].MustSqueeze1(-1, false) for _, x := range logits { x.MustDrop() } return startScores, endScores, hiddenStates, attentions, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (m *Model) Forward(qkv attention.QKV) attention.Output {\n\tprojAtt := attention.QKV{\n\t\tQueries: m.Query.Forward(qkv.Queries...),\n\t\tKeys: m.Key.Forward(qkv.Keys...),\n\t\tValues: m.Value.Forward(qkv.Values...),\n\t}\n\tattOutput, attWeights := attention.ScaledDotProductAttention(m.Graph(), projAtt, ...
[ "0.6578177", "0.65311825", "0.60598856", "0.60520715", "0.59716046", "0.5958758", "0.59383243", "0.5921928", "0.5888444", "0.58074296", "0.57394636", "0.5717503", "0.5681384", "0.56571454", "0.5655735", "0.56228024", "0.5592294", "0.5528087", "0.5528087", "0.5523619", "0.5522...
0.0
-1
NotifyWithHipchat notify with hipchat
func (a *App) NotifyWithHipchat(body string, statusCode int) { if a.HipchatRoom == "" || a.HipchatToken == "" { return } url := "https://api.hipchat.com/v2/room/" + a.HipchatRoom + "/notification?auth_token=" + a.HipchatToken color := "red" if statusCode == 200 { color = "green" } input, err := json.Marshal(&hipchatRequest{Notify: true, MessageFormat: "text", Color: color, Message: "@all\n" + body}) if err != nil { log.Print(err) return } resp, err := a.HealthcheckNotifier.HipChatClient.Post(url, "application/json", bytes.NewBuffer(input)) if err != nil { log.Print(err) return } log.Printf("%s HipChat: %d", a.Name, resp.StatusCode) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func Notify(client *hipchat.Client, cfg *config.Config) error {\n\treq := &hipchat.NotificationRequest{\n\t\tMessage: cfg.FormattedMessage(),\n\t\tNotify: config.ToBool(cfg.Notify),\n\t\tColor: cfg.Color,\n\t}\n\n\tfmt.Printf(\"%+v\\n\", req)\n\n\t_, err := client.Room.Notification(cfg.Room, req)\n\tfmt.Printf(...
[ "0.7070116", "0.61992604", "0.60911196", "0.60856074", "0.60541904", "0.58165413", "0.57948136", "0.57418495", "0.5735038", "0.5724112", "0.5648719", "0.56233865", "0.5618996", "0.56145555", "0.5529747", "0.5523478", "0.5523463", "0.5488026", "0.5483475", "0.54820335", "0.547...
0.7933373
0
NotifyWithMail notify with mail
func (a *App) NotifyWithMail(body string, statusCode int) { from := a.HealthcheckNotifier.MailAddressFrom server := a.HealthcheckNotifier.SMTPServer to := a.MailAddressToDown subject := "[DOWN] " + a.Name if statusCode == 200 { to = a.MailAddressToUp subject = "[UP] " + a.Name } if server == "" || from == "" || len(to) == 0 { return } msg := "From: " + from + "\r\n" + "To: " + toLine(to) + "\r\n" + "Subject: " + subject + "\r\n\r\n" + body + "\r\n" err := smtp.SendMail(server, nil, from, to, []byte(msg)) if err != nil { log.Print(err) return } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (n *NotifyMail) Notify(mail M.Mail) {\n\tif err := n.store.Insert(&mail); err != nil {\n\t\tn.errChan.In() <- err\n\t\treturn\n\t}\n\tn.sendChan <- mail\n}", "func (n *SMTPNotifier) Notify(work *model.WorkRequest) error {\n\t// Get email body\n\tpayload := work.GetLogContent(n.PrefixFilter)\n\tif strings.Tr...
[ "0.7391516", "0.71107477", "0.705149", "0.6940533", "0.69082105", "0.68004864", "0.67699456", "0.6758246", "0.67541605", "0.67424625", "0.6731583", "0.6714901", "0.6697611", "0.66768765", "0.659905", "0.6598201", "0.6596002", "0.65796757", "0.6521858", "0.64662987", "0.646629...
0.746237
0
Run executes the agent using the given config and backend. It uses SQS for its internal queues. When the function finishes it returns an exit code of 0 if the agent terminated gracefully, either by receiving a TERM signal or because it passed more time than configured without reading a message.
func Run(cfg config.Config, store storage.Store, back backend.Backend, logger log.Logger) int { // Build queue writer. qw, err := sqs.NewWriter(cfg.SQSWriter.ARN, cfg.SQSWriter.Endpoint, logger) if err != nil { logger.Errorf("error creating SQS writer: %+v", err) return 1 } // Build queue reader. var maxTimeNoMsg *time.Duration if cfg.Agent.MaxNoMsgsInterval > 0 { t := time.Duration(cfg.Agent.MaxNoMsgsInterval) * time.Second maxTimeNoMsg = &t } // A nil queue.MessageProcessor is passed as argument because // RunWithQueues will set it before starting reading messages. qr, err := sqs.NewReader(logger, cfg.SQSReader, maxTimeNoMsg, nil) if err != nil { logger.Errorf("error creating SQS reader: %+v", err) return 1 } // Run agent with SQS queues. return RunWithQueues(cfg, store, back, qw, qr, logger) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func RunLoop(regInfo *RegistrationInfo, regInfoUpdatesCh <-chan string, eventsChannel chan<- *Event, regChannel chan<- time.Time) {\n\n\tlogging.Info(\"Initializing SQS client.\", nil)\n\tsvc := getSQSClient(regInfo)\n\tqueue := regInfo.ActionQueueEndpoint\n\n\tshouldLogError := true\n\tnumFailures := 0\n\tfor {\n...
[ "0.6085722", "0.59596044", "0.58610517", "0.5824352", "0.57012486", "0.56319433", "0.5514261", "0.5386275", "0.534189", "0.53253037", "0.5299745", "0.5179622", "0.51693946", "0.5121386", "0.51029456", "0.50969225", "0.50817096", "0.5079898", "0.5044915", "0.49995443", "0.4999...
0.78693676
0
RunWithQueues is like [Run] but accepts custom queue implementations. The fields [config.Config.SQSReader] and [config.Config.SQSWriter] must be zero.
func RunWithQueues(cfg config.Config, store storage.Store, back backend.Backend, statesQueue queue.Writer, jobsQueue AgentQueueReader, logger log.Logger) int { // Build state updater. stateUpdater := stateupdater.New(statesQueue) updater := struct { *stateupdater.Updater storage.Store }{stateUpdater, store} // Build job runner. jrunner, err := newJobRunner(cfg, back, updater, logger) if err != nil { logger.Errorf("error creating job runner: %+v", err) return 1 } // Set queue's message processor. jobsQueue.SetMessageProcessor(jrunner) // Run agent. if err := run(cfg, jrunner, updater, jobsQueue, logger); err != nil { logger.Errorf("error running agent: %+v", err) return 1 } return 0 }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (s *SQSServer) ListenAndServeQueues(queues ...QueueConf) error {\n\tif len(queues) == 0 {\n\t\treturn fmt.Errorf(\"Must specify at least one SQS queue to poll\")\n\t}\n\tpollctx, pollcancel := context.WithCancel(context.Background())\n\ttaskctx, taskcancel := context.WithCancel(context.Background())\n\ts.stop...
[ "0.61222285", "0.60373265", "0.5991201", "0.598443", "0.5974285", "0.5967612", "0.578413", "0.57465136", "0.56890184", "0.56563723", "0.5654841", "0.56327754", "0.5617098", "0.5601202", "0.55928594", "0.5576123", "0.5474809", "0.5413749", "0.5391861", "0.5387072", "0.5382978"...
0.65749663
0
newJobRunner returns a new job runner with the provided agent configuration.
func newJobRunner(cfg config.Config, back backend.Backend, updater jobrunner.CheckStateUpdater, logger log.Logger) (*jobrunner.Runner, error) { // Build the aborted checks component that will be used to know if a check // has been aborted or not before starting to execute it. var ( err error abortedChecks jobrunner.AbortedChecks ) if cfg.Stream.QueryEndpoint == "" { logger.Infof("stream query_endpoint is empty, the agent will not check for aborted checks") abortedChecks = &aborted.None{} } else { re := retryer.NewRetryer(cfg.Stream.Retries, cfg.Stream.RetryInterval, logger) abortedChecks, err = aborted.New(logger, cfg.Stream.QueryEndpoint, re) if err != nil { return nil, fmt.Errorf("create aborted checks: %w", err) } } // Build job runner. runnerCfg := jobrunner.RunnerConfig{ MaxTokens: cfg.Agent.ConcurrentJobs, DefaultTimeout: cfg.Agent.Timeout, MaxProcessMessageTimes: cfg.Agent.MaxProcessMessageTimes, } jrunner := jobrunner.New(logger, back, updater, abortedChecks, runnerCfg) return jrunner, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func NewRunner() *Runner {\n\treturn &Runner{\n\t\tl: logrus.WithField(\"component\", \"jobs-runner\"),\n\t\tjobs: make(chan Job, jobsBufferSize),\n\t\tjobsCancel: make(map[string]context.CancelFunc),\n\t\tmessages: make(chan *channel.AgentResponse),\n\t}\n}", "func newJobRunner(logger *persist....
[ "0.7033698", "0.62534994", "0.60575217", "0.5997101", "0.59296507", "0.5779144", "0.57462084", "0.57345396", "0.5700003", "0.5682094", "0.5681728", "0.5668359", "0.56301194", "0.56086624", "0.55953586", "0.55779314", "0.5573774", "0.5547928", "0.5511023", "0.55041426", "0.549...
0.7297188
0
run runs the agent.
// run runs the agent: it starts the metrics poller, the optional check-cancel
// stream, the HTTP API and the jobs-queue reader, then blocks until a
// SIGINT/SIGTERM arrives, the HTTP server fails, or the queue reader stops.
// On any of those it cancels the shared context and shuts the components down
// in a fixed order: pending checks, metrics, HTTP server, then the stream.
func run(cfg config.Config, jrunner *jobrunner.Runner, updater api.CheckStateUpdater, jobsQueue queue.Reader, logger log.Logger) error {
	// Build metrics component.
	metrics := metrics.NewMetrics(logger, cfg.DataDog, jrunner)

	// Create a context to orchestrate the shutdown of the
	// different components.
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	// Initialize check cancel stream if an endpoint is provided.
	var (
		err        error
		streamDone <-chan error
	)
	if cfg.Stream.Endpoint == "" {
		logger.Infof("check cancel stream disabled")
	} else {
		re := retryer.NewRetryer(cfg.Stream.Retries, cfg.Stream.RetryInterval, logger)
		// metrics is passed as a stream message processor to
		// abort checks.
		stream := stream.New(logger, metrics, re, cfg.Stream.Endpoint)
		streamDone, err = stream.ListenAndProcess(ctx)
		if err != nil {
			return fmt.Errorf("stream start: %w", err)
		}
	}

	// Build stats components that exposes information about the
	// agent.
	stats := struct {
		*jobrunner.Runner
		queue.Reader
	}{jrunner, jobsQueue}

	// Start agent's HTTP API. The server runs in its own goroutine and
	// reports its terminal error on httpDone.
	api := api.New(logger, updater, stats)
	router := httprouter.New()
	httpapi.NewREST(logger, api, router)
	srv := http.Server{Handler: router}
	httpDone := make(chan error)
	go func() {
		var err error
		if cfg.API.Listener != nil {
			err = srv.Serve(cfg.API.Listener)
		} else {
			srv.Addr = cfg.API.Port
			err = srv.ListenAndServe()
		}
		httpDone <- err
		close(httpDone)
	}()

	// Wait while the agent runs.
	qrdone := jobsQueue.StartReading(ctx)
	metricsDone := metrics.StartPolling(ctx)
	var agentAddr string
	if cfg.API.Listener != nil {
		agentAddr = cfg.API.Listener.Addr().String()
	} else {
		agentAddr = cfg.API.Port
	}
	logger.Infof("agent running on address %s", agentAddr)
	sig := make(chan os.Signal, 1)
	signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM)
	// Block until one of: a signal, the HTTP server stopping, or the queue
	// reader finishing on its own (idle timeout or error).
	select {
	case <-sig:
		// Signal SQS queue reader to stop reading messages
		// from the queue.
		logger.Infof("SIG received, stopping agent")
		cancel()
	case err := <-httpDone:
		logger.Errorf("error running agent: %+v", err)
		cancel()
	case err := <-qrdone:
		cancel()
		if err != nil {
			if !errors.Is(err, queue.ErrMaxTimeNoRead) && !errors.Is(err, context.Canceled) {
				return fmt.Errorf("agent run: %w", err)
			}
		}
	}

	// Wait for all pending jobs to finish.
	logger.Infof("waiting for checks to finish")
	err = <-qrdone
	if err != nil && !errors.Is(err, context.Canceled) {
		logger.Errorf("error waiting for checks to finish %+v", err)
	}

	// Wait for metrics to stop polling.
	logger.Debugf("waiting for metrics to stop")
	<-metricsDone

	// Stop listening API HTTP requests.
	logger.Debugf("stop listening API requests")
	err = srv.Shutdown(context.Background())
	if err != nil {
		return fmt.Errorf("http server shutdown: %w", err)
	}

	// Wait for HTTP API to shutdown.
	err = <-httpDone
	if err != nil && !errors.Is(err, http.ErrServerClosed) {
		return fmt.Errorf("http server shutdown wait: %w", err)
	}

	// Wait for stream to finish.
	if streamDone != nil {
		logger.Debugf("waiting for stream to stop")
		err = <-streamDone
		if err != nil && !errors.Is(err, context.Canceled) {
			return fmt.Errorf("stream stop wait: %w", err)
		}
	}

	logger.Infof("agent finished gracefully")
	return nil
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func run(ctx context.Context, cfg *config.Config, configFilepath string) {\n\tvar cancel context.CancelFunc\n\tctx, cancel = context.WithCancel(ctx)\n\n\t// Actions runner is currently created inside client.New.\n\t// It should be created separately.\n\t// TODO https://jira.percona.com/browse/PMM-7206\n\n\tsupervi...
[ "0.6973518", "0.69351375", "0.68651646", "0.6625114", "0.65953296", "0.6519422", "0.65191615", "0.6410184", "0.63794345", "0.63402677", "0.6325699", "0.6325376", "0.6293736", "0.6268254", "0.61926425", "0.61826265", "0.6170785", "0.61572945", "0.61572236", "0.61414754", "0.61...
0.6532814
5
NewStorageConfig loads storage configuration for a S3 FileSystem service in the given namespace. If no storage config is found in that namespace, the koalja-system namespace is used. +kubebuilder:rbac:groups=,resources=configmaps,verbs=get;list;watch
func NewStorageConfig(ctx context.Context, c client.Reader, ns string) (*StorageConfig, error) { var configMap corev1.ConfigMap key := client.ObjectKey{ Name: constants.ConfigMapS3Storage, Namespace: ns, } if err := c.Get(ctx, key, &configMap); errors.IsNotFound(err) { // Try koalja-system namespace key.Namespace = constants.NamespaceKoaljaSystem if err := c.Get(ctx, key, &configMap); err != nil { return nil, maskAny(err) } } else if err != nil { return nil, maskAny(err) } // Parse config map sc, err := newStorageConfigFromConfigMap(ctx, &configMap, c, ns) if err != nil { return nil, maskAny(err) } return sc, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func newStorageConfigFromConfigMap(ctx context.Context, configMap *corev1.ConfigMap, c client.Reader, ns string) (*StorageConfig, error) {\n\tvar sc StorageConfig\n\thasDefault := false\n\tfor k, v := range configMap.Data {\n\t\tvar bc BucketConfig\n\t\terr := yaml.Unmarshal([]byte(v), &bc)\n\t\tif err != nil {\n\...
[ "0.63567257", "0.6230881", "0.60596675", "0.5861226", "0.5835127", "0.57842094", "0.5706375", "0.5677003", "0.56734043", "0.5671543", "0.56431067", "0.56058097", "0.55307925", "0.5496921", "0.5466971", "0.54075783", "0.5371226", "0.5356307", "0.5315504", "0.527059", "0.525538...
0.779615
0
Equals returns true if the given StorageConfig's are the same.
// Equals returns true if the given StorageConfig's are deeply equal.
func (sc StorageConfig) Equals(other StorageConfig) bool {
	same := reflect.DeepEqual(sc, other)
	return same
}
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (c Config) Equal(rhs Config) bool {\n\n\tif len(c.Addresses) != len(rhs.Addresses) {\n\t\treturn false\n\t}\n\tfor i := range c.Addresses {\n\t\tif c.Addresses[i] != rhs.Addresses[i] {\n\t\t\treturn false\n\t\t}\n\t}\n\tif len(c.Middlewares) != len(rhs.Middlewares) {\n\t\treturn false\n\t}\n\tfor i := range c...
[ "0.7215028", "0.7100559", "0.70844483", "0.6989873", "0.691176", "0.6859806", "0.6853824", "0.6753042", "0.6624084", "0.65836465", "0.64694196", "0.6438307", "0.6187856", "0.6136497", "0.607758", "0.60505444", "0.5996127", "0.5987178", "0.59648985", "0.59618485", "0.59520084"...
0.8606742
0
newStorageConfigFromConfigMap creates a StorageConfig from the supplied ConfigMap
func newStorageConfigFromConfigMap(ctx context.Context, configMap *corev1.ConfigMap, c client.Reader, ns string) (*StorageConfig, error) { var sc StorageConfig hasDefault := false for k, v := range configMap.Data { var bc BucketConfig err := yaml.Unmarshal([]byte(v), &bc) if err != nil { return nil, err } bc.fixEndpoint() bc.isDefault = k == "default" // Try loading the secret var secret corev1.Secret key := client.ObjectKey{ Name: bc.SecretName, Namespace: configMap.GetNamespace(), } if err := c.Get(ctx, key, &secret); err != nil { return nil, maskAny(err) } if raw, found := secret.Data[constants.SecretKeyS3AccessKey]; found { bc.accessKey = string(raw) } else { return nil, maskAny(fmt.Errorf("Config %#v refers to Secret '%s' that has no '%s' field", configMap.Data, bc.SecretName, constants.SecretKeyS3AccessKey)) } if raw, found := secret.Data[constants.SecretKeyS3SecretKey]; found { bc.secretKey = string(raw) } else { return nil, maskAny(fmt.Errorf("Config %#v refers to Secret '%s' that has no '%s' field", configMap.Data, bc.SecretName, constants.SecretKeyS3SecretKey)) } // Add to config hasDefault = hasDefault || bc.isDefault sc.Buckets = append(sc.Buckets, bc) } if !hasDefault { return nil, maskAny(fmt.Errorf("Config %#v must have a default bucket", configMap.Data)) } sort.Slice(sc.Buckets, func(i, j int) bool { a, b := sc.Buckets[i], sc.Buckets[j] if a.isDefault && !b.isDefault { return true } if !a.isDefault && b.isDefault { return false } return strings.Compare(a.Name, b.Name) < 0 }) return &sc, nil }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func newConfigFromMap(cfgMap map[string]string) (*configstore, error) {\n\tdata, ok := cfgMap[configdatakey]\n\tif !ok {\n\t\treturn nil, fmt.Errorf(\"config data not present\")\n\t}\n\treturn &configstore{data}, nil\n}", "func NewStorageConfig(ctx context.Context, c client.Reader, ns string) (*StorageConfig, er...
[ "0.6375633", "0.6281944", "0.6112564", "0.6053278", "0.5994375", "0.5984264", "0.58559567", "0.583777", "0.58275783", "0.58275783", "0.58275783", "0.5785979", "0.5637561", "0.560425", "0.5552387", "0.5518179", "0.5495444", "0.54701823", "0.5440394", "0.5195264", "0.51931673",...
0.7016705
0
fixEndpoint removes the scheme from the endpoint
func (bc *BucketConfig) fixEndpoint() { if u, err := url.Parse(bc.Endpoint); err == nil { bc.Endpoint = u.Host if strings.ToLower(u.Scheme) == "https" { bc.Secure = true } } }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func normEndpoint(url string) string {\n\tif len(url) > 0 {\n\t\tif url[len(url)-1] != '/' {\n\t\t\turl += \"/\"\n\t\t}\n\t\treturn url\n\t}\n\tpanic(\"az: invalid endpoint\")\n}", "func stripProtocol(ref string) string {\n\tif strings.HasPrefix(ref, \"/ipfs/\") {\n\t\treturn ref[len(\"/ipfs/\"):]\n\t}\n\treturn...
[ "0.6320563", "0.62610084", "0.61440116", "0.56868225", "0.5683344", "0.560318", "0.554496", "0.5536776", "0.55366045", "0.55199313", "0.55152476", "0.550689", "0.55057013", "0.5503566", "0.5499093", "0.5494576", "0.54539376", "0.5447968", "0.5438922", "0.5434849", "0.5428037"...
0.7643794
0
hash returns a lowercase hash that uniquely identifies the bucket name & endpoint.
func (bc BucketConfig) hash() string { source := strings.ToLower(bc.Name + "/" + bc.Endpoint) hash := sha1.Sum([]byte(source)) return strings.ToLower(hex.EncodeToString(hash[0:6])) }
{ "objective": { "self": [], "paired": [], "triplet": [ [ "query", "document", "negatives" ] ] } }
[ "func (b *Bucket) Hash() string {\n\treturn b.descriptor.Hash()\n}", "func (authSvc *AuthService) computeHash(s string) hash.Hash {\n\t\n\tvar hash hash.Hash = sha256.New()\n\tvar bytes []byte = []byte(s)\n\thash.Write(authSvc.secretSalt)\n\thash.Write(bytes)\n\treturn hash\n}", "func (obj *bucket) Hash() hash....
[ "0.69260216", "0.6708413", "0.649422", "0.6427349", "0.63813674", "0.6372707", "0.63630694", "0.6348366", "0.6326957", "0.6312707", "0.6312707", "0.6312707", "0.63097775", "0.62950593", "0.6239183", "0.6225217", "0.6214106", "0.6207067", "0.6190727", "0.61862934", "0.6180711"...
0.8359008
0