text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestUnmarshalInteger(t *testing.T) { input := []byte(`{ "N": "123"}`) var av DynamoDBAttributeValue err := json.Unmarshal(input, &av) assert.Nil(t, err) var i int64 i, err = av.Integer() assert.Nil(t, err) assert.Equal(t, int64(123), i) }
explode_data.jsonl/61699
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 1806, 27121, 3486, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 3056, 3782, 5809, 90, 330, 45, 788, 330, 16, 17, 18, 1, 5541, 692, 2405, 1822, 71813, 3506, 78554, 198, 9859, 1669, 2951, 38097, 5384, 11, 609, 402, 692, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestU32(t *testing.T) { w := newbw(nil) w.WriteUint32(uint32(4)) b := w.Bytes() if b == nil { fmt.Println("Bwriter failed.") t.FailNow() } r := newbr(b) n, ok := r.NextU32() if !ok { fmt.Println("Breader failed.") t.FailNow() } else if n != 4 { fmt.Println("expected 4, got", n) t.FailNow() } }
explode_data.jsonl/22546
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 52, 18, 17, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 501, 39824, 27907, 340, 6692, 4073, 21570, 18, 17, 8488, 18, 17, 7, 19, 4390, 2233, 1669, 289, 36868, 741, 743, 293, 621, 2092, 341, 197, 11009, 12419, 445, 33, 181...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestServiceCreateInvalidDataYAML(t *testing.T) { tempDir, err := ioutil.TempDir("", "kn-file") defer os.RemoveAll(tempDir) assert.NilError(t, err) tempFile := filepath.Join(tempDir, "invalid.yaml") // Remove dash invalidData := strings.Replace(serviceYAML, "- image", "image", 1) err = ioutil.WriteFile(tempFile, []byte(invalidData), os.FileMode(0666)) assert.NilError(t, err) _, _, _, err = fakeServiceCreate([]string{"service", "create", "foo", "--filename", tempFile}, false) assert.Assert(t, util.ContainsAll(err.Error(), "mapping", "values", "not", "allowed")) // Remove name key invalidData = strings.Replace(serviceYAML, "name:", "", 1) err = ioutil.WriteFile(tempFile, []byte(invalidData), os.FileMode(0666)) assert.NilError(t, err) _, _, _, err = fakeServiceCreate([]string{"service", "create", "foo", "--filename", tempFile}, false) assert.Assert(t, util.ContainsAll(err.Error(), "cannot", "unmarshal", "Go", "struct", "Service.metadata")) // Remove opening square bracket invalidData = strings.Replace(serviceYAML, "env", "\tenv", 1) err = ioutil.WriteFile(tempFile, []byte(invalidData), os.FileMode(0666)) assert.NilError(t, err) _, _, _, err = fakeServiceCreate([]string{"service", "create", "foo", "--filename", tempFile}, false) assert.Assert(t, util.ContainsAll(err.Error(), "found", "tab", "violates", "indentation")) }
explode_data.jsonl/42464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 505 }
[ 2830, 3393, 1860, 4021, 7928, 1043, 56, 31102, 1155, 353, 8840, 836, 8, 341, 16280, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 19095, 14203, 1138, 16867, 2643, 84427, 9758, 6184, 340, 6948, 59678, 1454, 1155, 11, 1848, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResource_Delete(t *testing.T) { type fields struct { update *update query *query insert *insert remove *remove } type args struct { allOrNone bool records []string } tests := []struct { name string fields fields args args want []DeleteValue wantErr bool }{ { name: "success", args: args{ records: []string{"id1", "id2", "id3"}, }, fields: fields{ remove: &remove{ session: &mockSessionFormatter{ url: "something.com", client: mockHTTPClient(func(req *http.Request) *http.Response { if strings.HasPrefix(req.URL.String(), "something.com/composite/sobjects") == false { return &http.Response{ StatusCode: 500, Status: "Bad URL: " + req.URL.String(), Body: ioutil.NopCloser(strings.NewReader("resp")), Header: make(http.Header), } } if req.Method != http.MethodDelete { return &http.Response{ StatusCode: 500, Status: "Bad Method", Body: ioutil.NopCloser(strings.NewReader("resp")), Header: make(http.Header), } } values := req.URL.Query() if _, ok := values["allOrNone"]; ok == false { return &http.Response{ StatusCode: 500, Status: "allOrNone", Body: ioutil.NopCloser(strings.NewReader("resp")), Header: make(http.Header), } } if _, ok := values["ids"]; ok == false { return &http.Response{ StatusCode: 500, Status: "ids", Body: ioutil.NopCloser(strings.NewReader("resp")), Header: make(http.Header), } } resp := ` [ { "id" : "001RM000003oLrfYAE", "success" : true, "errors" : [ ] }, { "success" : false, "errors" : [ { "statusCode" : "MALFORMED_ID", "message" : "malformed id 001RM000003oLrB000", "fields" : [ ] } ] } ]` return &http.Response{ StatusCode: http.StatusOK, Status: "Some Status", Body: ioutil.NopCloser(strings.NewReader(resp)), Header: make(http.Header), } }), }, }, }, want: []DeleteValue{ { sobject.InsertValue{ Success: true, ID: "001RM000003oLrfYAE", Errors: make([]sfdc.Error, 0), }, }, { sobject.InsertValue{ Success: false, Errors: []sfdc.Error{ { ErrorCode: "MALFORMED_ID", Message: "malformed id 001RM000003oLrB000", Fields: make([]string, 0), }, }, }, }, }, 
wantErr: false, }, { name: "not initialized", fields: fields{}, args: args{}, wantErr: true, }, { name: "no records", fields: fields{ remove: &remove{}, }, args: args{}, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { r := &Resource{ update: tt.fields.update, query: tt.fields.query, insert: tt.fields.insert, remove: tt.fields.remove, } got, err := r.Delete(tt.args.allOrNone, tt.args.records) if (err != nil) != tt.wantErr { t.Errorf("Resource.Delete() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("Resource.Delete() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/64169
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1931 }
[ 2830, 3393, 4783, 57418, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 27175, 353, 2386, 198, 197, 27274, 220, 353, 1631, 198, 197, 59847, 353, 4208, 198, 197, 47233, 353, 5399, 198, 197, 532, 13158, 2827, 2036, 341, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestBatchQueryInfo(t *testing.T) { session := createSession(t) defer session.Close() if session.cfg.ProtoVersion == 1 { t.Skip("atomic batches not supported. Please use Cassandra >= 2.0") } if err := createTable(session, "CREATE TABLE gocql_test.batch_query_info (id int, cluster int, value text, PRIMARY KEY (id, cluster))"); err != nil { t.Fatalf("failed to create table with error '%v'", err) } write := func(q *QueryInfo) ([]interface{}, error) { values := make([]interface{}, 3) values[0] = 4000 values[1] = 5000 values[2] = "bar" return values, nil } batch := session.NewBatch(LoggedBatch) batch.Bind("INSERT INTO batch_query_info (id, cluster, value) VALUES (?, ?,?)", write) if err := session.ExecuteBatch(batch); err != nil { t.Fatalf("batch insert into batch_query_info failed, err '%v'", err) } read := func(q *QueryInfo) ([]interface{}, error) { values := make([]interface{}, 2) values[0] = 4000 values[1] = 5000 return values, nil } qry := session.Bind("SELECT id, cluster, value FROM batch_query_info WHERE id = ? and cluster = ?", read) iter := qry.Iter() var id, cluster int var value string iter.Scan(&id, &cluster, &value) if err := iter.Close(); err != nil { t.Fatalf("query with batch_query_info info failed, err '%v'", err) } if value != "bar" { t.Fatalf("Expected value %s, but got %s", "bar", value) } }
explode_data.jsonl/11156
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 511 }
[ 2830, 3393, 21074, 2859, 1731, 1155, 353, 8840, 836, 8, 341, 25054, 1669, 1855, 5283, 1155, 340, 16867, 3797, 10421, 2822, 743, 3797, 30481, 7763, 983, 5637, 621, 220, 16, 341, 197, 3244, 57776, 445, 6618, 44792, 537, 7248, 13, 5209, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiError_Append(t *testing.T) { tests := []multiErrorTest{ { name: "append error", setup: func() *MultiError { return NewMultiError() }, test: func(t *testing.T, multiError *MultiError) { assert.Equal(t, 0, len(multiError.Errors())) multiError.Append(errors.New("something broke :(")) assert.Equal(t, 1, len(multiError.Errors())) assert.Equal(t, "something broke :(", multiError.Error()) }, }, { name: "append nil error", setup: func() *MultiError { return NewMultiError() }, test: func(t *testing.T, multiError *MultiError) { assert.Equal(t, 0, multiError.Len()) multiError.Append(error(nil)) assert.Equal(t, 0, multiError.Len()) }, }, } runMultiErrorTestTable(t, tests) }
explode_data.jsonl/79126
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 325 }
[ 2830, 3393, 20358, 1454, 36117, 408, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 26268, 1454, 2271, 515, 197, 197, 515, 298, 11609, 25, 330, 5090, 1465, 756, 298, 84571, 25, 2915, 368, 353, 20358, 1454, 341, 571, 853, 1532, 20358...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestZeroScaledWindowReceive(t *testing.T) { // This test ensures that the endpoint sends a non-zero window size // advertisement when the scaled window transitions from 0 to non-zero, // but the actual window (not scaled) hasn't gotten to zero. c := context.New(t, defaultMTU) defer c.Cleanup() // Set the buffer size such that a window scale of 5 will be used. const bufSz = 65535 * 10 const ws = uint32(5) c.CreateConnectedWithRawOptions(context.TestInitialSequenceNumber, 30000, bufSz, []byte{ header.TCPOptionWS, 3, 0, header.TCPOptionNOP, }) // Write chunks of 50000 bytes. remain := 0 sent := 0 data := make([]byte, 50000) iss := seqnum.Value(context.TestInitialSequenceNumber).Add(1) // Keep writing till the window drops below len(data). for { c.SendPacket(data, &context.Headers{ SrcPort: context.TestPort, DstPort: c.Port, Flags: header.TCPFlagAck, SeqNum: iss.Add(seqnum.Size(sent)), AckNum: c.IRS.Add(1), RcvWnd: 30000, }) sent += len(data) pkt := c.GetPacket() checker.IPv4(t, pkt, checker.PayloadLen(header.TCPMinimumSize), checker.TCP( checker.DstPort(context.TestPort), checker.TCPSeqNum(uint32(c.IRS)+1), checker.TCPAckNum(uint32(iss)+uint32(sent)), checker.TCPFlags(header.TCPFlagAck), ), ) // Don't reduce window to zero here. if wnd := int(header.TCP(header.IPv4(pkt).Payload()).WindowSize()); wnd<<ws < len(data) { remain = wnd << ws break } } // Make the window non-zero, but the scaled window zero. 
for remain >= 16 { data = data[:remain-15] c.SendPacket(data, &context.Headers{ SrcPort: context.TestPort, DstPort: c.Port, Flags: header.TCPFlagAck, SeqNum: iss.Add(seqnum.Size(sent)), AckNum: c.IRS.Add(1), RcvWnd: 30000, }) sent += len(data) pkt := c.GetPacket() checker.IPv4(t, pkt, checker.PayloadLen(header.TCPMinimumSize), checker.TCP( checker.DstPort(context.TestPort), checker.TCPSeqNum(uint32(c.IRS)+1), checker.TCPAckNum(uint32(iss)+uint32(sent)), checker.TCPFlags(header.TCPFlagAck), ), ) // Since the receive buffer is split between window advertisement and // application data buffer the window does not always reflect the space // available and actual space available can be a bit more than what is // advertised in the window. wnd := int(header.TCP(header.IPv4(pkt).Payload()).WindowSize()) if wnd == 0 { break } remain = wnd << ws } // Read at least 2MSS of data. An ack should be sent in response to that. // Since buffer space is now split in half between window and application // data we need to read more than 1 MSS(65536) of data for a non-zero window // update to be sent. For 1MSS worth of window to be available we need to // read at least 128KB. Since our segments above were 50KB each it means // we need to read at 3 packets. w := tcpip.LimitedWriter{ W: ioutil.Discard, N: defaultMTU * 2, } for w.N != 0 { res, err := c.EP.Read(&w, tcpip.ReadOptions{}) t.Logf("err=%v res=%#v", err, res) if err != nil { t.Fatalf("Read failed: %s", err) } } checker.IPv4(t, c.GetPacket(), checker.PayloadLen(header.TCPMinimumSize), checker.TCP( checker.DstPort(context.TestPort), checker.TCPSeqNum(uint32(c.IRS)+1), checker.TCPAckNum(uint32(iss)+uint32(sent)), checker.TCPWindowGreaterThanEq(uint16(defaultMTU>>ws)), checker.TCPFlags(header.TCPFlagAck), ), ) }
explode_data.jsonl/75962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1415 }
[ 2830, 3393, 17999, 94201, 4267, 14742, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 1273, 25351, 429, 279, 14887, 21308, 264, 2477, 36929, 3241, 1379, 198, 197, 322, 32689, 979, 279, 30690, 3241, 33592, 504, 220, 15, 311, 2477, 36929, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestLock_MonitorRetry(t *testing.T) { t.Parallel() raw, s := makeClient(t) defer s.Stop() // Set up a server that always responds with 500 errors. failer := func(w http.ResponseWriter, req *http.Request) { w.WriteHeader(500) } outage := httptest.NewServer(http.HandlerFunc(failer)) defer outage.Close() // Set up a reverse proxy that will send some requests to the // 500 server and pass everything else through to the real Consul // server. var mutex sync.Mutex errors := 0 director := func(req *http.Request) { mutex.Lock() defer mutex.Unlock() req.URL.Scheme = "http" if errors > 0 && req.Method == "GET" && strings.Contains(req.URL.Path, "/v1/kv/test/lock") { req.URL.Host = outage.URL[7:] // Strip off "http://". errors-- } else { req.URL.Host = raw.config.Address } } proxy := httptest.NewServer(&httputil.ReverseProxy{Director: director}) defer proxy.Close() // Make another client that points at the proxy instead of the real // Consul server. config := raw.config config.Address = proxy.URL[7:] // Strip off "http://". c, err := NewClient(&config) if err != nil { t.Fatalf("err: %v", err) } // Set up a lock with retries enabled. opts := &LockOptions{ Key: "test/lock", SessionTTL: "60s", MonitorRetries: 3, } lock, err := c.LockOpts(opts) if err != nil { t.Fatalf("err: %v", err) } // Make sure the default got set. if lock.opts.MonitorRetryTime != DefaultMonitorRetryTime { t.Fatalf("bad: %d", lock.opts.MonitorRetryTime) } // Now set a custom time for the test. opts.MonitorRetryTime = 250 * time.Millisecond lock, err = c.LockOpts(opts) if err != nil { t.Fatalf("err: %v", err) } if lock.opts.MonitorRetryTime != 250*time.Millisecond { t.Fatalf("bad: %d", lock.opts.MonitorRetryTime) } // Should get the lock. leaderCh, err := lock.Lock(nil) if err != nil { t.Fatalf("err: %v", err) } if leaderCh == nil { t.Fatalf("not leader") } // Poke the key using the raw client to force the monitor to wake up // and check the lock again. 
This time we will return errors for some // of the responses. mutex.Lock() errors = 2 mutex.Unlock() pair, _, err := raw.KV().Get("test/lock", &QueryOptions{}) if err != nil { t.Fatalf("err: %v", err) } if _, err := raw.KV().Put(pair, &WriteOptions{}); err != nil { t.Fatalf("err: %v", err) } time.Sleep(5 * opts.MonitorRetryTime) // Should still be the leader. select { case <-leaderCh: t.Fatalf("should be leader") default: } // Now return an overwhelming number of errors. mutex.Lock() errors = 10 mutex.Unlock() if _, err := raw.KV().Put(pair, &WriteOptions{}); err != nil { t.Fatalf("err: %v", err) } time.Sleep(5 * opts.MonitorRetryTime) // Should lose leadership. select { case <-leaderCh: case <-time.After(time.Second): t.Fatalf("should not be leader") } }
explode_data.jsonl/27629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1117 }
[ 2830, 3393, 11989, 1245, 30314, 51560, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 76559, 11, 274, 1669, 1281, 2959, 1155, 340, 16867, 274, 30213, 2822, 197, 322, 2573, 705, 264, 3538, 429, 2677, 30580, 448, 220, 20, 15, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateContainerFromTask(t *testing.T) { assert := assert.New(t) require := require.New(t) assert.NoError(db.ClearCollections(task.Collection, model.VersionCollection, distro.Collection, model.ProjectRefCollection, model.ProjectVarsCollection, host.Collection)) t1 := task.Task{ Id: "t1", DisplayName: "t1", Version: "v1", DistroId: "distro", Project: "p", BuildVariant: "bv", HostId: "h1", } assert.NoError(t1.Insert()) versionYaml := ` tasks: - name: t1 commands: - command: host.create params: image: docker.io/library/hello-world distro: distro command: echo hi provider: docker num_hosts: 1 background: false buildvariants: - name: "bv" tasks: - name: t1 ` v1 := model.Version{ Id: "v1", Config: versionYaml, Identifier: "p", } assert.NoError(v1.Insert()) h1 := host.Host{ Id: "h1", RunningTask: t1.Id, } assert.NoError(h1.Insert()) parent := distro.Distro{Id: "parent-distro", PoolSize: 3, Provider: evergreen.ProviderNameMock} require.NoError(parent.Insert()) pool := &evergreen.ContainerPool{Distro: "parent-distro", Id: "test-pool", MaxContainers: 2} parentHost := &host.Host{ Id: "host1", Host: "host", User: "user", Distro: distro.Distro{Id: "parent-distro"}, Status: evergreen.HostRunning, HasContainers: true, ContainerPoolSettings: pool, } require.NoError(parentHost.Insert()) d := distro.Distro{Id: "distro", Provider: evergreen.ProviderNameMock, ContainerPool: "test-pool"} require.NoError(d.Insert()) p := model.ProjectRef{ Identifier: "p", } assert.NoError(p.Insert()) pvars := model.ProjectVars{ Id: "p", } assert.NoError(pvars.Insert()) dc := DBCreateHostConnector{} err := dc.CreateHostsFromTask(&t1, user.DBUser{Id: "me"}, "") assert.NoError(err) createdHosts, err := host.Find(host.IsUninitialized) assert.NoError(err) require.Len(createdHosts, 1) h := createdHosts[0] assert.Equal("me", h.StartedBy) assert.Equal("docker.io/library/hello-world", h.DockerOptions.Image) assert.Equal("echo hi", h.DockerOptions.Command) assert.Equal(distro.DockerImageBuildTypePull, 
h.DockerOptions.Method) }
explode_data.jsonl/63020
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1009 }
[ 2830, 3393, 4021, 4502, 3830, 6262, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 17957, 1669, 1373, 7121, 1155, 340, 6948, 35699, 9791, 13524, 52730, 17483, 28629, 11, 1614, 35842, 6482, 11, 1582, 299, 28629, 11, 161...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigError(t *testing.T) { confFile := configfile(`{}`) os.Remove(confFile) conf := config.NewConfig() err := conf.Load(confFile, logConfPath) AssertThat(t, err.Error(), EqualTo{fmt.Sprintf("browsers config: read error: open %s: no such file or directory", confFile)}) }
explode_data.jsonl/37914
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 2648, 1454, 1155, 353, 8840, 836, 8, 341, 67850, 1703, 1669, 2193, 1192, 5809, 90, 27085, 25078, 13270, 29879, 1703, 340, 67850, 1669, 2193, 7121, 2648, 741, 9859, 1669, 2335, 13969, 29879, 1703, 11, 1487, 15578, 1820, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateOAuthUserAttrs(t *testing.T) { th := Setup(t) defer th.TearDown() id := model.NewId() id2 := model.NewId() gitlabProvider := einterfaces.GetOauthProvider("gitlab") username := "user" + id username2 := "user" + id2 email := "user" + id + "@nowhere.com" email2 := "user" + id2 + "@nowhere.com" var user, user2 *model.User var gitlabUserObj oauthgitlab.GitLabUser user, gitlabUserObj = createGitlabUser(t, th.App, username, email) user2, _ = createGitlabUser(t, th.App, username2, email2) t.Run("UpdateUsername", func(t *testing.T) { t.Run("NoExistingUserWithSameUsername", func(t *testing.T) { gitlabUserObj.Username = "updateduser" + model.NewId() gitlabUser := getGitlabUserPayload(gitlabUserObj, t) data := bytes.NewReader(gitlabUser) user = getUserFromDB(th.App, user.Id, t) th.App.UpdateOAuthUserAttrs(data, user, gitlabProvider, "gitlab") user = getUserFromDB(th.App, user.Id, t) if user.Username != gitlabUserObj.Username { t.Fatal("user's username is not updated") } }) t.Run("ExistinguserWithSameUsername", func(t *testing.T) { gitlabUserObj.Username = user2.Username gitlabUser := getGitlabUserPayload(gitlabUserObj, t) data := bytes.NewReader(gitlabUser) user = getUserFromDB(th.App, user.Id, t) th.App.UpdateOAuthUserAttrs(data, user, gitlabProvider, "gitlab") user = getUserFromDB(th.App, user.Id, t) if user.Username == gitlabUserObj.Username { t.Fatal("user's username is updated though there already exists another user with the same username") } }) }) t.Run("UpdateEmail", func(t *testing.T) { t.Run("NoExistingUserWithSameEmail", func(t *testing.T) { gitlabUserObj.Email = "newuser" + model.NewId() + "@nowhere.com" gitlabUser := getGitlabUserPayload(gitlabUserObj, t) data := bytes.NewReader(gitlabUser) user = getUserFromDB(th.App, user.Id, t) th.App.UpdateOAuthUserAttrs(data, user, gitlabProvider, "gitlab") user = getUserFromDB(th.App, user.Id, t) if user.Email != gitlabUserObj.Email { t.Fatal("user's email is not updated") } if !user.EmailVerified { 
t.Fatal("user's email should have been verified") } }) t.Run("ExistingUserWithSameEmail", func(t *testing.T) { gitlabUserObj.Email = user2.Email gitlabUser := getGitlabUserPayload(gitlabUserObj, t) data := bytes.NewReader(gitlabUser) user = getUserFromDB(th.App, user.Id, t) th.App.UpdateOAuthUserAttrs(data, user, gitlabProvider, "gitlab") user = getUserFromDB(th.App, user.Id, t) if user.Email == gitlabUserObj.Email { t.Fatal("user's email is updated though there already exists another user with the same email") } }) }) t.Run("UpdateFirstName", func(t *testing.T) { gitlabUserObj.Name = "Updated User" gitlabUser := getGitlabUserPayload(gitlabUserObj, t) data := bytes.NewReader(gitlabUser) user = getUserFromDB(th.App, user.Id, t) th.App.UpdateOAuthUserAttrs(data, user, gitlabProvider, "gitlab") user = getUserFromDB(th.App, user.Id, t) if user.FirstName != "Updated" { t.Fatal("user's first name is not updated") } }) t.Run("UpdateLastName", func(t *testing.T) { gitlabUserObj.Name = "Updated Lastname" gitlabUser := getGitlabUserPayload(gitlabUserObj, t) data := bytes.NewReader(gitlabUser) user = getUserFromDB(th.App, user.Id, t) th.App.UpdateOAuthUserAttrs(data, user, gitlabProvider, "gitlab") user = getUserFromDB(th.App, user.Id, t) if user.LastName != "Lastname" { t.Fatal("user's last name is not updated") } }) }
explode_data.jsonl/31416
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1424 }
[ 2830, 3393, 4289, 57850, 1474, 53671, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 340, 16867, 270, 836, 682, 4454, 2822, 15710, 1669, 1614, 7121, 764, 741, 15710, 17, 1669, 1614, 7121, 764, 741, 90731, 14380, 5179, 1669, 384...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPeerSetAddRemoveOne(t *testing.T) { t.Parallel() peerSet := NewPeerSet() var peerList []Peer for i := 0; i < 5; i++ { p := newMockPeer(net.IP{127, 0, 0, byte(i)}) if err := peerSet.Add(p); err != nil { t.Error(err) } peerList = append(peerList, p) } n := len(peerList) // 1. Test removing from the front for i, peerAtFront := range peerList { removed := peerSet.Remove(peerAtFront) assert.True(t, removed) wantSize := n - i - 1 for j := 0; j < 2; j++ { assert.Equal(t, false, peerSet.Has(peerAtFront.ID()), "#%d Run #%d: failed to remove peer", i, j) assert.Equal(t, wantSize, peerSet.Size(), "#%d Run #%d: failed to remove peer and decrement size", i, j) // Test the route of removing the now non-existent element removed := peerSet.Remove(peerAtFront) assert.False(t, removed) } } // 2. Next we are testing removing the peer at the end // a) Replenish the peerSet for _, peer := range peerList { if err := peerSet.Add(peer); err != nil { t.Error(err) } } // b) In reverse, remove each element for i := n - 1; i >= 0; i-- { peerAtEnd := peerList[i] removed := peerSet.Remove(peerAtEnd) assert.True(t, removed) assert.Equal(t, false, peerSet.Has(peerAtEnd.ID()), "#%d: failed to remove item at end", i) assert.Equal(t, i, peerSet.Size(), "#%d: differing sizes after peerSet.Remove(atEndPeer)", i) } }
explode_data.jsonl/68619
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 559 }
[ 2830, 3393, 30888, 1649, 2212, 13021, 3966, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 16537, 1649, 1669, 1532, 30888, 1649, 2822, 2405, 14397, 852, 3056, 30888, 198, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 20, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestStateComp(t *testing.T) { st1 := state.NewState(dbm.NewMemDB()) _, _, err := st1.Update(func(ws state.Updatable) error { return ws.UpdateAccount(acm.NewAccountFromSecret("1")) }) require.NoError(t, err) _, _, err = st1.Update(func(ws state.Updatable) error { return ws.UpdateAccount(acm.NewAccountFromSecret("2")) }) require.NoError(t, err) db2 := dbm.NewMemDB() st2, err := st1.Copy(db2) require.NoError(t, err) err = CompareStateAtHeight(st2, st1, 0) require.Error(t, err) _, _, err = st2.Update(func(ws state.Updatable) error { return ws.UpdateAccount(acm.NewAccountFromSecret("3")) }) require.NoError(t, err) err = CompareStateAtHeight(st2, st1, 1) require.Error(t, err) }
explode_data.jsonl/79288
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 1397, 13552, 1155, 353, 8840, 836, 8, 341, 18388, 16, 1669, 1584, 7121, 1397, 9791, 76, 7121, 18816, 3506, 2398, 197, 6878, 8358, 1848, 1669, 357, 16, 16689, 18552, 57786, 1584, 60828, 88831, 8, 1465, 341, 197, 853, 17624, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_adapter_FetchArtifacts(t *testing.T) { a, s := getMockAdapter(t, true, true) defer s.Close() var filters = []*model.Filter{} var resources, err = a.FetchArtifacts(filters) assert.NotNil(t, err) assert.Nil(t, resources) }
explode_data.jsonl/38949
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 43682, 1400, 2995, 9286, 26401, 1155, 353, 8840, 836, 8, 341, 11323, 11, 274, 1669, 633, 11571, 5940, 1155, 11, 830, 11, 830, 340, 16867, 274, 10421, 741, 2405, 13406, 284, 29838, 2528, 31696, 16094, 2405, 4963, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestGetExtensionDefaults verifies GetExtension/SetExtension/ClearExtension
// for every scalar extension type, both with and without declared defaults:
// a set value round-trips with the correct dynamic type, and a cleared
// extension falls back to its declared default (or fails with
// ErrMissingExtension when no default exists).
func TestGetExtensionDefaults(t *testing.T) {
	var setFloat64 float64 = 1
	var setFloat32 float32 = 2
	var setInt32 int32 = 3
	var setInt64 int64 = 4
	var setUint32 uint32 = 5
	var setUint64 uint64 = 6
	var setBool = true
	var setBool2 = false
	var setString = "Goodnight string"
	var setBytes = []byte("Goodnight bytes")
	var setEnum = pb.DefaultsMessage_TWO

	type testcase struct {
		ext  *proto.ExtensionDesc // Extension we are testing.
		want interface{}          // Expected value of extension, or nil (meaning that GetExtension will fail).
		def  interface{}          // Expected value of extension after ClearExtension().
	}
	tests := []testcase{
		{pb.E_NoDefaultDouble, setFloat64, nil},
		{pb.E_NoDefaultFloat, setFloat32, nil},
		{pb.E_NoDefaultInt32, setInt32, nil},
		{pb.E_NoDefaultInt64, setInt64, nil},
		{pb.E_NoDefaultUint32, setUint32, nil},
		{pb.E_NoDefaultUint64, setUint64, nil},
		{pb.E_NoDefaultSint32, setInt32, nil},
		{pb.E_NoDefaultSint64, setInt64, nil},
		{pb.E_NoDefaultFixed32, setUint32, nil},
		{pb.E_NoDefaultFixed64, setUint64, nil},
		{pb.E_NoDefaultSfixed32, setInt32, nil},
		{pb.E_NoDefaultSfixed64, setInt64, nil},
		{pb.E_NoDefaultBool, setBool, nil},
		{pb.E_NoDefaultBool, setBool2, nil},
		{pb.E_NoDefaultString, setString, nil},
		{pb.E_NoDefaultBytes, setBytes, nil},
		{pb.E_NoDefaultEnum, setEnum, nil},
		{pb.E_DefaultDouble, setFloat64, float64(3.1415)},
		{pb.E_DefaultFloat, setFloat32, float32(3.14)},
		{pb.E_DefaultInt32, setInt32, int32(42)},
		{pb.E_DefaultInt64, setInt64, int64(43)},
		{pb.E_DefaultUint32, setUint32, uint32(44)},
		{pb.E_DefaultUint64, setUint64, uint64(45)},
		{pb.E_DefaultSint32, setInt32, int32(46)},
		{pb.E_DefaultSint64, setInt64, int64(47)},
		{pb.E_DefaultFixed32, setUint32, uint32(48)},
		{pb.E_DefaultFixed64, setUint64, uint64(49)},
		{pb.E_DefaultSfixed32, setInt32, int32(50)},
		{pb.E_DefaultSfixed64, setInt64, int64(51)},
		{pb.E_DefaultBool, setBool, true},
		{pb.E_DefaultBool, setBool2, true},
		{pb.E_DefaultString, setString, "Hello, string"},
		{pb.E_DefaultBytes, setBytes, []byte("Hello, bytes")},
		{pb.E_DefaultEnum, setEnum, pb.DefaultsMessage_ONE},
	}

	// checkVal fetches the extension from msg and verifies both its dynamic
	// type and its value against valWant (nil meaning GetExtension must fail
	// with ErrMissingExtension).
	checkVal := func(test testcase, msg *pb.DefaultsMessage, valWant interface{}) error {
		val, err := proto.GetExtension(msg, test.ext)
		if err != nil {
			if valWant != nil {
				return fmt.Errorf("GetExtension(): %s", err)
			}
			if want := proto.ErrMissingExtension; err != want {
				return fmt.Errorf("Unexpected error: got %v, want %v", err, want)
			}
			return nil
		}
		// All proto2 extension values are either a pointer to a value or a slice of values.
		ty := reflect.TypeOf(val)
		tyWant := reflect.TypeOf(test.ext.ExtensionType)
		if got, want := ty, tyWant; got != want {
			return fmt.Errorf("unexpected reflect.TypeOf(): got %v want %v", got, want)
		}
		tye := ty.Elem()
		tyeWant := tyWant.Elem()
		if got, want := tye, tyeWant; got != want {
			return fmt.Errorf("unexpected reflect.TypeOf().Elem(): got %v want %v", got, want)
		}
		// Check the name of the type of the value.
		// If it is an enum it will be type int32 with the name of the enum.
		if got, want := tye.Name(), tye.Name(); got != want {
			return fmt.Errorf("unexpected reflect.TypeOf().Elem().Name(): got %v want %v", got, want)
		}
		// Check that value is what we expect.
		// If we have a pointer in val, get the value it points to.
		valExp := val
		if ty.Kind() == reflect.Ptr {
			valExp = reflect.ValueOf(val).Elem().Interface()
		}
		if got, want := valExp, valWant; !reflect.DeepEqual(got, want) {
			return fmt.Errorf("unexpected reflect.DeepEqual(): got %v want %v", got, want)
		}
		return nil
	}

	// setTo boxes test.want into the pointer shape SetExtension expects for
	// pointer-typed extensions; scalar/slice extensions pass through as-is.
	setTo := func(test testcase) interface{} {
		setTo := reflect.ValueOf(test.want)
		if typ := reflect.TypeOf(test.ext.ExtensionType); typ.Kind() == reflect.Ptr {
			setTo = reflect.New(typ).Elem()
			setTo.Set(reflect.New(setTo.Type().Elem()))
			setTo.Elem().Set(reflect.ValueOf(test.want))
		}
		return setTo.Interface()
	}

	for _, test := range tests {
		msg := &pb.DefaultsMessage{}
		name := test.ext.Name

		// Check the initial value.
		if err := checkVal(test, msg, test.def); err != nil {
			t.Errorf("%s: %v", name, err)
		}

		// Set the per-type value and check value.
		name = fmt.Sprintf("%s (set to %T %v)", name, test.want, test.want)
		if err := proto.SetExtension(msg, test.ext, setTo(test)); err != nil {
			t.Errorf("%s: SetExtension(): %v", name, err)
			continue
		}
		if err := checkVal(test, msg, test.want); err != nil {
			t.Errorf("%s: %v", name, err)
			continue
		}

		// Set and check the value.
		name += " (cleared)"
		proto.ClearExtension(msg, test.ext)
		if err := checkVal(test, msg, test.def); err != nil {
			t.Errorf("%s: %v", name, err)
		}
	}
}
explode_data.jsonl/55064
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1964 }
[ 2830, 3393, 1949, 12049, 16273, 1155, 353, 8840, 836, 8, 341, 2405, 738, 5442, 21, 19, 2224, 21, 19, 284, 220, 16, 198, 2405, 738, 5442, 18, 17, 2224, 18, 17, 284, 220, 17, 198, 2405, 738, 1072, 18, 17, 526, 18, 17, 284, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestEncode(t *testing.T) { testcases := []struct { in Value outSQL string outASCII string }{{ in: NULL, outSQL: "null", outASCII: "null", }, { in: TestValue(Int64, "1"), outSQL: "1", outASCII: "1", }, { in: TestValue(VarChar, "foo"), outSQL: "'foo'", outASCII: "'Zm9v'", }, { in: TestValue(VarChar, "\x00'\"\b\n\r\t\x1A\\"), outSQL: "'\\0\\'\\\"\\b\\n\\r\\t\\Z\\\\'", outASCII: "'ACciCAoNCRpc'", }} for _, tcase := range testcases { buf := &bytes.Buffer{} tcase.in.EncodeSQL(buf) if tcase.outSQL != buf.String() { t.Errorf("%v.EncodeSQL = %q, want %q", tcase.in, buf.String(), tcase.outSQL) } buf = &bytes.Buffer{} tcase.in.EncodeASCII(buf) if tcase.outASCII != buf.String() { t.Errorf("%v.EncodeASCII = %q, want %q", tcase.in, buf.String(), tcase.outASCII) } } }
explode_data.jsonl/30813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 452 }
[ 2830, 3393, 32535, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 17430, 981, 5162, 198, 197, 13967, 6688, 256, 914, 198, 197, 13967, 56450, 914, 198, 197, 15170, 515, 197, 17430, 25, 981, 1770, 345, 197, 1396...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetAppWithoutReorderedMiddleware(t *testing.T) { app = nil r := mux.NewRouter() r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {}) n := GetApp() n.UseHandler(r) server := httptest.NewServer(n) defer server.Close() resp, err := http.Get(server.URL) if err != nil { t.Fatal("Expected run without errors but was", err.Error()) } if !strings.Contains(resp.Header.Get("Content-Type"), "application/json") { t.Error("content type should be application/json but not was", resp.Header.Get("Content-Type")) } MiddlewareStack = []negroni.Handler{} }
explode_data.jsonl/51058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 1949, 2164, 26040, 693, 10544, 24684, 1155, 353, 8840, 836, 8, 341, 28236, 284, 2092, 198, 7000, 1669, 59807, 7121, 9523, 741, 7000, 63623, 35460, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 35248, 9038, 1669, 2126...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDatabaseTerminate opens a bootstrapped mock database, terminates it,
// and confirms a subsequent Close reports the already-closed error.
func TestDatabaseTerminate(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	d, mapCh, _ := newTestDatabase(t, ctrl, Bootstrapped)
	defer func() {
		// Stop the namespace-map feed, then fail the test if any goroutines
		// are still alive after a second.
		close(mapCh)
		leaktest.CheckTimeout(t, time.Second)()
	}()

	require.NoError(t, d.Open())
	require.NoError(t, d.Terminate())
	// Terminate already shut the database down, so Close must fail.
	require.Equal(t, errDatabaseAlreadyClosed, d.Close())
}
explode_data.jsonl/46525
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 5988, 62519, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 2698, 11, 2415, 1143, 11, 716, 1669, 501, 2271, 5988, 1155, 11, 23743, 11, 15004, 495, 5677, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProcess(t *testing.T) { testdataBase := `../../testdata` for idx, spec := range processSpecs { destination := fmt.Sprintf(`/tmp/test-%s.txt`, spec.testDir) p, err := filepath.Abs(filepath.Join(testdataBase, spec.testDir, spec.template)) if err != nil { log.Println(err) } exp, err := filepath.Abs(filepath.Join(testdataBase, spec.testDir, spec.expectedOutputFile)) if err != nil { log.Println(err) } var f string vf := []string{} if len(spec.varfiles) > 0 { for _, v := range spec.varfiles { f, err = filepath.Abs(filepath.Join(testdataBase, spec.testDir, v)) if err != nil { log.Println(err) } vf = append(vf, f) } } r := ProcessRequest{ Source: p, Destination: destination, Varfiles: vf, Vars: spec.vars, PlaceholderSeparator: `:`, } err = Process(r) if err != nil { t.Errorf("spec %d <%s> expected error nil, got %v", idx, spec.testDir, err) } assertTextFilesEqual(t, destination, exp) } }
explode_data.jsonl/66295
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 489 }
[ 2830, 3393, 7423, 1155, 353, 8840, 836, 8, 341, 18185, 691, 3978, 1669, 1565, 2748, 92425, 3989, 2023, 7187, 11, 1398, 1669, 2088, 1882, 8327, 82, 341, 197, 197, 17997, 1669, 8879, 17305, 31813, 5173, 12697, 11069, 82, 3909, 7808, 1398,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestHealthHandler(t *testing.T) { req, _ := http.NewRequest("GET", "/_ah/health", nil) res := httptest.NewRecorder() healthCheckHandler(res, req) checkResponseCode(t, http.StatusOK, res.Code) if res.Body.String() != "ok" { t.Error("Health failed to return expected 'ok'") } }
explode_data.jsonl/74556
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 14542, 3050, 1155, 353, 8840, 836, 8, 341, 24395, 11, 716, 1669, 1758, 75274, 445, 3806, 497, 3521, 62, 1466, 14, 12120, 497, 2092, 340, 10202, 1669, 54320, 70334, 7121, 47023, 2822, 197, 12120, 3973, 3050, 4590, 11, 4232, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParse_Template(t *testing.T) { s := NewTestStatsd() s.Templates = []string{ "measurement.measurement.host.service", } lines := []string{ "cpu.idle.localhost:1|c", "cpu.busy.host01.myservice:11|c", } for _, line := range lines { err := s.parseStatsdLine(line) if err != nil { t.Errorf("Parsing line %s should not have resulted in an error\n", line) } } validations := []struct { name string value int64 }{ { "cpu_idle", 1, }, { "cpu_busy", 11, }, } // Validate counters for _, test := range validations { err := testValidateCounter(test.name, test.value, s.counters) if err != nil { t.Error(err.Error()) } } }
explode_data.jsonl/14371
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 14463, 57917, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 2271, 16635, 67, 741, 1903, 836, 76793, 284, 3056, 917, 515, 197, 197, 1, 81425, 17326, 24359, 17840, 5736, 756, 197, 630, 78390, 1669, 3056, 917, 515, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTaskTemplateDisconnectActionTemplates(t *testing.T) { tt := TaskTemplate{ DataPipeTemplates: []DataPipeTemplate{ DataPipeTemplate{ SourceActionName: "Action1", SourceOutputName: "out1", DestActionName: "Action2", DestInputName: "in2", }, }, } err := tt.DisconnectActionTemplates("Action1", "out1", "Action2", "in2") assert.Nil(t, err) assert.Equal(t, 0, len(tt.DataPipeTemplates)) err = tt.DisconnectActionTemplates("Action2", "out3", "Action3", "in4") assert.Equal(t, errors.New("Not found"), err) }
explode_data.jsonl/19604
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 6262, 7275, 60651, 2512, 51195, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 5430, 7275, 515, 197, 40927, 34077, 51195, 25, 3056, 1043, 34077, 7275, 515, 298, 40927, 34077, 7275, 515, 571, 197, 3608, 2512, 675, 25, 330, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestParseCertificateFail feeds known-bad PEM fixtures to ParseCertificate
// and checks that each is rejected with the expected error text and the
// expected fatal/non-fatal classification.
func TestParseCertificateFail(t *testing.T) {
	var tests = []struct {
		desc      string
		in        string // path to a PEM fixture under testdata/invalid
		wantErr   string // substring expected in the returned error
		wantFatal bool   // whether IsFatal should report the error as fatal
	}{
		{desc: "SubjectInfoEmpty", in: "testdata/invalid/xf-ext-subject-info-empty.pem", wantErr: "empty SubjectInfoAccess"},
		{desc: "RSAParamsNonNULL", in: "testdata/invalid/xf-pubkey-rsa-param-nonnull.pem", wantErr: "RSA key missing NULL parameters"},
		{desc: "EmptyEKU", in: "testdata/invalid/xf-ext-extended-key-usage-empty.pem", wantErr: "empty ExtendedKeyUsage"},
		{desc: "EKUEmptyOID", in: "testdata/invalid/xf-ext-extended-key-usage-empty-oid.pem", wantErr: "zero length OBJECT IDENTIFIER"},
		{desc: "SECp192r1TooShort", in: "testdata/invalid/xf-pubkey-ecdsa-secp192r1.pem", wantErr: "insecure curve (secp192r1)"},
		{desc: "SerialNumIntegerNotMinimal", in: "testdata/invalid/xf-der-invalid-nonminimal-int.pem", wantErr: "integer not minimally-encoded"},
		{desc: "RSAIntegerNotMinimal", in: "testdata/invalid/xf-der-pubkey-rsa-nonminimal-int.pem", wantErr: "integer not minimally-encoded"},
		{desc: "SubjectNonPrintable", in: "testdata/invalid/xf-subject-nonprintable.pem", wantErr: "PrintableString contains invalid character"},
		{desc: "NegativeRSAModulus", in: "testdata/invalid/xf-pubkey-rsa-modulus-negative.pem", wantErr: "RSA modulus is not a positive number"},
	}
	for _, test := range tests {
		t.Run(test.desc, func(t *testing.T) {
			data, err := ioutil.ReadFile(test.in)
			if err != nil {
				t.Fatalf("failed to read test data: %v", err)
			}
			block, _ := pem.Decode(data)
			got, err := ParseCertificate(block.Bytes)
			if err == nil {
				t.Fatalf("ParseCertificate()=%+v,nil; want nil, err containing %q", got, test.wantErr)
			}
			if !strings.Contains(err.Error(), test.wantErr) {
				t.Errorf("ParseCertificate()=_,%v; want nil, err containing %q", err, test.wantErr)
			}
			// The fatal/non-fatal classification must match expectations.
			gotFatal := IsFatal(err)
			if gotFatal != test.wantFatal {
				t.Errorf("ParseCertificate()=_,%v with fatal=%t; want nil, err containing %q with fatal=%t", err, gotFatal, test.wantErr, test.wantFatal)
			}
		})
	}
}
explode_data.jsonl/68023
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 860 }
[ 2830, 3393, 14463, 33202, 19524, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 41653, 414, 914, 198, 197, 17430, 286, 914, 198, 197, 50780, 7747, 256, 914, 198, 197, 50780, 62396, 1807, 198, 197, 59403, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLUT(t *testing.T) { src := IMRead("images/gocvlogo.jpg", IMReadColor) if src.Empty() { t.Error("Invalid read of Source Mat in LUT test") } defer src.Close() lut := IMRead("images/lut.png", IMReadColor) if lut.Empty() { t.Error("Invalid read of LUT Mat in LUT test") } defer lut.Close() dst := NewMat() defer dst.Close() LUT(src, lut, &dst) if dst.Cols() != 400 || dst.Rows() != 343 { t.Errorf("Expected dst size of 200x172 got %dx%d", dst.Cols(), dst.Rows()) } }
explode_data.jsonl/81701
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 43, 1381, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 6517, 4418, 445, 3642, 4846, 509, 85, 10129, 4819, 497, 6517, 4418, 1636, 340, 743, 2286, 11180, 368, 341, 197, 3244, 6141, 445, 7928, 1349, 315, 8748, 6867, 304, 444, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestToken_lexIdentifier table-tests lexIdentifier: which raw inputs are
// recognized as identifiers, and the token value the lexer produces
// (unquoted identifiers appear lower-cased; double-quoted ones keep case
// and embedded spaces).
func TestToken_lexIdentifier(t *testing.T) {
	tests := []struct {
		Identifier bool   // whether lexing should succeed
		input      string // raw input handed to the lexer
		value      string // expected token value on success
	}{
		{
			Identifier: true,
			input:      "a",
			value:      "a",
		},
		{
			Identifier: true,
			input:      "abc",
			value:      "abc",
		},
		{
			Identifier: true,
			input:      "abc ",
			value:      "abc",
		},
		{
			Identifier: true,
			input:      `" abc "`,
			value:      ` abc `,
		},
		{
			Identifier: true,
			input:      "a9$",
			value:      "a9$",
		},
		{
			Identifier: true,
			input:      "userName",
			value:      "username",
		},
		{
			Identifier: true,
			input:      `"userName"`,
			value:      "userName",
		},
		// false tests
		{
			Identifier: false,
			input:      `"`,
		},
		{
			Identifier: false,
			input:      "_sadsfa",
		},
		{
			Identifier: false,
			input:      "9sadsfa",
		},
		{
			Identifier: false,
			input:      " abc",
		},
	}
	for _, test := range tests {
		tok, _, ok := lexIdentifier(test.input, cursor{})
		assert.Equal(t, test.Identifier, ok, test.input)
		if ok {
			assert.Equal(t, test.value, tok.Value, test.input)
		}
	}
}
explode_data.jsonl/60002
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 610 }
[ 2830, 3393, 3323, 74547, 8714, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 197, 8714, 1807, 198, 197, 22427, 414, 914, 198, 197, 16309, 414, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 8714, 25, 830, 345, 298...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInjectCorrelationContextToHTTPReq(t *testing.T) { propagator := correlation.CorrelationContext{} props := propagation.New(propagation.WithInjectors(propagator)) tests := []struct { name string kvs []label.KeyValue wantInHeader []string wantedLen int }{ { name: "two simple values", kvs: []label.KeyValue{ label.String("key1", "val1"), label.String("key2", "val2"), }, wantInHeader: []string{"key1=val1", "key2=val2"}, }, { name: "two values with escaped chars", kvs: []label.KeyValue{ label.String("key1", "val1,val2"), label.String("key2", "val3=4"), }, wantInHeader: []string{"key1=val1%2Cval2", "key2=val3%3D4"}, }, { name: "values of non-string types", kvs: []label.KeyValue{ label.Bool("key1", true), label.Int("key2", 123), label.Int64("key3", 123), label.Int32("key4", 123), label.Uint("key5", 123), label.Uint32("key6", 123), label.Uint64("key7", 123), label.Float64("key8", 123.567), label.Float32("key9", 123.567), }, wantInHeader: []string{ "key1=true", "key2=123", "key3=123", "key4=123", "key5=123", "key6=123", "key7=123", "key8=123.567", "key9=123.567", }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { req, _ := http.NewRequest("GET", "http://example.com", nil) ctx := correlation.ContextWithMap(context.Background(), correlation.NewMap(correlation.MapUpdate{MultiKV: tt.kvs})) propagation.InjectHTTP(ctx, props, req.Header) gotHeader := req.Header.Get("otcorrelations") wantedLen := len(strings.Join(tt.wantInHeader, ",")) if wantedLen != len(gotHeader) { t.Errorf( "%s: Inject otcorrelations incorrect length %d != %d.", tt.name, tt.wantedLen, len(gotHeader), ) } for _, inHeader := range tt.wantInHeader { if !strings.Contains(gotHeader, inHeader) { t.Errorf( "%s: Inject otcorrelations missing part of header: %s in %s", tt.name, inHeader, gotHeader, ) } } }) } }
explode_data.jsonl/30216
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 963 }
[ 2830, 3393, 13738, 10580, 22221, 1972, 1249, 9230, 27234, 1155, 353, 8840, 836, 8, 341, 79244, 351, 850, 1669, 25588, 63560, 22221, 1972, 16094, 77691, 1669, 53643, 7121, 30638, 27137, 26124, 13738, 1087, 30638, 351, 850, 1171, 78216, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestLinkYaml renders a Link resource manifest from InitOptions and
// compares the generated YAML against the expected document.
func TestLinkYaml(t *testing.T) {
	as := assert.New(t)
	linkTemplate := projectriff_v1.Link{}
	opts := options.InitOptions{
		FunctionName: "myfunc",
		Input:        "in",
		UserAccount:  "me",
		Version:      "0.0.1",
	}
	yaml, err := createLinkYaml(linkTemplate, opts)
	t.Log(yaml)
	as.NoError(err)
	// NOTE(review): the expected manifest below appears whitespace-flattened
	// in this copy of the file; confirm the raw string matches the real
	// multi-line YAML produced by createLinkYaml.
	as.Equal(yaml, `--- apiVersion: projectriff.io/v1alpha1 kind: Link metadata: name: myfunc spec: function: myfunc input: in windowing: size: 1 `)
}
explode_data.jsonl/50587
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 3939, 56, 9467, 1155, 353, 8840, 836, 8, 341, 60451, 1669, 2060, 7121, 1155, 692, 54238, 7275, 1669, 2390, 81, 3092, 2273, 16, 22534, 16094, 64734, 1669, 2606, 26849, 3798, 515, 197, 87522, 675, 25, 330, 2408, 2830, 756, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSortedSetIncyby exercises the ZINCRBY command: successive increments
// accumulate per member, and malformed invocations produce the expected
// error messages. (Name typo "Incyby" is preserved for test history.)
func TestSortedSetIncyby(t *testing.T) {
	testRaw(t, func(c *client) {
		// Three +1.0 then one +2.0 on member "m"; three +3 on "m2".
		c.Do("ZINCRBY", "z", "1.0", "m")
		c.Do("ZINCRBY", "z", "1.0", "m")
		c.Do("ZINCRBY", "z", "1.0", "m")
		c.Do("ZINCRBY", "z", "2.0", "m")
		c.Do("ZINCRBY", "z", "3", "m2")
		c.Do("ZINCRBY", "z", "3", "m2")
		c.Do("ZINCRBY", "z", "3", "m2")

		// failure cases
		c.Error("wrong number", "ZINCRBY")
		c.Error("wrong number", "ZINCRBY", "key")
		c.Error("wrong number", "ZINCRBY", "key", "1.0")
		c.Error("not a valid float", "ZINCRBY", "key", "nofloat", "m")
		c.Error("wrong number", "ZINCRBY", "key", "1.0", "too", "many")
		// ZINCRBY against a plain string key is a type error.
		c.Do("SET", "str", "I am a string")
		c.Error("wrong kind", "ZINCRBY", "str", "1.0", "member")
	})
}
explode_data.jsonl/23350
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 51051, 1649, 641, 11130, 1694, 1155, 353, 8840, 836, 8, 341, 18185, 20015, 1155, 11, 2915, 1337, 353, 2972, 8, 341, 197, 1444, 33596, 445, 57, 687, 8973, 19912, 497, 330, 89, 497, 330, 16, 13, 15, 497, 330, 76, 1138, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInterpreterFStringDollars(t *testing.T) { s, err := parseFile("src/parse/asp/test_data/interpreter/fstrings.build") assert.NoError(t, err) assert.EqualValues(t, "mickey donald ${goofy} {sora}", s.Lookup("z")) }
explode_data.jsonl/81083
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 58426, 37, 703, 35, 965, 1561, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 4715, 1703, 445, 3548, 14, 6400, 14, 13367, 12697, 1769, 14, 90554, 6663, 18594, 13239, 1138, 6948, 35699, 1155, 11, 1848, 340, 6948, 12808...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBooleanParsing checks that all accepted YAML boolean spellings for
// disconnect_expired_cert parse to the expected bool value.
func TestBooleanParsing(t *testing.T) {
	testCases := []struct {
		s string // literal value as it appears in the config
		b bool   // expected parsed boolean
	}{
		{s: "true", b: true},
		{s: "'true'", b: true},
		{s: "yes", b: true},
		{s: "'yes'", b: true},
		{s: "'1'", b: true},
		{s: "1", b: true},
		{s: "no", b: false},
		{s: "0", b: false},
	}
	for i, tc := range testCases {
		msg := fmt.Sprintf("test case %v", i)
		// The config template injects tc.s as the disconnect_expired_cert
		// value and is handed to ReadFromString base64-encoded.
		// NOTE(review): the template string appears whitespace-flattened in
		// this copy; confirm against the original multi-line YAML.
		conf, err := ReadFromString(base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf(` teleport: advertise_ip: 10.10.10.1 auth_service: enabled: yes disconnect_expired_cert: %v `, tc.s))))
		require.NoError(t, err, msg)
		require.Equal(t, tc.b, conf.Auth.DisconnectExpiredCert.Value, msg)
	}
}
explode_data.jsonl/47158
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 6890, 68839, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 1903, 914, 198, 197, 2233, 1807, 198, 197, 59403, 197, 197, 84386, 25, 330, 1866, 497, 293, 25, 830, 1583, 197, 197, 84386, 25, 7178, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestStartAndShutdown runs the controller via StartAll, verifies it keeps
// running until the context is cancelled, then checks it stops promptly and
// performed no reconciliations.
func TestStartAndShutdown(t *testing.T) {
	r := &CountingReconciler{}
	impl := NewImplWithStats(r, TestLogger(t), "Testing", &FakeStatsReporter{})
	ctx, cancel := context.WithCancel(context.Background())

	doneCh := make(chan struct{})
	go func() {
		defer close(doneCh)
		StartAll(ctx, impl)
	}()
	// Guarantee shutdown even if an assertion below fails early.
	t.Cleanup(func() {
		cancel()
		<-doneCh
	})

	select {
	case <-time.After(10 * time.Millisecond):
		// We don't expect completion before the context is cancelled.
	case <-doneCh:
		t.Error("StartAll finished early.")
	}
	cancel()

	select {
	case <-time.After(time.Second):
		t.Error("Timed out waiting for controller to finish.")
	case <-doneCh:
		// We expect the work to complete.
	}

	// No work items were queued, so the reconciler must never have run.
	if got, want := r.count.Load(), int32(0); got != want {
		t.Errorf("count = %v, wanted %v", got, want)
	}
}
explode_data.jsonl/45285
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 3479, 3036, 62004, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 609, 2507, 287, 693, 40446, 5769, 16094, 197, 6383, 1669, 1532, 9673, 2354, 16635, 2601, 11, 3393, 7395, 1155, 701, 330, 16451, 497, 609, 52317, 16635, 52766, 6257...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFieldByIndexes checks FieldByIndexes (and its read-only variant)
// traversal through nested structs: following a field-index path through
// values, pointers, and — when writing is allowed — allocating nil
// intermediate pointers and initializing trailing nil maps.
func TestFieldByIndexes(t *testing.T) {
	type C struct {
		C0 bool
		C1 string
		C2 int
		C3 map[string]int
	}
	type B struct {
		B1 C
		B2 *C
	}
	type A struct {
		A1 B
		A2 *B
	}
	testCases := []struct {
		value         interface{} // root value to traverse
		indexes       []int       // field-index path from the root
		expectedValue interface{} // value expected at the end of the path
		readOnly      bool        // also exercise FieldByIndexesReadOnly
	}{
		{
			value: A{
				A1: B{B1: C{C0: true}},
			},
			indexes:       []int{0, 0, 0},
			expectedValue: true,
			readOnly:      true,
		},
		{
			value: A{
				A2: &B{B2: &C{C1: "answer"}},
			},
			indexes:       []int{1, 1, 1},
			expectedValue: "answer",
			readOnly:      true,
		},
		{
			// Writable traversal through nil pointers: intermediates are
			// allocated and the trailing map is initialized to empty.
			value:         &A{},
			indexes:       []int{1, 1, 3},
			expectedValue: map[string]int{},
		},
	}
	for i, tc := range testCases {
		checkResults := func(v reflect.Value) {
			if tc.expectedValue == nil {
				if !v.IsNil() {
					t.Errorf("%d: expected nil, actual %v", i, v.Interface())
				}
			} else {
				if !reflect.DeepEqual(tc.expectedValue, v.Interface()) {
					t.Errorf("%d: expected %v, actual %v", i, tc.expectedValue, v.Interface())
				}
			}
		}
		checkResults(FieldByIndexes(reflect.ValueOf(tc.value), tc.indexes))
		if tc.readOnly {
			checkResults(FieldByIndexesReadOnly(reflect.ValueOf(tc.value), tc.indexes))
		}
	}
}
explode_data.jsonl/59108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 631 }
[ 2830, 3393, 1877, 1359, 62229, 1155, 353, 8840, 836, 8, 341, 13158, 356, 2036, 341, 197, 6258, 15, 1807, 198, 197, 6258, 16, 914, 198, 197, 6258, 17, 526, 198, 197, 6258, 18, 2415, 14032, 63025, 198, 197, 532, 13158, 425, 2036, 341,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestMetricsServiceV2UpdateLogMetricError configures the shared mock server
// to fail and verifies UpdateLogMetric surfaces the same gRPC error code.
func TestMetricsServiceV2UpdateLogMetricError(t *testing.T) {
	errCode := codes.Internal
	// Inject the failure into the package-level mock.
	mockMetrics.err = grpc.Errorf(errCode, "test error")

	var formattedMetricName string = MetricsMetricPath("[PROJECT]", "[METRIC]")
	var metric *loggingpb.LogMetric = &loggingpb.LogMetric{}
	var request = &loggingpb.UpdateLogMetricRequest{
		MetricName: formattedMetricName,
		Metric:     metric,
	}

	c, err := NewMetricsClient(context.Background(), clientOpt)
	if err != nil {
		t.Fatal(err)
	}

	resp, err := c.UpdateLogMetric(context.Background(), request)
	// The client must propagate the injected error code unchanged.
	if c := grpc.Code(err); c != errCode {
		t.Errorf("got error code %q, want %q", c, errCode)
	}
	_ = resp
}
explode_data.jsonl/77784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 27328, 1860, 53, 17, 4289, 2201, 54310, 1454, 1155, 353, 8840, 836, 8, 341, 9859, 2078, 1669, 13912, 32579, 198, 77333, 27328, 18441, 284, 47900, 13080, 3964, 2078, 11, 330, 1944, 1465, 5130, 2405, 23126, 54310, 675, 914, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestKafkaClient_decodeKeyAndOffset decodes a binary consumer-offsets
// key/value pair and asserts the resulting StorageSetConsumerOffset request
// carries the decoded cluster, group, topic, partition, offset and timestamp.
func TestKafkaClient_decodeKeyAndOffset(t *testing.T) {
	module := fixtureModule()
	viper.Set("consumer.test.group-whitelist", "test.*")
	module.Configure("test", "consumer.test")

	// Key: length-prefixed group "testgroup", topic "testtopic", partition 11.
	keyBuf := bytes.NewBuffer([]byte("\x00\x09testgroup\x00\x09testtopic\x00\x00\x00\x0b"))
	// Value: offset 0x20b4 (8372), length-prefixed metadata "testdata",
	// timestamp 0x0665 (1637) — matching the assertions below.
	valueBytes := []byte("\x00\x00\x00\x00\x00\x00\x00\x00\x20\xb4\x00\x08testdata\x00\x00\x00\x00\x00\x00\x06\x65")
	// decodeKeyAndOffset blocks sending on the storage channel, so run it in
	// a goroutine and receive the request here.
	go module.decodeKeyAndOffset(keyBuf, valueBytes, zap.NewNop())
	request := <-module.App.StorageChannel

	assert.Equalf(t, protocol.StorageSetConsumerOffset, request.RequestType, "Expected request sent with type StorageSetConsumerOffset, not %v", request.RequestType)
	assert.Equalf(t, "test", request.Cluster, "Expected request sent with cluster test, not %v", request.Cluster)
	assert.Equalf(t, "testtopic", request.Topic, "Expected request sent with topic testtopic, not %v", request.Topic)
	assert.Equalf(t, int32(11), request.Partition, "Expected request sent with partition 0, not %v", request.Partition)
	assert.Equalf(t, "testgroup", request.Group, "Expected request sent with Group testgroup, not %v", request.Group)
	assert.Equalf(t, int64(8372), request.Offset, "Expected Offset to be 8372, not %v", request.Offset)
	assert.Equalf(t, int64(1637), request.Timestamp, "Expected Timestamp to be 1637, not %v", request.Timestamp)
}
explode_data.jsonl/34271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 475 }
[ 2830, 3393, 42, 21883, 2959, 15227, 1592, 3036, 6446, 1155, 353, 8840, 836, 8, 341, 54020, 1669, 12507, 3332, 741, 5195, 12858, 4202, 445, 46764, 5958, 6175, 55332, 57645, 497, 330, 1944, 4908, 1138, 54020, 78281, 445, 1944, 497, 330, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPing(t *testing.T) { t.Parallel() p := Ping{} // good { msg, err := p.Read(payload.NewReader(id.Write())) assert.Nil(t, err) _, castOK := msg.(Ping) assert.True(t, castOK) assert.Truef(t, id.Equals(msg.(Ping).ID), "Expected equal %v vs %v", id, msg) } // bad { _, err := p.Read(payload.NewReader([]byte("bad"))) assert.NotNil(t, err) } }
explode_data.jsonl/15795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 69883, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 3223, 1669, 48657, 31483, 197, 322, 1661, 198, 197, 515, 197, 21169, 11, 1848, 1669, 281, 6503, 26772, 68587, 3724, 4073, 12145, 197, 6948, 59678, 1155, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServer_Listen(t *testing.T) { // given s := &Server{ cfg: Config{ BindAddr: "127.0.0.1", Port: 56000, }, } // when _ = s.Start(context.Background()) ctx, cancel := context.WithTimeout(context.Background(), time.Second) defer cancel() cc, ccErr := grpc.DialContext(ctx, "127.0.0.1:56000", grpc.WithInsecure(), grpc.WithBlock()) var ccSt connectivity.State if cc != nil { ccSt = cc.GetState() } _ = s.Stop(context.Background()) // then require.Nil(t, ccErr) require.NotNil(t, cc) require.Equal(t, connectivity.Ready, ccSt) }
explode_data.jsonl/73180
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 5475, 27104, 268, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 1903, 1669, 609, 5475, 515, 197, 50286, 25, 5532, 515, 298, 197, 9950, 13986, 25, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 756, 298, 98459, 25, 257...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFilterToken(t *testing.T) { tests := []struct { name string useToken bool }{ { name: "no token", useToken: false, }, { name: "some with token", useToken: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { traces := buildTestTraces(tt.useToken) batches, err := jaeger.InternalTracesToJaegerProto(traces) require.NoError(t, err) assert.Equal(t, tt.useToken, hasToken(batches)) filterToken(batches) assert.False(t, hasToken(batches)) }) } }
explode_data.jsonl/72154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 5632, 3323, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 41819, 3323, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 257, 330, 2152, 3950, 756, 298, 41819, 3323, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEvalTakes(t *testing.T) { var strNode = ArgNode{ value: "a.B", values: strings.Split("a.B", "."), valuesLen: 2, } var m = map[string]interface{}{"a": "B"} var r, _ = EvalTakes(strNode, m) fmt.Println(r) }
explode_data.jsonl/52555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 54469, 51, 2050, 1155, 353, 8840, 836, 8, 341, 2405, 607, 1955, 284, 7638, 1955, 515, 197, 16309, 25, 257, 330, 64, 1785, 756, 197, 45939, 25, 262, 9069, 19823, 445, 64, 1785, 497, 5933, 4461, 197, 45939, 11271, 25, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseImageReferences(t *testing.T) { refs := []string{ "gcr.io/library/busybox@sha256:e6693c20186f837fc393390135d8a598a96a833917917789d63766cab6c59582", "gcr.io/library/busybox:1.2", "sha256:e6693c20186f837fc393390135d8a598a96a833917917789d63766cab6c59582", "arbitrary-ref", } expectedTags := []string{ "gcr.io/library/busybox:1.2", } expectedDigests := []string{"gcr.io/library/busybox@sha256:e6693c20186f837fc393390135d8a598a96a833917917789d63766cab6c59582"} tags, digests := parseImageReferences(refs) assert.Equal(t, expectedTags, tags) assert.Equal(t, expectedDigests, digests) }
explode_data.jsonl/8825
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 14463, 1906, 31712, 1155, 353, 8840, 836, 8, 341, 197, 16149, 1669, 3056, 917, 515, 197, 197, 59719, 5082, 4245, 45446, 96916, 88, 2011, 31, 15247, 17, 20, 21, 55976, 21, 21, 24, 18, 66, 17, 15, 16, 23, 21, 69, 23, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJobRunsController_Update_BadInput(t *testing.T) { t.Parallel() ethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplication(t, ethClient, ) defer cleanup() app.Start() client := app.NewHTTPClient() _, bt := cltest.NewBridgeType(t) assert.Nil(t, app.Store.CreateBridgeType(bt)) j := cltest.NewJobWithWebInitiator() j.Tasks = []models.TaskSpec{{Type: bt.Name}} assert.Nil(t, app.Store.CreateJob(&j)) jr := cltest.NewJobRunPendingBridge(j) assert.Nil(t, app.Store.CreateJobRun(&jr)) body := fmt.Sprint(`{`, jr.ID.String()) resp, cleanup := client.Patch("/v2/runs/"+jr.ID.String(), bytes.NewBufferString(body)) defer cleanup() assert.Equal(t, http.StatusInternalServerError, resp.StatusCode, "Response should be successful") jr, err := app.Store.FindJobRun(jr.ID) assert.NoError(t, err) assert.Equal(t, models.RunStatusPendingBridge, jr.GetStatus()) }
explode_data.jsonl/49855
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 12245, 73920, 2051, 47393, 1668, 329, 2505, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 769, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1155, 340, 16867, 2060,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInterpreterBuiltins(t *testing.T) { s, err := parseFile("src/parse/asp/test_data/interpreter/builtins.build") require.NoError(t, err) assert.Equal(t, 1, s.pkg.NumTargets()) assert.NotNil(t, s.pkg.Target("lib")) }
explode_data.jsonl/81061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 58426, 54300, 1330, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 4715, 1703, 445, 3548, 14, 6400, 14, 13367, 12697, 1769, 14, 90554, 3470, 11227, 1330, 13239, 1138, 17957, 35699, 1155, 11, 1848, 340, 6948, 12808, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestQueryAutocompleteMissingInput(t *testing.T) { c, _ := NewClient(WithAPIKey(apiKey)) r := &QueryAutocompleteRequest{} _, err := c.QueryAutocomplete(context.Background(), r) if err == nil { t.Errorf("Error expected: maps: Input missing") } if "maps: Input missing" != err.Error() { t.Errorf("Wrong error returned \"%v\"", err) } }
explode_data.jsonl/76298
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 2859, 19602, 20104, 25080, 2505, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 1532, 2959, 7, 2354, 7082, 1592, 24827, 1592, 1171, 7000, 1669, 609, 2859, 19602, 20104, 1900, 31483, 197, 6878, 1848, 1669, 272, 15685, 196...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestLineDirectives exercises //line directive handling in the parser:
// malformed directives must produce the expected error message at the
// expected position, and valid directives must rewrite the relative
// filename/line/column reported for subsequent positions.
func TestLineDirectives(t *testing.T) {
	for _, test := range []struct {
		src, msg string // input source and expected parser error message
		filename string // expected relative filename after the directive
		line, col uint  // expected relative line and column of the error
	}{
		// test validity of //line directive
		{`//line :`, "invalid line number: ", "", 1, 8},
		{`//line :x`, "invalid line number: x", "", 1, 8},
		{`//line foo :`, "invalid line number: ", "", 1, 12},
		{`//line foo:123abc`, "invalid line number: 123abc", "", 1, 11},
		{`/**///line foo:x`, "syntax error: package statement must be first", "", 1, 16}, //line directive not at start of line - ignored
		{`//line foo:0`, "invalid line number: 0", "", 1, 11},
		{fmt.Sprintf(`//line foo:%d`, lineMax+1), fmt.Sprintf("invalid line number: %d", lineMax+1), "", 1, 11},

		// test effect of //line directive on (relative) position information
		{"//line foo:123\n foo", "syntax error: package statement must be first", "foo", 123, 3},
		{"//line foo:123\n//line bar:345\nfoo", "syntax error: package statement must be first", "bar", 345, 0},
	} {
		_, err := ParseBytes(nil, []byte(test.src), nil, nil, 0)
		if err == nil {
			// Every case is expected to fail parsing; success is a bug.
			t.Errorf("%s: no error reported", test.src)
			continue
		}
		// The error must be a parser Error so its position can be inspected.
		perr, ok := err.(Error)
		if !ok {
			t.Errorf("%s: got %v; want parser error", test.src, err)
			continue
		}
		if msg := perr.Msg; msg != test.msg {
			t.Errorf("%s: got msg = %q; want %q", test.src, msg, test.msg)
		}
		if filename := perr.Pos.RelFilename(); filename != test.filename {
			t.Errorf("%s: got filename = %q; want %q", test.src, filename, test.filename)
		}
		if line := perr.Pos.RelLine(); line != test.line {
			t.Errorf("%s: got line = %d; want %d", test.src, line, test.line)
		}
		if col := perr.Pos.Col(); col != test.col {
			t.Errorf("%s: got col = %d; want %d", test.src, col, test.col)
		}
	}
}
explode_data.jsonl/24652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 698 }
[ 2830, 3393, 2460, 16027, 1886, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 41144, 11, 3750, 220, 914, 198, 197, 66434, 220, 914, 198, 197, 27109, 11, 1375, 2622, 198, 197, 59403, 197, 197, 322, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestDeauthorizeOAuthApp covers the OAuth app deauthorization endpoint:
// a user who authorized an app can deauthorize it, a malformed ID is a bad
// request, a well-formed unknown ID succeeds, and a logged-out caller is
// unauthorized.
func TestDeauthorizeOAuthApp(t *testing.T) {
	th := Setup().InitBasic()
	defer th.TearDown()
	Client := th.Client
	AdminClient := th.SystemAdminClient

	// Enable the OAuth service provider for this test and restore the
	// previous setting when done.
	enableOAuth := th.App.Config().ServiceSettings.EnableOAuthServiceProvider
	defer func() {
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.ServiceSettings.EnableOAuthServiceProvider = enableOAuth })
	}()
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableOAuthServiceProvider = true })

	// Register an app as admin, then authorize it as the regular user so
	// there is an authorization to revoke.
	oapp := &model.OAuthApp{Name: GenerateTestAppName(), Homepage: "https://nowhere.com", Description: "test", CallbackUrls: []string{"https://nowhere.com"}}
	rapp, resp := AdminClient.CreateOAuthApp(oapp)
	CheckNoError(t, resp)

	authRequest := &model.AuthorizeRequest{
		ResponseType: model.AUTHCODE_RESPONSE_TYPE,
		ClientId:     rapp.Id,
		RedirectUri:  rapp.CallbackUrls[0],
		Scope:        "",
		State:        "123",
	}
	_, resp = Client.AuthorizeOAuthApp(authRequest)
	CheckNoError(t, resp)

	// Happy path: deauthorization succeeds and reports true.
	pass, resp := Client.DeauthorizeOAuthApp(rapp.Id)
	CheckNoError(t, resp)
	if !pass {
		t.Fatal("should have passed")
	}

	// A malformed app ID is rejected as a bad request.
	_, resp = Client.DeauthorizeOAuthApp("junk")
	CheckBadRequestStatus(t, resp)

	// A valid-looking but unknown ID is accepted (no-op).
	_, resp = Client.DeauthorizeOAuthApp(model.NewId())
	CheckNoError(t, resp)

	// Without a session the call is unauthorized.
	Client.Logout()
	_, resp = Client.DeauthorizeOAuthApp(rapp.Id)
	CheckUnauthorizedStatus(t, resp)
}
explode_data.jsonl/30133
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 503 }
[ 2830, 3393, 1912, 52022, 57850, 2164, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 197, 7210, 2959, 1669, 270, 16620, 7210, 2959, 271, 197, 1255...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransportCancelDataResponseRace races request cancellation against a
// server that is actively streaming response data, then verifies the
// transport still serves a fresh request afterwards.
func TestTransportCancelDataResponseRace(t *testing.T) {
	cancel := make(chan struct{})
	clientGotError := make(chan bool, 1)

	const msg = "Hello."
	st := newServerTester(t, func(w http.ResponseWriter, r *http.Request) {
		// Second request (/hello): reply after a short delay.
		if strings.Contains(r.URL.Path, "/hello") {
			time.Sleep(50 * time.Millisecond)
			io.WriteString(w, msg)
			return
		}
		// First request: stream chunks; after the third chunk trigger the
		// client-side cancel, wait until the client observed the error, then
		// keep writing into the canceled stream to provoke the race.
		for i := 0; i < 50; i++ {
			io.WriteString(w, "Some data.")
			w.(http.Flusher).Flush()
			if i == 2 {
				close(cancel)
				<-clientGotError
			}
			time.Sleep(10 * time.Millisecond)
		}
	}, optOnlyServer)
	defer st.Close()

	tr := &Transport{TLSClientConfig: tlsConfigInsecure}
	defer tr.CloseIdleConnections()
	c := &http.Client{Transport: tr}

	req, _ := http.NewRequest("GET", st.ts.URL, nil)
	req.Cancel = cancel
	res, err := c.Do(req)
	if err != nil {
		t.Fatal(err)
	}
	// Draining the body must fail once the request is canceled mid-stream.
	if _, err = io.Copy(ioutil.Discard, res.Body); err == nil {
		t.Fatal("unexpected success")
	}
	// Unblock the handler that is waiting on clientGotError.
	clientGotError <- true

	// The transport must still complete a fresh request cleanly.
	res, err = c.Get(st.ts.URL + "/hello")
	if err != nil {
		t.Fatal(err)
	}
	slurp, err := ioutil.ReadAll(res.Body)
	if err != nil {
		t.Fatal(err)
	}
	if string(slurp) != msg {
		t.Errorf("Got = %q; want %q", slurp, msg)
	}
}
explode_data.jsonl/16135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 516 }
[ 2830, 3393, 27560, 9269, 1043, 2582, 55991, 1155, 353, 8840, 836, 8, 341, 84441, 1669, 1281, 35190, 2036, 37790, 25291, 32462, 1454, 1669, 1281, 35190, 1807, 11, 220, 16, 692, 4777, 3750, 284, 330, 9707, 10040, 18388, 1669, 501, 5475, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestSet loads a TTY description from the JSON fixture j and drives
// SetOpts through a large table of stty-style option strings: every entry
// in sets must be accepted, every entry in bad must be rejected.
// A leading "~" negates a flag; two-element entries set a named value.
func TestSet(t *testing.T) {
	g := &TTY{}
	if err := json.Unmarshal([]byte(j), g); err != nil {
		t.Fatalf("load from JSON: got %v, want nil", err)
	}
	// Valid options: flags, negated flags, and key/value pairs.
	sets := [][]string{
		{"speed", "0"}, {"rows", "72"}, {"cols", "238"},
		{"brkint"}, {"~clocal"}, {"cread"}, {"~cstopb"},
		{"echo"}, {"echoctl"}, {"echoe"}, {"echok"}, {"echoke"}, {"~echonl"}, {"~echoprt"},
		{"eof", "0x04"}, {"eol2", "0xff"}, {"eol", "0xff"}, {"erase", "0x7f"},
		{"~flusho"}, {"~hupcl"}, {"icanon"}, {"icrnl"}, {"iexten"},
		{"~ignbrk"}, {"~igncr"}, {"ignpar"}, {"imaxbel"}, {"~inlcr"}, {"~inpck"},
		{"intr", "0x03"}, {"isig"}, {"~istrip"}, {"iutf8"},
		{"~ixany"}, {"~ixoff"}, {"ixon"},
		{"kill", "0x15"}, {"lnext", "0x16"}, {"min", "0x00"},
		{"~noflsh"}, {"~ocrnl"}, {"~ofdel"}, {"~ofill"}, {"onlcr"}, {"~onlret"}, {"~onocr"}, {"opost"},
		{"~parenb"}, {"~parmrk"}, {"~parodd"}, {"pendin"},
		{"quit", "0x1c"}, {"start", "0x11"}, {"stop", "0x13"}, {"susp", "0x1a"},
		{"time", "0x03"}, {"~tostop"}, {"werase", "0x17"},
	}
	// These flags only exist on Linux, so add them conditionally.
	if runtime.GOOS == "linux" {
		sets = append(sets, []string{"~iuclc"}, []string{"~olcuc"}, []string{"~xcase"})
	}
	for _, set := range sets {
		if err := g.SetOpts(set); err != nil {
			t.Errorf("Setting %q: got %v, want nil", set, err)
		}
	}
	// Invalid options: unknown names, missing values, non-numeric values.
	bad := [][]string{
		{"hi", "1"}, {"rows"}, {"rows", "z"}, {"erase"}, {"erase", "z"}, {"hi"}, {"~hi"},
	}
	for _, set := range bad {
		if err := g.SetOpts(set); err == nil {
			t.Errorf("Setting %q: got nil, want err", set)
		}
	}
}
explode_data.jsonl/73094
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 829 }
[ 2830, 3393, 1649, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 609, 55544, 16094, 743, 1848, 1669, 2951, 38097, 10556, 3782, 3325, 701, 342, 1215, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1078, 504, 4718, 25, 2684, 1018, 85, 11, 1366...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestYoutubeDLError_mockCommand_Run drives mockCommand.Run through a table
// of cases and checks that Run returns the configured error and that the
// configured stdout/stderr data is delivered to the attached buffers.
func TestYoutubeDLError_mockCommand_Run(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name          string
		cmd           mockCommand
		expectedErr   error
		runBeforeFunc func(mockCommand *mockCommand) // optional per-case setup hook
	}{
		{
			"std1",
			mockCommand{
				stdinData:  "stdin1",
				stdoutData: "stdout1",
				stderrData: "stderr1",
				startErr:   nil,
				waitErr:    nil,
				runErr:     nil,
			},
			nil,
			func(mockCommand *mockCommand) {
			},
		},
	}
	var err error
	for _, test := range tests {
		if test.runBeforeFunc != nil {
			test.runBeforeFunc(&test.cmd)
		}
		// Fresh buffers per case so output checks are isolated.
		var stdoutBuffer, stderrBuffer bytes.Buffer
		test.cmd.SetStdout(&stdoutBuffer)
		test.cmd.SetStderr(&stderrBuffer)
		err = test.cmd.Run()
		if err != test.expectedErr {
			t.Errorf("test (%v), expected error (%v), got error (%v)\n", test.name, test.expectedErr, err)
		}
		// NOTE: this err shadows the outer one inside the loop body; the
		// original intent (checking ReadAll separately) is preserved.
		stdout, err := io.ReadAll(&stdoutBuffer)
		if err != nil {
			t.Errorf("test (%v), got error (%v) while reading stdoutBuffer\n", test.name, err)
		}
		if stdoutString := string(stdout); stdoutString != test.cmd.stdoutData {
			t.Errorf("test (%v), execCommand.stdout Reader returned stdout (%v), expected (%v)", test.name, stdoutString, test.cmd.stdoutData)
		}
		stderr, err := io.ReadAll(&stderrBuffer)
		if err != nil {
			t.Errorf("test (%v), got error (%v) while reading stderrBuffer\n", test.name, err)
		}
		if stderrString := string(stderr); stderrString != test.cmd.stderrData {
			t.Errorf("test (%v), execCommand.stderr Reader returned stderr (%v), expected (%v)", test.name, stderrString, test.cmd.stderrData)
		}
	}
}
explode_data.jsonl/4288
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 685 }
[ 2830, 3393, 91667, 35, 94618, 34134, 4062, 84158, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 25920, 1843, 7860, 4062, 198, 197, 42400, 7747, 256, 1465, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDefaultValuesNotAllowedWithRequired(t *testing.T) { var args struct { A int `arg:"required" default:"123"` // required not allowed with default! } err := parse("", &args) assert.EqualError(t, err, ".A: 'required' cannot be used when a default value is specified") }
explode_data.jsonl/13082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 3675, 6227, 97634, 2354, 8164, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 2036, 341, 197, 22985, 526, 1565, 858, 2974, 6279, 1, 1638, 2974, 16, 17, 18, 39917, 442, 2567, 537, 5420, 448, 1638, 4894, 197, 630, 9859, 1669, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBQSerialization checks that BoundedQuantiles survives an
// encode/decode round trip: the decoded aggregator must equal an untouched
// copy with the same inputs, and encoding must flip the state to serialized.
func TestBQSerialization(t *testing.T) {
	for _, tc := range []struct {
		desc string
		opts *BoundedQuantilesOptions
	}{
		{"default options", &BoundedQuantilesOptions{
			Epsilon:                      ln3,
			Lower:                        0,
			Upper:                        1,
			Delta:                        0,
			MaxContributionsPerPartition: 1,
		}},
		{"non-default options", &BoundedQuantilesOptions{
			Lower:                        -100,
			Upper:                        555,
			Epsilon:                      ln3,
			Delta:                        1e-5,
			MaxPartitionsContributed:     5,
			MaxContributionsPerPartition: 6,
			TreeHeight:                   3,
			BranchingFactor:              12,
			Noise:                        noise.Gaussian(),
		}},
	} {
		bq, err := NewBoundedQuantiles(tc.opts)
		if err != nil {
			t.Fatalf("Couldn't initialize bq: %v", err)
		}
		// bqUnchanged is the reference copy that is never serialized.
		bqUnchanged, err := NewBoundedQuantiles(tc.opts)
		if err != nil {
			t.Fatalf("Couldn't initialize bqUnchanged: %v", err)
		}
		// Insert same elements to both.
		bq.Add(1.0)
		bqUnchanged.Add(1.0)
		bq.Add(2.0)
		bqUnchanged.Add(2.0)
		bytes, err := encode(bq)
		if err != nil {
			t.Fatalf("encode(BoundedQuantiles) error: %v", err)
		}
		bqUnmarshalled := new(BoundedQuantiles)
		if err := decode(bqUnmarshalled, bytes); err != nil {
			t.Fatalf("decode(BoundedQuantiles) error: %v", err)
		}
		// Check that encoding -> decoding is the identity function.
		if !cmp.Equal(bqUnchanged, bqUnmarshalled, cmp.Comparer(compareBoundedQuantiles)) {
			t.Errorf("decode(encode(_)): when %s got %+v, want %+v", tc.desc, bqUnmarshalled, bq)
		}
		// Encoding must mark the source aggregator as serialized.
		if bq.state != serialized {
			t.Errorf("BoundedQuantiles should have its state set to Serialized, got %v , want Serialized", bq.state)
		}
	}
}
explode_data.jsonl/80645
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 912 }
[ 2830, 3393, 33, 48, 35865, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 41653, 914, 198, 197, 64734, 353, 33, 13082, 44220, 3658, 3798, 198, 197, 59403, 197, 197, 4913, 2258, 2606, 497, 609, 33, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestConsulListRecords(t *testing.T) { consulReset() _, err := cache.ListRecords() cache.ResetRecords(&nanoBoth) _, err2 := cache.ListRecords() if err != nil || err2 != nil { t.Errorf("Failed to list records in consul cacher - %v%v", err, err2) } }
explode_data.jsonl/61013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 15220, 360, 852, 25876, 1155, 353, 8840, 836, 8, 341, 197, 6254, 360, 14828, 741, 197, 6878, 1848, 1669, 6500, 5814, 25876, 741, 52680, 36660, 25876, 2099, 93625, 20629, 340, 197, 6878, 1848, 17, 1669, 6500, 5814, 25876, 741...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLoopBackManager_overrideDeviceWithSizeChanging(t *testing.T) { var mockexec = &mocks.GoMockExecutor{} var manager = NewLoopBackManager(mockexec, "", "", logger) // Initialize manager with local default settings manager.updateDevicesFromConfig() assert.Equal(t, defaultNumberOfDevices, len(manager.devices)) indexOfDeviceToOverride := 0 newSize := "200Mi" fakeDevicePath := "/dev/loop0" fakeFileName := "loopback.img" manager.devices[indexOfDeviceToOverride].devicePath = fakeDevicePath manager.devices[indexOfDeviceToOverride].fileName = fakeFileName mockexec.On("RunCmd", fmt.Sprintf(detachLoopBackDeviceCmdTmpl, fakeDevicePath)). Return("", "", nil) mockexec.On("RunCmd", fmt.Sprintf(deleteFileCmdTmpl, fakeFileName)). Return("", "", nil) // Change size of device to override devices := []*LoopBackDevice{ {SerialNumber: manager.devices[indexOfDeviceToOverride].SerialNumber, Size: newSize}, } manager.overrideDevicesFromNodeConfig(defaultNumberOfDevices, devices) // resizing is not supported assert.Equal(t, defaultSize, manager.devices[indexOfDeviceToOverride].Size) }
explode_data.jsonl/73568
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 14620, 3707, 2043, 48576, 6985, 2354, 1695, 59046, 1155, 353, 8840, 836, 8, 341, 2405, 7860, 11748, 284, 609, 16712, 82, 67131, 11571, 25255, 16094, 2405, 6645, 284, 1532, 14620, 3707, 2043, 30389, 11748, 11, 7342, 7342, 5925,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQuery_PathRequired(t *testing.T) { for _, cmd := range []string{"read", "range"} { c := StartCapture() exit = func(r int) {} os.Args = []string{ "dosa", "query", cmd, "--scope", "foo", "--namePrefix", "foo", "StrKey:eq:foo", } main() assert.Contains(t, c.stop(true), "--path' was not specified") } }
explode_data.jsonl/70149
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 2859, 66388, 8164, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 5439, 1669, 2088, 3056, 917, 4913, 878, 497, 330, 9669, 9207, 341, 197, 1444, 1669, 5145, 27429, 741, 197, 14519, 284, 2915, 2601, 526, 8, 5613, 197, 25078, 5101...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseDSN(t *testing.T) { var dsn = "dspo:12345678@(localhost:3307)/dbname" settings, err := pygrator.ParseDSN(dsn) if err != nil { t.Fatal(err) } t.Logf("%+v", settings) }
explode_data.jsonl/72941
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 14463, 5936, 45, 1155, 353, 8840, 836, 8, 341, 2405, 294, 9613, 284, 330, 67, 82859, 25, 16, 17, 18, 19, 20, 21, 22, 23, 59404, 8301, 25, 18, 18, 15, 22, 5620, 35265, 698, 62930, 11, 1848, 1669, 4510, 901, 850, 8937,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestWindowsDesktopService validates applyWindowsDesktopConfig against a
// table of file-config mutations: bad host addresses, bad host-label keys,
// and bad label regexes must fail, while a fully valid config must pass.
func TestWindowsDesktopService(t *testing.T) {
	t.Parallel()

	for _, test := range []struct {
		desc        string
		mutate      func(fc *FileConfig)            // applies the case's config change
		expectError require.ErrorAssertionFunc      // require.Error or require.NoError
	}{
		{
			desc:        "NOK - invalid static host addr",
			expectError: require.Error,
			mutate: func(fc *FileConfig) {
				fc.WindowsDesktop.Hosts = []string{"badscheme://foo:1:2"}
			},
		},
		{
			desc:        "NOK - invalid host label key",
			expectError: require.Error,
			mutate: func(fc *FileConfig) {
				fc.WindowsDesktop.HostLabels = []WindowsHostLabelRule{
					{Match: ".*", Labels: map[string]string{"invalid label key": "value"}},
				}
			},
		},
		{
			desc:        "NOK - invalid host label regexp",
			expectError: require.Error,
			mutate: func(fc *FileConfig) {
				fc.WindowsDesktop.HostLabels = []WindowsHostLabelRule{
					{Match: "g(-z]+ invalid regex", Labels: map[string]string{"key": "value"}},
				}
			},
		},
		{
			desc:        "OK - valid config",
			expectError: require.NoError,
			mutate: func(fc *FileConfig) {
				fc.WindowsDesktop.EnabledFlag = "yes"
				fc.WindowsDesktop.ListenAddress = "0.0.0.0:3028"
				fc.WindowsDesktop.Hosts = []string{"127.0.0.1:3389"}
				fc.WindowsDesktop.HostLabels = []WindowsHostLabelRule{
					{Match: ".*", Labels: map[string]string{"key": "value"}},
				}
			},
		},
	} {
		t.Run(test.desc, func(t *testing.T) {
			fc := &FileConfig{}
			test.mutate(fc)
			cfg := &service.Config{}
			err := applyWindowsDesktopConfig(fc, cfg)
			test.expectError(t, err)
		})
	}
}
explode_data.jsonl/47176
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 682 }
[ 2830, 3393, 13164, 23597, 1860, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 41653, 286, 914, 198, 197, 2109, 332, 349, 414, 2915, 77684, 353, 1703, 2648, 340, 197, 24952, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSessionReceiver_acceptSessionButAlreadyLocked(t *testing.T) { client, cleanup, queueName := setupLiveTest(t, &admin.QueueProperties{ RequiresSession: to.Ptr(true), }) defer cleanup() ctx := context.Background() receiver, err := client.AcceptSessionForQueue(ctx, queueName, "session-1", nil) require.NoError(t, err) require.NotNil(t, receiver) // You can address a session by name which makes lock contention possible (unlike // messages where the lock token is not a predefined value) receiver, err = client.AcceptSessionForQueue(ctx, queueName, "session-1", nil) require.EqualValues(t, internal.RecoveryKindFatal, internal.GetRecoveryKind(err)) require.Nil(t, receiver) }
explode_data.jsonl/70560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 5283, 25436, 35728, 5283, 3983, 38370, 49010, 1155, 353, 8840, 836, 8, 341, 25291, 11, 21290, 11, 7177, 675, 1669, 6505, 20324, 2271, 1155, 11, 609, 2882, 50251, 7903, 515, 197, 197, 46961, 5283, 25, 311, 94989, 3715, 1326, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRule_Validate(t *testing.T) { if err := (&Rule{}).Validate(); err == nil { t.Errorf("exptected empty name error") } if err := (&Rule{Alert: "alert"}).Validate(); err == nil { t.Errorf("exptected empty expr error") } if err := (&Rule{Alert: "alert", Expr: "test>0"}).Validate(); err != nil { t.Errorf("exptected valid rule; got %s", err) } }
explode_data.jsonl/45653
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 11337, 62, 17926, 1155, 353, 8840, 836, 8, 341, 743, 1848, 1669, 15899, 11337, 6257, 568, 17926, 2129, 1848, 621, 2092, 341, 197, 3244, 13080, 445, 327, 417, 1569, 4287, 829, 1465, 1138, 197, 532, 743, 1848, 1669, 15899, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReaderSuccessReturnsCorrectBodyWithZeroFunctions(t *testing.T) { m := metrics.MetricOptions{} c := &testServiceApiClient{ serviceListServices: []swarm.Service{}, serviceListError: nil, } handler := handlers.MakeFunctionReader(m, c) w := httptest.NewRecorder() r := &http.Request{} handler.ServeHTTP(w, r) expected := "[]" if w.Body.String() != expected { t.Errorf("handler returned wrong body: got %v want %v", w.Body.String(), expected) } }
explode_data.jsonl/63779
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 5062, 7188, 16446, 33092, 5444, 2354, 17999, 25207, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 16734, 1321, 16340, 3798, 16094, 1444, 1669, 609, 1944, 1860, 56584, 515, 197, 52934, 852, 11025, 25, 3056, 2280, 2178, 13860, 38837...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestK8SServiceEnvVarAddsEnvVarsToPod(t *testing.T) { t.Parallel() deployment := renderK8SServiceDeploymentWithSetValues( t, map[string]string{ "envVars.DB_HOST": "mysql.default.svc.cluster.local", "envVars.DB_PORT": "3306", }, ) // Verify that there is only one container and that the environments section is populated. renderedPodContainers := deployment.Spec.Template.Spec.Containers require.Equal(t, len(renderedPodContainers), 1) appContainer := renderedPodContainers[0] environments := appContainer.Env assert.Equal(t, len(environments), 2) renderedEnvVar := map[string]string{} for _, env := range environments { renderedEnvVar[env.Name] = env.Value } assert.Equal(t, renderedEnvVar["DB_HOST"], "mysql.default.svc.cluster.local") assert.Equal(t, renderedEnvVar["DB_PORT"], "3306") }
explode_data.jsonl/59753
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 42, 23, 1220, 1017, 14359, 3962, 72111, 14359, 28305, 1249, 23527, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 82213, 1669, 3141, 42, 23, 1220, 1017, 75286, 2354, 1649, 6227, 1006, 197, 3244, 345, 197, 19567,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSetConfigFromFlagsSetsImage(t *testing.T) { d, err := getTestDriver() if assert.NoError(t, err) { assert.Equal(t, "MY_TEST_IMAGE", d.deviceConfig.Image) } }
explode_data.jsonl/37427
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 1649, 2648, 3830, 9195, 30175, 1906, 1155, 353, 8840, 836, 8, 341, 2698, 11, 1848, 1669, 633, 2271, 11349, 2822, 743, 2060, 35699, 1155, 11, 1848, 8, 341, 197, 6948, 12808, 1155, 11, 330, 19159, 11641, 19121, 497, 294, 183...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestNewService(t *testing.T) { cluster := &clusterv1.Cluster{ ObjectMeta: metav1.ObjectMeta{Name: "test-cluster"}, } client := fake.NewFakeClientWithScheme(scheme.Scheme, cluster) s, err := scope.NewClusterScope(context.Background(), scope.ClusterScopeParams{ AzureClients: scope.AzureClients{ Authorizer: autorest.NullAuthorizer{}, }, Client: client, Cluster: cluster, AzureCluster: &infrav1.AzureCluster{ Spec: infrav1.AzureClusterSpec{ Location: "test-location", ResourceGroup: "my-rg", SubscriptionID: "123", NetworkSpec: infrav1.NetworkSpec{ Vnet: infrav1.VnetSpec{Name: "my-vnet", ResourceGroup: "my-rg"}, }, }, }, }) g := NewGomegaWithT(t) g.Expect(err).ToNot(HaveOccurred()) mps, err := scope.NewMachinePoolScope(scope.MachinePoolScopeParams{ Client: client, Logger: s.Logger, MachinePool: new(clusterv1exp.MachinePool), AzureMachinePool: new(infrav1exp.AzureMachinePool), ClusterScope: s, }) g.Expect(err).ToNot(HaveOccurred()) actual := NewService(mps, resourceskus.NewStaticCache(nil)) g.Expect(actual).ToNot(BeNil()) }
explode_data.jsonl/17830
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 485 }
[ 2830, 3393, 3564, 1860, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 609, 564, 590, 648, 16, 72883, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 63121, 25, 330, 1944, 93208, 7115, 197, 532, 25291, 1669, 12418, 7121, 52317, 2959, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJetStream_UnsubscribeDeleteNoPermissions(t *testing.T) { conf := createConfFile(t, []byte(` listen: 127.0.0.1:-1 jetstream: {max_mem_store: 64GB, max_file_store: 10TB} no_auth_user: guest accounts: { JS: { # User should not be able to delete consumer. jetstream: enabled users: [ {user: guest, password: "", permissions: { publish: { deny: "$JS.API.CONSUMER.DELETE.>" } }}] } } `)) defer os.Remove(conf) s, _ := RunServerWithConfig(conf) defer s.Shutdown() if config := s.JetStreamConfig(); config != nil { defer os.RemoveAll(config.StoreDir) } errCh := make(chan error, 2) nc, err := nats.Connect(s.ClientURL(), nats.ErrorHandler(func(_ *nats.Conn, _ *nats.Subscription, err error) { errCh <- err })) if err != nil { t.Fatalf("Unexpected error: %v", err) } defer nc.Close() js, err := nc.JetStream(nats.MaxWait(time.Second)) if err != nil { t.Fatal(err) } js.AddStream(&nats.StreamConfig{ Name: "foo", }) js.Publish("foo", []byte("test")) sub, err := js.SubscribeSync("foo") if err != nil { t.Fatal(err) } _, err = sub.NextMsg(2 * time.Second) if err != nil { t.Fatal(err) } // Should fail due to lack of permissions. err = sub.Unsubscribe() if err == nil { t.Errorf("Unexpected success attempting to delete consumer without permissions") } select { case <-time.After(2 * time.Second): t.Error("Timeout waiting for permissions error") case err = <-errCh: if !strings.Contains(err.Error(), `Permissions Violation for Publish to "$JS.API.CONSUMER.DELETE`) { t.Error("Expected permissions violation error") } } }
explode_data.jsonl/29177
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 654 }
[ 2830, 3393, 35641, 3027, 40687, 9384, 6435, 2753, 23851, 1155, 353, 8840, 836, 8, 341, 67850, 1669, 1855, 15578, 1703, 1155, 11, 3056, 3782, 61528, 197, 14440, 268, 25, 220, 16, 17, 22, 13, 15, 13, 15, 13, 16, 10944, 16, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPRRejectUnmarshalJSON(t *testing.T) { policyJSONUmarshallerTests{ newDest: func() json.Unmarshaler { return &prReject{} }, newValidObject: func() (interface{}, error) { return NewPRReject(), nil }, otherJSONParser: func(validJSON []byte) (interface{}, error) { return newPolicyRequirementFromJSON(validJSON) }, invalidObjects: []mSI{ // Missing "type" field {}, // Wrong "type" field {"type": 1}, {"type": "this is invalid"}, // Extra fields { "type": string(prTypeReject), "unknown": "foo", }, }, duplicateFields: []string{"type"}, }.run(t) }
explode_data.jsonl/36501
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 6480, 78413, 1806, 27121, 5370, 1155, 353, 8840, 836, 8, 341, 3223, 8018, 5370, 52, 52541, 18200, 515, 197, 8638, 34830, 25, 2915, 368, 2951, 38097, 261, 314, 470, 609, 649, 78413, 6257, 1153, 197, 8638, 4088, 1190, 25, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestActiveFailedConnectionAttemptIncrement(t *testing.T) { c := context.New(t, defaultMTU) defer c.Cleanup() stats := c.Stack().Stats() ep, err := c.Stack().NewEndpoint(tcp.ProtocolNumber, ipv4.ProtocolNumber, &c.WQ) if err != nil { t.Fatalf("NewEndpoint failed: %s", err) } c.EP = ep want := stats.TCP.FailedConnectionAttempts.Value() + 1 { err := c.EP.Connect(tcpip.FullAddress{NIC: 2, Addr: context.TestAddr, Port: context.TestPort}) if d := cmp.Diff(&tcpip.ErrNoRoute{}, err); d != "" { t.Errorf("c.EP.Connect(...) mismatch (-want +got):\n%s", d) } } if got := stats.TCP.FailedConnectionAttempts.Value(); got != want { t.Errorf("got stats.TCP.FailedConnectionAttempts.Value() = %d, want = %d", got, want) } if got := c.EP.Stats().(*tcp.Stats).FailedConnectionAttempts.Value(); got != want { t.Errorf("got EP stats FailedConnectionAttempts = %d, want = %d", got, want) } }
explode_data.jsonl/75919
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 361 }
[ 2830, 3393, 5728, 9408, 4526, 47052, 38311, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2266, 7121, 1155, 11, 1638, 8505, 52, 340, 16867, 272, 727, 60639, 2822, 79659, 1669, 272, 58646, 1005, 16635, 741, 96626, 11, 1848, 1669, 272, 58646,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestEncoder(t *testing.T) { for _, p := range pairs { bb := &bytes.Buffer{} encoder := NewEncoder(StdEncoding, bb) encoder.Write([]byte(p.decoded)) encoder.Close() testEqual(t, "Encode(%q) = %q, want %q", p.decoded, bb.String(), p.encoded) } }
explode_data.jsonl/35050
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 19921, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 281, 1669, 2088, 13530, 341, 197, 2233, 65, 1669, 609, 9651, 22622, 16094, 197, 197, 27008, 1669, 1532, 19921, 7, 22748, 14690, 11, 16520, 340, 197, 197, 27008, 4073, 10556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestServiceInspect(t *testing.T) { expectedURL := "/services/service_id" client := &Client{ client: newMockClient(func(req *http.Request) (*http.Response, error) { if !strings.HasPrefix(req.URL.Path, expectedURL) { return nil, fmt.Errorf("Expected URL '%s', got '%s'", expectedURL, req.URL) } content, err := json.Marshal(swarm.Service{ ID: "service_id", }) if err != nil { return nil, err } return &http.Response{ StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewReader(content)), }, nil }), } serviceInspect, _, err := client.ServiceInspectWithRaw(context.Background(), "service_id") if err != nil { t.Fatal(err) } if serviceInspect.ID != "service_id" { t.Fatalf("expected `service_id`, got %s", serviceInspect.ID) } }
explode_data.jsonl/82218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 333 }
[ 2830, 3393, 1860, 58533, 1155, 353, 8840, 836, 8, 341, 42400, 3144, 1669, 3521, 12779, 34186, 842, 698, 25291, 1669, 609, 2959, 515, 197, 25291, 25, 501, 11571, 2959, 18552, 6881, 353, 1254, 9659, 8, 4609, 1254, 12574, 11, 1465, 8, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEventChannel_OutputFormat(t *testing.T) { toGzip := func(payload string) []byte { var buf bytes.Buffer zw := gzip.NewWriter(&buf) if _, err := zw.Write([]byte(payload)); err != nil { assert.Fail(t, err.Error()) } if err := zw.Close(); err != nil { assert.Fail(t, err.Error()) } return buf.Bytes() } data := make([]byte, 0) send := func(payload []byte) error { data = append(data, payload...) return nil } eventChannel := NewEventChannel(send, 15000, 10, 2*time.Minute) eventChannel.Push([]byte("one")) eventChannel.flush() eventChannel.Push([]byte("two")) eventChannel.Push([]byte("three")) eventChannel.Close() time.Sleep(10 * time.Millisecond) expected := append(toGzip("one"), toGzip("twothree")...) assert.Equal(t, expected, data) }
explode_data.jsonl/43922
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 1556, 9629, 65062, 4061, 1155, 353, 8840, 836, 8, 1476, 31709, 38, 9964, 1669, 2915, 26772, 914, 8, 3056, 3782, 341, 197, 2405, 6607, 5820, 22622, 198, 197, 20832, 86, 1669, 57795, 7121, 6492, 2099, 5909, 692, 197, 743, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_Hoverfly_SetModeWithArguments_CanSetModeToModify(t *testing.T) { RegisterTestingT(t) unit := NewHoverflyWithConfiguration(&Configuration{}) Expect(unit.SetModeWithArguments( v2.ModeView{ Mode: "modify", })).To(BeNil()) Expect(unit.Cfg.Mode).To(Equal("modify")) }
explode_data.jsonl/45397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 2039, 1975, 21642, 14812, 3636, 2354, 19139, 920, 276, 1649, 3636, 1249, 44427, 1155, 353, 8840, 836, 8, 341, 79096, 16451, 51, 1155, 692, 81189, 1669, 1532, 34379, 21642, 2354, 7688, 2099, 7688, 6257, 692, 35911, 24144, 4202,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestErrorsPostWhenEmptyBody(t *testing.T) { r := NewRouter() bh := &bodyHandler{} bh.handle = func(Context) error { assert.FailNow(t, "called Handle") return nil } r.Post("/", func(Context) (Handler, error) { return bh, nil }) rec := httptest.NewRecorder() req := httptest.NewRequest("POST", "/", nil) req.Header.Set("content-type", contentTypeJSON) r.ServeHTTP(rec, req) resp := rec.Result() assert.Equal(t, http.StatusBadRequest, resp.StatusCode) body, err := ioutil.ReadAll(resp.Body) require.NoError(t, err) assert.Contains(t, string(body), "EOF") }
explode_data.jsonl/18960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 13877, 4133, 4498, 3522, 5444, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 9523, 2822, 2233, 71, 1669, 609, 2599, 3050, 16094, 2233, 71, 10132, 284, 2915, 14001, 8, 1465, 341, 197, 6948, 57243, 7039, 1155, 11, 330, 438...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRetryPolicyIsNotRetriable(t *testing.T) { theErr := fatalError{s: "it's dead Jim"} srv, close := mock.NewServer() defer close() srv.AppendResponse(mock.WithStatusCode(http.StatusRequestTimeout)) srv.AppendError(theErr) pl := NewPipeline(srv, NewRetryPolicy(testRetryOptions())) req, err := NewRequest(context.Background(), http.MethodGet, srv.URL()) if err != nil { t.Fatalf("unexpected error: %v", err) } _, err = pl.Do(req) if err == nil { t.Fatal("unexpected nil error") } if !errors.Is(err, theErr) { t.Fatalf("unexpected error type: got %v wanted %v", err, theErr) } if r := srv.Requests(); r != 2 { t.Fatalf("wrong retry count, got %d expected %d", r, 3) } }
explode_data.jsonl/24386
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 51560, 13825, 3872, 2623, 12020, 461, 480, 1155, 353, 8840, 836, 8, 341, 32088, 7747, 1669, 57863, 84386, 25, 330, 275, 594, 5593, 11387, 16707, 1903, 10553, 11, 3265, 1669, 7860, 7121, 5475, 741, 16867, 3265, 741, 1903, 105...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStruct_Basic(t *testing.T) { testItem := newPerson() // Pass by value - not ok err := Scrub(testItem, []string{}) assert.Error(t, err) // Pass by reference - ok err = Scrub(&testItem, []string{}) assert.NoError(t, err) assert.NoError(t, err) assert.Equal(t, int32(0), testItem.Height) assert.NotNil(t, testItem.Father) assert.NotNil(t, testItem.Children) assert.Nil(t, testItem.FullName) assert.Nil(t, testItem.Mother) assert.Nil(t, testItem.FullName) assert.True(t, len(testItem.Children) > 0) assert.Equal(t, int32(0), testItem.Children[0].Height) assert.Equal(t, int32(0), testItem.Children[1].Height) assert.Nil(t, testItem.Children[0].FullName) assert.Nil(t, testItem.Children[0].FullName) }
explode_data.jsonl/9422
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 9422, 1668, 5971, 1155, 353, 8840, 836, 8, 1476, 18185, 1234, 1669, 501, 10680, 2822, 197, 322, 9970, 553, 897, 481, 537, 5394, 198, 9859, 1669, 32134, 392, 8623, 1234, 11, 3056, 917, 37790, 6948, 6141, 1155, 11, 1848, 692...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindFitSomeError(t *testing.T) { predicates := map[string]algorithmpredicates.FitPredicate{"true": truePredicate, "matches": matchesPredicate} nodes := makeNodeList([]string{"3", "2", "1"}) scheduler := makeScheduler(predicates, nodes) pod := &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "1", UID: types.UID("1")}} _, predicateMap, err := scheduler.findNodesThatFit(pod, nodes) if err != nil { t.Errorf("unexpected error: %v", err) } if len(predicateMap) != (len(nodes) - 1) { t.Errorf("unexpected failed predicate map: %v", predicateMap) } for _, node := range nodes { if node.Name == pod.Name { continue } t.Run(node.Name, func(t *testing.T) { failures, found := predicateMap[node.Name] if !found { t.Errorf("failed to find node in %v", predicateMap) } if len(failures) != 1 || failures[0] != algorithmpredicates.ErrFakePredicate { t.Errorf("unexpected failures: %v", failures) } }) } }
explode_data.jsonl/6746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 373 }
[ 2830, 3393, 9885, 23346, 8373, 1454, 1155, 353, 8840, 836, 8, 341, 3223, 1151, 24821, 1669, 2415, 14032, 60, 278, 6063, 1307, 1151, 24821, 991, 275, 36329, 4913, 1866, 788, 830, 36329, 11, 330, 19914, 788, 9071, 36329, 532, 79756, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStream_writeRTValue(t *testing.T) { t.Run("test write failed", func(t *testing.T) { assert := base.NewAssert(t) testRange := getTestRange(streamPosBody, 3*streamBlockSize, 80, 80, 61) for _, testData := range streamTestWriteCollections["rtValue"] { for _, i := range testRange { testRuntime.thread.Reset() stream := NewStream() stream.SetWritePos(i) assert(stream.writeRTValue(testData[0].(RTValue))). Equals(testData[1]) if testData[1].(string) != StreamWriteOK { assert(stream.GetWritePos()).Equals(i) } stream.Release() } } }) t.Run("test write ok", func(t *testing.T) { assert := base.NewAssert(t) testRange := getTestRange(streamPosBody, 3*streamBlockSize, 80, 80, 61) for key := range streamTestSuccessCollections { for _, testData := range streamTestSuccessCollections[key] { for _, i := range testRange { testRuntime.thread.Reset() stream := NewStream() stream.SetWritePos(i) stream.SetReadPos(i) assert(stream.Write(testData[0])).Equals(StreamWriteOK) rtValue, _ := stream.ReadRTValue(testRuntime) stream.SetWritePos(i) assert(stream.writeRTValue(rtValue)).Equals(StreamWriteOK) assert(stream.GetWritePos()). Equals(len(testData[1].([]byte)) + i) stream.SetReadPos(i) assert(stream.Read()).Equals(testData[0], nil) stream.Release() } } } }) }
explode_data.jsonl/21215
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 602 }
[ 2830, 3393, 3027, 9165, 5350, 1130, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1944, 3270, 4641, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 6948, 1669, 2331, 7121, 8534, 1155, 340, 197, 18185, 6046, 1669, 633, 2271, 6046, 2057...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestValidate(t *testing.T) { testCases := []struct { name string version Version err string }{{ name: "empty", version: Version(""), err: `invalid schema version "": Invalid Semantic Version`, }, { name: "invalid", version: Version("not-semver"), err: `invalid schema version "not-semver": Invalid Semantic Version`, }, { name: "valid", version: Version("v1.0.0"), }} for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { err := tc.version.Validate() if tc.err != "" { assert.EqualError(t, err, tc.err) } else { assert.NoError(t, err) } }) } }
explode_data.jsonl/75540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 17926, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 74954, 6079, 198, 197, 9859, 257, 914, 198, 197, 15170, 515, 197, 11609, 25, 262, 330, 3194, 756, 197, 74954, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSocketConfig(t *testing.T) { createParser := func() func(string) (proto.Message, error) { return func(s string) (proto.Message, error) { config := new(SocketConfig) if err := json.Unmarshal([]byte(s), config); err != nil { return nil, err } return config.Build() } } runMultiTestCase(t, []TestCase{ { Input: `{ "mark": 1, "tcpFastOpen": true }`, Parser: createParser(), Output: &internet.SocketConfig{ Mark: 1, Tfo: internet.SocketConfig_Enable, }, }, }) }
explode_data.jsonl/15413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 10286, 2648, 1155, 353, 8840, 836, 8, 341, 39263, 6570, 1669, 2915, 368, 2915, 3609, 8, 320, 15110, 8472, 11, 1465, 8, 341, 197, 853, 2915, 1141, 914, 8, 320, 15110, 8472, 11, 1465, 8, 341, 298, 25873, 1669, 501, 73066, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestContentLengthZero(t *testing.T) { ts := httptest.NewServer(HandlerFunc(func(rw ResponseWriter, req *Request) {})) defer ts.Close() for _, version := range []string{"HTTP/1.0", "HTTP/1.1"} { conn, err := net.Dial("tcp", ts.Listener.Addr().String()) if err != nil { t.Fatalf("error dialing: %v", err) } _, err = fmt.Fprintf(conn, "GET / %v\r\nConnection: keep-alive\r\nHost: foo\r\n\r\n", version) if err != nil { t.Fatalf("error writing: %v", err) } req, _ := NewRequest("GET", "/", nil) res, err := ReadResponse(bufio.NewReader(conn), req) if err != nil { t.Fatalf("error reading response: %v", err) } if te := res.TransferEncoding; len(te) > 0 { t.Errorf("For version %q, Transfer-Encoding = %q; want none", version, te) } if cl := res.ContentLength; cl != 0 { t.Errorf("For version %q, Content-Length = %v; want 0", version, cl) } conn.Close() } }
explode_data.jsonl/22451
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 2762, 4373, 17999, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 5475, 7, 3050, 9626, 18552, 2601, 86, 5949, 6492, 11, 4232, 353, 1900, 8, 4687, 1171, 16867, 10591, 10421, 2822, 2023, 8358, 2319, 1669, 2088,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCleanup007(t *testing.T) { var inWord string var got, want string inWord = `` want = `` got = cleanupWord(inWord) if got != want { t.Errorf("cleanupWord(inWord) == %q, want %q", got, want) } }
explode_data.jsonl/59192
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 67335, 15, 15, 22, 1155, 353, 8840, 836, 8, 341, 2405, 304, 10879, 914, 198, 2405, 2684, 11, 1366, 914, 271, 17430, 10879, 284, 1565, 3989, 50780, 284, 1565, 3989, 3174, 354, 284, 21290, 10879, 5900, 10879, 692, 743, 2684,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPostStoreGetSingle(t *testing.T) { Setup() o1 := &model.Post{} o1.ChannelId = model.NewId() o1.UserId = model.NewId() o1.Message = "zz" + model.NewId() + "b" o1 = (<-store.Post().Save(o1)).Data.(*model.Post) if r1 := <-store.Post().GetSingle(o1.Id); r1.Err != nil { t.Fatal(r1.Err) } else { if r1.Data.(*model.Post).CreateAt != o1.CreateAt { t.Fatal("invalid returned post") } } if err := (<-store.Post().GetSingle("123")).Err; err == nil { t.Fatal("Missing id should have failed") } }
explode_data.jsonl/55939
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 4133, 6093, 1949, 10888, 1155, 353, 8840, 836, 8, 341, 197, 21821, 2822, 22229, 16, 1669, 609, 2528, 23442, 16094, 22229, 16, 38716, 764, 284, 1614, 7121, 764, 741, 22229, 16, 37478, 284, 1614, 7121, 764, 741, 22229, 16, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSigReleaseMasterBlockingOrInformingJobsShouldUseFastBuilds(t *testing.T) { jobs := allStaticJobs() for _, job := range jobs { dashboards, ok := job.Annotations["testgrid-dashboards"] if !ok || !strings.Contains(dashboards, "sig-release-master-blocking") || !strings.Contains(dashboards, "sig-release-master-informing") { continue } extract := "" for _, arg := range job.Spec.Containers[0].Args { if strings.HasPrefix(arg, "--extract=") { extract = strings.TrimPrefix(arg, "--extract=") if extract == "ci/latest" { t.Errorf("%s: release-master-blocking e2e jobs must use --extract=ci/latest-fast, found --extract=ci/latest instead", job.Name) } } } } }
explode_data.jsonl/55589
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 47246, 16077, 18041, 48266, 2195, 37891, 287, 40667, 14996, 10253, 32174, 11066, 82, 1155, 353, 8840, 836, 8, 341, 12428, 5481, 1669, 678, 11690, 40667, 741, 2023, 8358, 2618, 1669, 2088, 6887, 341, 197, 2698, 988, 19270, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestCreateVolume(t *testing.T) { testCases := []struct { name string testFunc func(t *testing.T) }{ { name: "valid request", testFunc: func(t *testing.T) { d, err := NewFakeDriver(t) require.NoError(t, err) stdCapacityRangetest := &csi.CapacityRange{ RequiredBytes: pkg.GiBToBytes(10), LimitBytes: pkg.GiBToBytes(15), } req := &csi.CreateVolumeRequest{ Name: testVolumeName, VolumeCapabilities: stdVolumeCapabilities, CapacityRange: stdCapacityRangetest, } disk := NewFakeDisk(stdCapacityRangetest) testCloud := d.GetCloud() mockDisksClient := testCloud.DisksClient.(*mockdiskclient.MockInterface) //disk := getTestDisk(test.diskName) mockDisksClient.EXPECT().CreateOrUpdate(gomock.Any(), testCloud.ResourceGroup, testVolumeName, gomock.Any()).Return(nil).AnyTimes() mockDisksClient.EXPECT().Get(gomock.Any(), testCloud.ResourceGroup, testVolumeName).Return(disk, nil).AnyTimes() _, err = d.CreateVolume(context.Background(), req) require.NoError(t, err) }, }, } for _, tc := range testCases { t.Run(tc.name, tc.testFunc) } }
explode_data.jsonl/49369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 4021, 18902, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 18185, 9626, 2915, 1155, 353, 8840, 836, 340, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 1891, 1681, 756,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRMIDComparison(t *testing.T) { a := "rmid1:0d77c-b0b2e66aece-00000000-00000001" b := "rmid1:0d77c-b0b2e66aece-00000000-00000002" rmidA, err := ReplicationGroupMessageIDFromString(a) if err != nil { t.Error(err) } if rmidA == nil { t.Error("Expected non nil RMID, got nil") } rmidB, err := ReplicationGroupMessageIDFromString(b) if err != nil { t.Error(err) } if rmidB == nil { t.Error("Expected non nil RMID, got nil") } result, err := rmidA.Compare(rmidB) if err != nil { t.Error(err) } if result != -1 { t.Errorf("Expected result to be -1, got %d", result) } result, err = rmidB.Compare(rmidA) if err != nil { t.Error(err) } if result != 1 { t.Errorf("Expected result to be 1, got %d", result) } }
explode_data.jsonl/72226
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 338 }
[ 2830, 3393, 23652, 915, 33487, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 330, 8719, 307, 16, 25, 15, 67, 22, 22, 66, 1455, 15, 65, 17, 68, 21, 21, 5918, 346, 12, 15, 15, 15, 15, 15, 15, 15, 15, 12, 15, 15, 15, 15, 15, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestTrie_VisitReturnError(t *testing.T) { trie := NewTrie() data := []testData{ {"Pepa", 0, success}, {"Pepa Zdepa", 1, success}, {"Pepa Kuchar", 2, success}, {"Honza", 3, success}, {"Jenik", 4, success}, } for _, v := range data { t.Logf("INSERT prefix=%v, item=%v, success=%v", v.key, v.value, v.retVal) if ok := trie.Insert([]byte(v.key), v.value); ok != v.retVal { t.Fatalf("Unexpected return value, expected=%v, got=%v", v.retVal, ok) } } someErr := errors.New("Something exploded") if err := trie.Visit(func(prefix Prefix, item Item) error { t.Logf("VISITING prefix=%q, item=%v", prefix, item) if item.(int) == 3 { return someErr } if item.(int) != 3 { t.Errorf("Unexpected prefix encountered, %q", prefix) } return nil }); err != nil && err != someErr { t.Fatal(err) } }
explode_data.jsonl/2365
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 51, 7231, 2334, 285, 275, 5598, 1454, 1155, 353, 8840, 836, 8, 341, 197, 8927, 1669, 1532, 51, 7231, 2822, 8924, 1669, 3056, 1944, 1043, 515, 197, 197, 4913, 47, 747, 64, 497, 220, 15, 11, 2393, 1583, 197, 197, 4913, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPipeline(t *testing.T) { tests := []struct { name string steps []pipeline.PipelineStep data []map[string]interface{} }{ { name: "Empty", steps: []pipeline.PipelineStep{}, data: []map[string]interface{}{}, }, { name: "SingleButEmpty", steps: []pipeline.PipelineStep{ pipeline.ProvideStep{ Data: []map[string]interface{}{}, }, }, data: []map[string]interface{}{}, }, { name: "Single", steps: []pipeline.PipelineStep{ pipeline.ProvideStep{ Data: []map[string]interface{}{ {"Data": "A"}, }, }, }, data: []map[string]interface{}{ {"Data": "A"}, }, }, } for i, test := range tests { t.Run(fmt.Sprintf("[%d]%s", i, test.name), func(t *testing.T) { pipe := pipeline.NewPipeline(test.steps, pipeline.NextEOF) VerifyNext(t, pipe.Next, test.data) require.NoError(t, pipe.Close()) }) } }
explode_data.jsonl/29442
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 34656, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 18388, 7124, 3056, 51258, 1069, 8790, 8304, 198, 197, 8924, 220, 3056, 2186, 14032, 31344, 16094, 197, 59403, 197, 197, 515,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBreakerTripping(t *testing.T) { cb := NewBreaker() if cb.Tripped() { t.Fatal("expected breaker to not be tripped") } cb.Trip() if !cb.Tripped() { t.Fatal("expected breaker to be tripped") } cb.Reset() if cb.Tripped() { t.Fatal("expected breaker to have been reset") } }
explode_data.jsonl/60793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 22524, 261, 21884, 10732, 1155, 353, 8840, 836, 8, 341, 63810, 1669, 1532, 22524, 261, 2822, 743, 9858, 836, 461, 6924, 368, 341, 197, 3244, 26133, 445, 7325, 66767, 311, 537, 387, 2406, 6924, 1138, 197, 630, 63810, 836, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAddManyAgents(t *testing.T) { count := 1000 for i := 0; i < count; i ++ { agentList, err := agents.SharedAgentList() if err != nil { t.Fatal(err) } agent := agents.NewAgentConfig() agent.On = false agent.Name = "Web" + fmt.Sprintf("%d", i) agent.Host = "192.168.0." + fmt.Sprintf("%d", i) agent.AllowAll = true agent.Allow = []string{} agent.Key = stringutil.Rand(32) //agent.GroupIds = []string{"2kMMzOcWWPFrhdaM"} err = agent.Save() if err != nil { t.Fatal(err) } agentList.AddAgent(agent.Filename()) err = agentList.Save() if err != nil { t.Fatal(err) } } }
explode_data.jsonl/32405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 2212, 8441, 91804, 1155, 353, 8840, 836, 8, 341, 18032, 1669, 220, 16, 15, 15, 15, 198, 2023, 600, 1669, 220, 15, 26, 600, 366, 1760, 26, 600, 3443, 341, 197, 197, 8092, 852, 11, 1848, 1669, 13009, 32969, 16810, 852, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStruct(t *testing.T) { type str struct { IntVal int `json:"int"` StrVal string `json:"str"` FloatVal float64 `json:"float"` BoolVal bool `json:"bool"` private string } var s str m := Map{"int": 4, "str": "now's the time", "float": 3.14159, "bool": true, "private": "Somewhere over the rainbow"} mverr := m.Struct(&s) if mverr != nil { t.Fatal("mverr:", mverr.Error()) } fmt.Printf("Struct, m: %#v\n", m) fmt.Printf("Struct, s: %#v\n", s) }
explode_data.jsonl/59168
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 9422, 1155, 353, 8840, 836, 8, 341, 13158, 607, 2036, 341, 197, 57152, 2208, 256, 526, 257, 1565, 2236, 2974, 396, 8805, 197, 197, 2580, 2208, 256, 914, 220, 1565, 2236, 2974, 495, 8805, 197, 197, 5442, 2208, 2224, 21, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetFileContentTypeKO(t *testing.T) { file := `../testdata/files/test.txt` _, err := GetFileContentType(file) if err == nil { t.Log("Error -> ", err) t.Fail() } t.Log(err) }
explode_data.jsonl/24004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 1949, 1703, 29504, 54947, 1155, 353, 8840, 836, 8, 341, 17661, 1669, 1565, 1244, 92425, 33220, 12697, 3909, 3989, 197, 6878, 1848, 1669, 2126, 1703, 29504, 4866, 692, 743, 1848, 621, 2092, 341, 197, 3244, 5247, 445, 1454, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestURLString(t *testing.T) { for _, tt := range urltests { u, err := Parse(tt.in) if err != nil { t.Errorf("Parse(%q) returned error %s", tt.in, err) continue } expected := tt.in if tt.roundtrip != "" { expected = tt.roundtrip } s := u.String() if s != expected { t.Errorf("Parse(%q).String() == %q (expected %q)", tt.in, s, expected) } } for _, tt := range stringURLTests { if got := tt.url.String(); got != tt.want { t.Errorf("%+v.String() = %q; want %q", tt.url, got, tt.want) } } }
explode_data.jsonl/71720
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 3144, 703, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 2515, 23841, 341, 197, 10676, 11, 1848, 1669, 14775, 47152, 1858, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244, 13080, 445, 14463, 15238, 80, 8, 592...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestMessageCommandArgumentsWithArguments(t *testing.T) { message := tgbotapi.Message{Text: "/command with arguments"} message.Entities = &[]tgbotapi.MessageEntity{{Type: "bot_command", Offset: 0, Length: 8}} if message.CommandArguments() != "with arguments" { t.Fail() } }
explode_data.jsonl/25803
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 2052, 4062, 19139, 2354, 19139, 1155, 353, 8840, 836, 8, 341, 24753, 1669, 53188, 6331, 2068, 8472, 90, 1178, 25, 3521, 5631, 448, 5977, 16707, 24753, 23793, 284, 609, 1294, 41428, 6331, 2068, 8472, 3030, 2979, 929, 25, 330,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateIdentityRemappingPrefix(t *testing.T) { for _, s := range []string{ "localhost", "example.com", "example.com:80", "example.com/repo", "example.com/ns1/ns2/ns3/repo.with.dots-dashes_underscores", "example.com:80/ns1/ns2/ns3/repo.with.dots-dashes_underscores", // NOTE: These values are invalid, do not actually work, and may be rejected by this function // and in NewPRMRemapIdentity in the future. "shortname", "ns/shortname", } { err := validateIdentityRemappingPrefix(s) assert.NoError(t, err, s) } for _, s := range []string{ "", "repo_with_underscores", // Not a valid DNS name, at least per docker/reference "example.com/", "example.com/UPPERCASEISINVALID", "example.com/repo/", "example.com/repo:tag", "example.com/repo@sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", "example.com/repo:tag@sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", } { err := validateIdentityRemappingPrefix(s) assert.Error(t, err, s) } }
explode_data.jsonl/36521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 440 }
[ 2830, 3393, 17926, 18558, 6590, 3629, 14335, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 274, 1669, 2088, 3056, 917, 515, 197, 197, 1, 8301, 756, 197, 197, 1, 8687, 905, 756, 197, 197, 1, 8687, 905, 25, 23, 15, 756, 197, 197, 1, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestGC_TrackDeletedManifests verifies that deleting a manifest enqueues its
// config blob on the GC blob review queue, scheduled 24 hours ahead.
// Competing DB triggers are disabled up front so only the manifest-delete
// trigger can populate the queue during this test.
func TestGC_TrackDeletedManifests(t *testing.T) {
	require.NoError(t, testutil.TruncateAllTables(suite.db))

	// disable other triggers that also insert on gc_blob_review_queue so that they don't interfere with this test
	enable, err := testutil.GCTrackConfigurationBlobsTrigger.Disable(suite.db)
	require.NoError(t, err)
	defer enable()
	enable, err = testutil.GCTrackBlobUploadsTrigger.Disable(suite.db)
	require.NoError(t, err)
	defer enable()

	// create repo
	r := randomRepository(t)
	rs := datastore.NewRepositoryStore(suite.db)
	r, err = rs.CreateByPath(suite.ctx, r.Path)
	require.NoError(t, err)

	// create config blob
	bs := datastore.NewBlobStore(suite.db)
	b := randomBlob(t)
	err = bs.Create(suite.ctx, b)
	require.NoError(t, err)
	err = rs.LinkBlob(suite.ctx, r, b.Digest)
	require.NoError(t, err)

	// create manifest
	ms := datastore.NewManifestStore(suite.db)
	m := randomManifest(t, r, b)
	err = ms.Create(suite.ctx, m)
	require.NoError(t, err)

	// confirm that the review queue remains empty
	brs := datastore.NewGCBlobTaskStore(suite.db)
	count, err := brs.Count(suite.ctx)
	require.NoError(t, err)
	require.Zero(t, count)

	// delete manifest
	ok, err := rs.DeleteManifest(suite.ctx, r, m.Digest)
	require.NoError(t, err)
	require.True(t, ok)

	// check that a corresponding task was created for the config blob and scheduled for 1 day ahead
	tt, err := brs.FindAll(suite.ctx)
	require.NoError(t, err)
	require.Equal(t, 1, len(tt))
	require.Equal(t, 0, tt[0].ReviewCount)
	require.Equal(t, b.Digest, tt[0].Digest)
	// ignore the few milliseconds between blob creation and queueing for review in response to the manifest delete
	require.WithinDuration(t, tt[0].ReviewAfter, b.CreatedAt.Add(24*time.Hour), 200*time.Millisecond)
}
explode_data.jsonl/48564
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 656 }
[ 2830, 3393, 22863, 21038, 473, 26039, 38495, 82, 1155, 353, 8840, 836, 8, 341, 17957, 35699, 1155, 11, 1273, 1314, 8240, 26900, 2403, 21670, 89516, 7076, 4390, 197, 322, 11156, 1008, 30754, 429, 1083, 5656, 389, 22122, 45908, 38661, 10841...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewResource(t *testing.T) { r, err := NewResource(resType, resName, resGroup, resVersion, resKind, resNsd) if err != nil { t.Fatalf("failed creating new resource: %v", err) } if n := r.Name(); n != resName { t.Errorf("expected name: %s, got: %s", resName, n) } if g := r.Group(); g != resGroup { t.Errorf("expected group: %s, got: %s", resGroup, g) } if v := r.Version(); v != resVersion { t.Errorf("expected version: %s, got: %s", resVersion, v) } if k := r.Kind(); k != resKind { t.Errorf("expected kind: %s, got: %s", resKind, k) } if n := r.Namespaced(); n != resNsd { t.Errorf("expected namespaced: %v, got: %v", resNsd, n) } }
explode_data.jsonl/38888
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 3564, 4783, 1155, 353, 8840, 836, 8, 341, 7000, 11, 1848, 1669, 1532, 4783, 4590, 929, 11, 592, 675, 11, 592, 2808, 11, 592, 5637, 11, 592, 10629, 11, 592, 45, 13446, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestHamming(t *testing.T) { for _, row := range hamming_testdata { res := Hamming(row[0], row[1]) expected, err := strconv.Atoi(row[2]) if err != nil { t.Error("bad row in test data") } if res != expected { t.Errorf("Hamming(%q, %q) => %d, expected %d", row[0], row[1], res, expected) } } }
explode_data.jsonl/58064
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 39941, 5311, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2802, 1669, 2088, 13515, 5311, 4452, 691, 341, 197, 10202, 1669, 9582, 5311, 7835, 58, 15, 1125, 2802, 58, 16, 2546, 197, 42400, 11, 1848, 1669, 33317, 67107, 7835, 58...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestCapability_CompatibleWithNetCap exercises Capabilities.CompatibleWith
// across empty, disjoint, equal, superset, and mandatory capability sets, in
// both broadcaster->orchestrator and orchestrator->broadcaster directions.
// The assertions are order-dependent: bcast/orch are mutated between checks.
func TestCapability_CompatibleWithNetCap(t *testing.T) {
	assert := assert.New(t)

	// sanity checks
	bcast := NewCapabilities(nil, nil)
	orch := NewCapabilities(nil, nil)
	assert.Nil(bcast.bitstring)
	assert.Nil(orch.bitstring)
	assert.True(bcast.CompatibleWith(orch.ToNetCapabilities()))
	assert.True(orch.CompatibleWith(bcast.ToNetCapabilities()))

	// orchestrator is not compatible with broadcaster - empty cap set
	bcast = NewCapabilities([]Capability{1}, nil)
	assert.Empty(orch.bitstring)
	assert.False(bcast.CompatibleWith(orch.ToNetCapabilities()))
	assert.True(orch.CompatibleWith(bcast.ToNetCapabilities())) // sanity check; not commutative

	// orchestrator is not compatible with broadcaster - different cap set
	orch = NewCapabilities([]Capability{2}, nil)
	assert.False(bcast.CompatibleWith(orch.ToNetCapabilities()))

	// B / O are equivalent
	orch = NewCapabilities([]Capability{1}, nil)
	assert.Equal(bcast.bitstring, orch.bitstring)
	assert.True(bcast.CompatibleWith(orch.ToNetCapabilities()))
	assert.True(orch.CompatibleWith(bcast.ToNetCapabilities()))

	// O supports a superset of B's capabilities
	orch = NewCapabilities([]Capability{1, 2}, nil)
	assert.True(bcast.CompatibleWith(orch.ToNetCapabilities()))

	// check a mandatory capability - no match
	mandatory := []Capability{3}
	orch = NewCapabilities([]Capability{1, 2, 3}, mandatory)
	assert.False(bcast.CompatibleWith(orch.ToNetCapabilities()))
	assert.False(orch.CompatibleWith(bcast.ToNetCapabilities()))

	// check a mandatory capability - match only the single mandatory capability
	assert.Equal(NewCapabilityString(mandatory), orch.mandatories)
	bcast = NewCapabilities(mandatory, nil)
	assert.True(bcast.CompatibleWith(orch.ToNetCapabilities()))

	// check a mandatory capability - match with B's multiple capabilities
	bcast = NewCapabilities([]Capability{1, 3}, nil)
	assert.True(bcast.CompatibleWith(orch.ToNetCapabilities()))

	// broadcaster "mandatory" capabilities have no effect during regular match
	orch = NewCapabilities(nil, nil)
	bcast = NewCapabilities(nil, []Capability{1})
	assert.True(bcast.CompatibleWith(orch.ToNetCapabilities()))
}
explode_data.jsonl/74080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 699 }
[ 2830, 3393, 63746, 16946, 37079, 2354, 6954, 12903, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 197, 322, 46842, 12341, 198, 2233, 3829, 1669, 1532, 55315, 27907, 11, 2092, 340, 197, 21584, 1669, 1532, 55315, 27907, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewHeader(t *testing.T) { user := mspmocks.NewMockSigningIdentity("test", "1234") ctx := mocks.NewMockContext(user) creator, err := ctx.Serialize() require.NoError(t, err) txh, err := NewHeader(ctx, testChannel) require.NoError(t, err) require.NotEmptyf(t, txh.nonce, "Expecting nonce") require.Equal(t, creator, txh.creator) require.NotEmpty(t, txh.id) creator = []byte("someothercreator") nonce := []byte("123456") txh, err = NewHeader(ctx, testChannel, fab.WithCreator(creator), fab.WithNonce(nonce)) require.NoError(t, err) require.Equal(t, nonce, txh.nonce) require.Equal(t, creator, txh.creator) require.NotEmpty(t, txh.id) }
explode_data.jsonl/26394
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 3564, 4047, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 296, 2154, 16712, 82, 7121, 11571, 93358, 18558, 445, 1944, 497, 330, 16, 17, 18, 19, 1138, 20985, 1669, 68909, 7121, 11571, 1972, 4277, 692, 197, 32398, 11, 1848, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestColocationPuzzleInfoCanonicalDigest(t *testing.T) { puzzle := ColocationPuzzleInfo{ Goal: []byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15}, Rounds: 123, StartOffset: 456, StartRange: 789, } h := sha512.New384() puzzle.canonicalDigest(h) digest := h.Sum(nil) expected := []byte{0x18, 0x42, 0x72, 0x3a, 0x37, 0x19, 0x2b, 0x1, 0xd7, 0xca, 0xf2, 0x6c, 0x6, 0x30, 0xee, 0x2e, 0xe1, 0x6f, 0xce, 0x5a, 0x12, 0xa2, 0x14, 0x48, 0x84, 0xd1, 0x7d, 0x34, 0x8c, 0x50, 0x2b, 0xce, 0x7f, 0x1, 0x47, 0x4c, 0x67, 0xe4, 0xb3, 0x37, 0x5c, 0xa3, 0x3c, 0xc0, 0x9c, 0xf9, 0xd3, 0x10} assert.Equal(t, expected, digest) }
explode_data.jsonl/45222
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 6127, 2276, 47, 14945, 1731, 70914, 45217, 1155, 353, 8840, 836, 8, 341, 3223, 14945, 1669, 4254, 2276, 47, 14945, 1731, 515, 197, 9600, 78, 278, 25, 286, 3056, 3782, 90, 15, 11, 220, 16, 11, 220, 17, 11, 220, 18, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSecureCancelHandshake starts a secure handshake between the two halves
// of a connection, cancels the shared context partway through, and expects
// both handshake goroutines to report an error on the done channel.
//
// NOTE(review): the <-time.After(time.Millisecond) is a best-effort delay to
// let the first handshake begin before cancellation — timing-based, so the
// exact interleaving is not guaranteed.
func TestSecureCancelHandshake(t *testing.T) {
	// t.Skip("Skipping in favor of another test")
	ctx, cancel := context.WithCancel(context.Background())
	c1, c2, p1, p2 := setupSingleConn(t, ctx)
	done := make(chan error)
	go secureHandshake(t, ctx, p1.PrivKey, c1, done)
	<-time.After(time.Millisecond)
	cancel() // cancel ctx
	go secureHandshake(t, ctx, p2.PrivKey, c2, done)
	// Collect one result per handshake goroutine; both must have errored.
	for i := 0; i < 2; i++ {
		if err := <-done; err == nil {
			t.Error("cancel should've errored out")
		}
	}
}
explode_data.jsonl/62879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 49813, 9269, 2314, 29661, 1155, 353, 8840, 836, 8, 341, 197, 322, 259, 57776, 445, 85945, 304, 4694, 315, 2441, 1273, 5130, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 1444, 16, 11, 272, 17, 11, 281, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestFetchIntoObjectPool_CollectLogStatistics runs FetchIntoObjectPool
// against a test object-pool server with a null logger hooked in, then scans
// the captured log entries for a "count_objects" field and asserts it carries
// the expected set of git object-statistics keys.
func TestFetchIntoObjectPool_CollectLogStatistics(t *testing.T) {
	cfg, repo, _ := testcfg.BuildWithRepo(t)
	testcfg.BuildGitalyHooks(t, cfg)

	locator := config.NewLocator(cfg)

	// Null logger + hook lets us inspect what the server logged.
	logger, hook := test.NewNullLogger()
	serverSocketPath := runObjectPoolServer(t, cfg, locator, logger)

	conn, err := grpc.Dial(serverSocketPath, grpc.WithInsecure())
	require.NoError(t, err)
	t.Cleanup(func() { testhelper.MustClose(t, conn) })

	client := gitalypb.NewObjectPoolServiceClient(conn)

	ctx, cancel := testhelper.Context()
	defer cancel()
	ctx = ctxlogrus.ToContext(ctx, log.WithField("test", "logging"))

	pool := initObjectPool(t, cfg, cfg.Storages[0])
	req := &gitalypb.FetchIntoObjectPoolRequest{
		ObjectPool: pool.ToProto(),
		Origin:     repo,
		Repack:     true,
	}

	_, err = client.FetchIntoObjectPool(ctx, req)
	require.NoError(t, err)

	const key = "count_objects"
	for _, logEntry := range hook.AllEntries() {
		if stats, ok := logEntry.Data[key]; ok {
			require.IsType(t, map[string]interface{}{}, stats)
			var keys []string
			for key := range stats.(map[string]interface{}) {
				keys = append(keys, key)
			}
			require.ElementsMatch(t, []string{
				"count",
				"garbage",
				"in-pack",
				"packs",
				"prune-packable",
				"size",
				"size-garbage",
				"size-pack",
			}, keys)
			return
		}
	}
	// No log entry carried the statistics field at all.
	require.FailNow(t, "no info about statistics")
}
explode_data.jsonl/62526
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 20714, 26591, 1190, 10551, 920, 24605, 2201, 38599, 1155, 353, 8840, 836, 8, 341, 50286, 11, 15867, 11, 716, 1669, 1273, 14072, 25212, 2354, 25243, 1155, 692, 18185, 14072, 25212, 38, 2174, 88, 67769, 1155, 11, 13286, 692, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetDeployment is a table-driven test of GetDeployment using recorded
// (cassette) API interactions. It covers: a bad environment, scaled-down
// deployment selection, missing/mismatched space labels (tolerated for now —
// see the FIXMEs), and per-resource list/get error propagation, where the
// error is checked against errors.IsBadParameterError.
func TestGetDeployment(t *testing.T) {
	testCases := []*deployTestData{
		defaultDeployTestData,
		{
			testName:     "Bad Environment",
			spaceName:    "mySpace",
			appName:      "myApp",
			envName:      "doesNotExist",
			cassetteName: "getdeployment",
			shouldFail:   true,
		},
		{
			// Verifies that a newer scaled down deployment is favoured over
			// an older one with replicas and an even newer deployment that failed
			testName:         "Scaled Down",
			spaceName:        "mySpace",
			appName:          "myApp",
			envName:          "run",
			expectVersion:    "1.0.2",
			expectPodStatus:  [][]string{},
			expectPodsTotal:  0,
			expectConsoleURL: "http://console.myCluster/console/project/my-run",
			expectLogURL:     "http://console.myCluster/console/project/my-run/browse/rc/myDeploy-2?tab=logs",
			expectAppURL:     "http://myDeploy-my-run.example.com",
			// Contains RCs in ascending deployment version:
			// 1. Visible 2. Scaled-down "active" 3. Failed
			cassetteName: "getdeployment-scaled-down",
		},
		{
			// Tests handling of a deployment config with missing space label
			// FIXME When our workaround is no longer needed, we should expect
			// an error
			testName:      "No Space Label",
			spaceName:     "mySpace",
			appName:       "myApp",
			envName:       "run",
			expectVersion: "1.0.2",
			expectPodStatus: [][]string{
				{"Running", "2"},
			},
			expectPodsTotal:         2,
			expectPodsQuotaCpucores: 0.976,
			expectPodsQuotaMemory:   524288000,
			expectConsoleURL:        "http://console.myCluster/console/project/my-run",
			expectLogURL:            "http://console.myCluster/console/project/my-run/browse/rc/myDeploy-1?tab=logs",
			expectAppURL:            "http://myDeploy-my-run.example.com",
			cassetteName:            "getdeployment-nospace",
		},
		{
			// Tests handling of a deployment config with a space label
			// different from the argument passed to GetDeployment
			// FIXME When our workaround is no longer needed, we should expect
			// an error
			testName:      "Wrong Space Label",
			spaceName:     "myWrongSpace",
			appName:       "myApp",
			envName:       "run",
			expectVersion: "1.0.2",
			expectPodStatus: [][]string{
				{"Running", "2"},
			},
			expectPodsTotal:         2,
			expectPodsQuotaCpucores: 0.976,
			expectPodsQuotaMemory:   524288000,
			expectConsoleURL:        "http://console.myCluster/console/project/my-run",
			expectLogURL:            "http://console.myCluster/console/project/my-run/browse/rc/myDeploy-1?tab=logs",
			expectAppURL:            "http://myDeploy-my-run.example.com",
			cassetteName:            "getdeployment-wrongspace",
		},
		{
			testName:     "Build List Error",
			spaceName:    "mySpace",
			appName:      "myApp",
			envName:      "run",
			cassetteName: "getdeployment-build-error",
			shouldFail:   true,
			errorChecker: errors.IsBadParameterError,
		},
		{
			testName:     "DC Get Error",
			spaceName:    "mySpace",
			appName:      "myApp",
			envName:      "run",
			cassetteName: "getdeployment-dc-error",
			shouldFail:   true,
			errorChecker: errors.IsBadParameterError,
		},
		{
			testName:     "RC List Error",
			spaceName:    "mySpace",
			appName:      "myApp",
			envName:      "run",
			cassetteName: "getdeployment-rc-error",
			shouldFail:   true,
			errorChecker: errors.IsBadParameterError,
		},
		{
			testName:     "Pod List Error",
			spaceName:    "mySpace",
			appName:      "myApp",
			envName:      "run",
			cassetteName: "getdeployment-pod-error",
			shouldFail:   true,
			errorChecker: errors.IsBadParameterError,
		},
		{
			testName:     "Service List Error",
			spaceName:    "mySpace",
			appName:      "myApp",
			envName:      "run",
			cassetteName: "getdeployment-svc-error",
			shouldFail:   true,
			errorChecker: errors.IsBadParameterError,
		},
		{
			testName:     "Route List Error",
			spaceName:    "mySpace",
			appName:      "myApp",
			envName:      "run",
			cassetteName: "getdeployment-route-error",
			shouldFail:   true,
			errorChecker: errors.IsBadParameterError,
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.testName, func(t *testing.T) {
			// Each case replays its own recorded API cassette.
			r, err := recorder.New(pathToTestJSON + testCase.cassetteName)
			require.NoError(t, err, "Failed to open cassette")
			defer r.Stop()

			fixture := &testFixture{}
			kc := getDefaultKubeClient(fixture, r.Transport, t)

			dep, err := kc.GetDeployment(testCase.spaceName, testCase.appName, testCase.envName)
			if testCase.shouldFail {
				require.Error(t, err, "Expected an error")
				if testCase.errorChecker != nil {
					matches, _ := testCase.errorChecker(err)
					require.True(t, matches, "Error or cause must be the expected type")
				}
			} else {
				require.NoError(t, err, "Unexpected error occurred")
				verifyDeployment(dep, testCase, t)
			}
		})
	}
}
explode_data.jsonl/41272
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2196 }
[ 2830, 3393, 1949, 75286, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 29838, 35794, 83920, 515, 197, 11940, 69464, 83920, 345, 197, 197, 515, 298, 18185, 675, 25, 257, 330, 17082, 11586, 756, 298, 1903, 1306, 675, 25, 262, 330, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds checks that
// scrape-loop append tolerates samples the (error-injecting) appender rejects
// — out-of-order, amended, and out-of-bounds — keeping only the valid one,
// and reports the expected total/added/seriesAdded counters without erroring.
func TestScrapeLoopAppendGracefullyIfAmendOrOutOfOrderOrOutOfBounds(t *testing.T) {
	app := &errorAppender{}
	sl := newScrapeLoop(context.Background(),
		nil, nil, nil,
		nopMutator,
		nopMutator,
		func() storage.Appender { return app },
		nil,
		0,
		true,
	)

	now := time.Unix(1, 0)
	total, added, seriesAdded, err := sl.append([]byte("out_of_order 1\namend 1\nnormal 1\nout_of_bounds 1\n"), "", now)
	if err != nil {
		t.Fatalf("Unexpected append error: %s", err)
	}

	// Only the "normal" sample should survive; the other three are rejected.
	want := []sample{
		{
			metric: labels.FromStrings(model.MetricNameLabel, "normal"),
			t:      timestamp.FromTime(now),
			v:      1,
		},
	}
	if !reflect.DeepEqual(want, app.result) {
		t.Fatalf("Appended samples not as expected. Wanted: %+v Got: %+v", want, app.result)
	}
	// 4 samples scraped in total, 1 accepted, 1 new series.
	testutil.Equals(t, 4, total)
	testutil.Equals(t, 1, added)
	testutil.Equals(t, 1, seriesAdded)
}
explode_data.jsonl/56134
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 369 }
[ 2830, 3393, 3326, 19842, 14620, 23877, 86543, 3641, 2679, 6091, 408, 2195, 31731, 4431, 2195, 61349, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 609, 841, 2164, 1659, 31483, 78626, 1669, 501, 3326, 19842, 14620, 5378, 19047, 3148, 197, 841...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMapProxy_PutIfAbsentWithNilValue checks that PutIfAbsent rejects a nil
// value with a non-nil error, then clears the shared map proxy so later tests
// start from an empty map.
func TestMapProxy_PutIfAbsentWithNilValue(t *testing.T) {
	_, err := mp.PutIfAbsent("test", nil)
	AssertErrorNotNil(t, err, "putIfAbsent did not return an error for nil value")
	mp.Clear()
}
explode_data.jsonl/57012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 2227, 16219, 1088, 332, 2679, 80251, 2354, 19064, 1130, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 39825, 2679, 80251, 445, 1944, 497, 2092, 340, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, 628, 2679, 80251...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1