text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestMapProxy_PutWithNilValue(t *testing.T) { testKey := "testingKey" _, err := mp.Put(testKey, nil) AssertErrorNotNil(t, err, "put did not return an error for nil value") mp.Clear() }
explode_data.jsonl/56957
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 2227, 16219, 1088, 332, 2354, 19064, 1130, 1155, 353, 8840, 836, 8, 341, 18185, 1592, 1669, 330, 8840, 1592, 698, 197, 6878, 1848, 1669, 10490, 39825, 8623, 1592, 11, 2092, 340, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestParseMalformedJson(t *testing.T) { input := `{"invalid: json"}` reader := strings.NewReader(input) result, err := Parse(reader) if err == nil { t.Errorf("Unexpected error: %s\n", err) } if result != nil { t.Errorf("Unexpected result: %v\n", result) } }
explode_data.jsonl/24718
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 14463, 29600, 10155, 5014, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 1565, 4913, 11808, 25, 2951, 9207, 3989, 61477, 1669, 9069, 68587, 5384, 340, 9559, 11, 1848, 1669, 14775, 21987, 340, 743, 1848, 621, 2092, 341, 197, 324...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDeriveKey(t *testing.T) { crypto := gopaque.CryptoDefault key := crypto.NewKey(nil) //keyBytes := []byte("password") // It will produce and error on key lenght // Just enc some value then decrypt for now plain := []byte("foo") // Derive authEncrypt key from key // Key should be at least 32 bytes keyBytes, err := key.MarshalBinary() assertNoErr(t, err) assert(t, len(keyBytes) >= 32, "Key has not sufficient bytes. It has %v", len(keyBytes)) authEncKey := crypto.NewKeyFromReader(bytes.NewReader(keyBytes)) encBytes, err := crypto.AuthEncrypt(authEncKey, plain) assertNoErr(t, err) // Derive authDecrypt key from key authDecKey := crypto.NewKeyFromReader(bytes.NewReader(keyBytes)) decBytes, err := crypto.AuthDecrypt(authDecKey, encBytes) assertNoErr(t, err) assert(t, bytes.Equal(decBytes, plain), "Mismatch, got %v wanted %v", decBytes, plain) // Derive authDecrypt key from a random key // It should produce an error while decrypting keyRand := crypto.NewKey(nil) keyRandBytes, err := keyRand.MarshalBinary() assertNoErr(t, err) assert(t, len(keyBytes) >= 32, "Key has not sufficient bytes. It has %v", len(keyRandBytes)) authDecKey = crypto.NewKeyFromReader(bytes.NewReader(keyRandBytes)) decBytes, err = crypto.AuthDecrypt(authDecKey, encBytes) assert(t, false == bytes.Equal(decBytes, plain), "Match. A successful decryption occurred\nKey 1 derived from %v\nKey 2 derived from %v", keyBytes, keyRandBytes) }
explode_data.jsonl/68728
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 494 }
[ 2830, 3393, 22171, 533, 1592, 1155, 353, 8840, 836, 8, 341, 1444, 9444, 1669, 342, 95480, 727, 9444, 3675, 198, 23634, 1669, 19028, 7121, 1592, 27907, 340, 197, 322, 792, 7078, 1669, 3056, 3782, 445, 3833, 899, 442, 1084, 686, 8193, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNew(t *testing.T) { testingFilePath := "/my/dir/vault.json" fs = afero.NewMemMapFs() // inmemory vault v := New() assert.NotNil(t, v) // open/create new vault v, err := Create(testingFilePath, "secret") assert.NoError(t, err) assert.NotNil(t, v) ok, _ := afero.Exists(fs, testingFilePath) assert.True(t, ok) // Reopen again v, err = Open(testingFilePath, "secret") assert.NoError(t, err) assert.NotNil(t, v) privKey, pubKey, _ := testing2.ReadTestKey("../../testdata/key-1.json") addr, _ := address.NewAddress("foobar!") acc := &AccountInfo{ Address: addr, Name: "Foo Bar", Settings: nil, Keys: []KeyPair{ { KeyPair: bmcrypto.KeyPair{ Generator: "", FingerPrint: pubKey.Fingerprint(), PrivKey: *privKey, PubKey: *pubKey, }, Active: true, }, }, Pow: &proofofwork.ProofOfWork{}, RoutingID: "12345678", } v.AddAccount(*acc) // Write to disk err = v.Persist() assert.NoError(t, err) // Check if the backup exists ok, _ = afero.Exists(fs, "/my/dir/vault.json.backup") assert.True(t, ok) // Open vault with wrong password v, err = Open(testingFilePath, "incorrect password") assert.Errorf(t, err, "incorrect password") assert.Nil(t, v) // Open vault with correct password v, err = Open(testingFilePath, "secret") assert.NoError(t, err) assert.Len(t, v.Store.Accounts, 1) }
explode_data.jsonl/50661
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 608 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 197, 8840, 19090, 1669, 3521, 2408, 88226, 5457, 945, 4323, 1837, 53584, 284, 264, 802, 78, 7121, 18816, 2227, 48300, 2822, 197, 322, 304, 17269, 34584, 198, 5195, 1669, 1532, 741, 6948, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddPKCS7Padding(t *testing.T) { type args struct { s string blockSize int } tests := []struct { name string args args want string }{ {"Normal Padding", args{"YELLOW SUBMARINE", 20}, "YELLOW SUBMARINE\x04\x04\x04\x04"}, {"Normal Padding", args{"RED", 3}, "RED\x03\x03\x03"}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := AddPKCS7Padding(tt.args.s, tt.args.blockSize); got != tt.want { t.Errorf("AddPKCS7Padding() = %v, want %v", []byte(got), []byte(tt.want)) } }) } }
explode_data.jsonl/66879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 2212, 22242, 6412, 22, 21616, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 260, 914, 198, 197, 47996, 1695, 526, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIngestMemtableOverlaps(t *testing.T) { comparers := []Comparer{ {Name: "default", Compare: DefaultComparer.Compare}, {Name: "reverse", Compare: func(a, b []byte) int { return DefaultComparer.Compare(b, a) }}, } m := make(map[string]*Comparer) for i := range comparers { c := &comparers[i] m[c.Name] = c } for _, comparer := range comparers { t.Run(comparer.Name, func(t *testing.T) { var mem *memTable parseMeta := func(s string) *fileMetadata { parts := strings.Split(s, "-") meta := &fileMetadata{} if len(parts) != 2 { t.Fatalf("malformed table spec: %s", s) } if strings.Contains(parts[0], ".") { if !strings.Contains(parts[1], ".") { t.Fatalf("malformed table spec: %s", s) } meta.Smallest = base.ParseInternalKey(parts[0]) meta.Largest = base.ParseInternalKey(parts[1]) } else { meta.Smallest = InternalKey{UserKey: []byte(parts[0])} meta.Largest = InternalKey{UserKey: []byte(parts[1])} } if mem.cmp(meta.Smallest.UserKey, meta.Largest.UserKey) > 0 { meta.Smallest, meta.Largest = meta.Largest, meta.Smallest } return meta } datadriven.RunTest(t, "testdata/ingest_memtable_overlaps", func(d *datadriven.TestData) string { switch d.Cmd { case "define": b := newBatch(nil) if err := runBatchDefineCmd(d, b); err != nil { return err.Error() } opts := &Options{ Comparer: &comparer, } opts.EnsureDefaults() if len(d.CmdArgs) > 1 { return fmt.Sprintf("%s expects at most 1 argument", d.Cmd) } if len(d.CmdArgs) == 1 { opts.Comparer = m[d.CmdArgs[0].String()] if opts.Comparer == nil { return fmt.Sprintf("%s unknown comparer: %s", d.Cmd, d.CmdArgs[0].String()) } } mem = newMemTable(memTableOptions{Options: opts}) if err := mem.apply(b, 0); err != nil { return err.Error() } return "" case "overlaps": var buf bytes.Buffer for _, data := range strings.Split(d.Input, "\n") { var meta []*fileMetadata for _, part := range strings.Fields(data) { meta = append(meta, parseMeta(part)) } fmt.Fprintf(&buf, "%t\n", ingestMemtableOverlaps(mem.cmp, mem, meta)) } return buf.String() default: 
return fmt.Sprintf("unknown command: %s", d.Cmd) } }) }) } }
explode_data.jsonl/40258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1116 }
[ 2830, 3393, 641, 6969, 18816, 2005, 1918, 89722, 1155, 353, 8840, 836, 8, 341, 32810, 1732, 388, 1669, 3056, 31942, 515, 197, 197, 63121, 25, 330, 2258, 497, 23768, 25, 7899, 31942, 32377, 1583, 197, 197, 63121, 25, 330, 25903, 497, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStore(t *testing.T) { _, storage, backing, si, destroyFunc := newStorage(t) defer destroyFunc() if err := si.Create(context.TODO(), key(), validNewRangeAllocation(), nil, 0); err != nil { t.Fatalf("unexpected error: %v", err) } if err := storage.Allocate(net.ParseIP("192.168.1.2")); err != nil { t.Fatal(err) } ok, err := backing.Allocate(1) if err != nil { t.Fatal(err) } if ok { t.Fatal("Expected allocation to fail") } if err := storage.Allocate(net.ParseIP("192.168.1.2")); err != ipallocator.ErrAllocated { t.Fatal(err) } }
explode_data.jsonl/354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 6093, 1155, 353, 8840, 836, 8, 341, 197, 6878, 5819, 11, 24668, 11, 4403, 11, 6921, 9626, 1669, 501, 5793, 1155, 340, 16867, 6921, 9626, 741, 743, 1848, 1669, 4403, 7251, 5378, 90988, 1507, 1376, 1507, 2697, 3564, 6046, 78...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCommandHandler_Immediate(t *testing.T) { inner := &mocks.CommandHandler{} m := NewMiddleware() h := eh.UseCommandHandlerMiddleware(inner, m) cmd := mocks.Command{ ID: uuid.New(), Content: "content", } if err := h.HandleCommand(context.Background(), cmd); err != nil { t.Error("there should be no error:", err) } if !reflect.DeepEqual(inner.Commands, []eh.Command{cmd}) { t.Error("the command should have been handled:", inner.Commands) } }
explode_data.jsonl/24290
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 4062, 3050, 62, 52734, 1155, 353, 8840, 836, 8, 341, 197, 4382, 1669, 609, 16712, 82, 12714, 3050, 16094, 2109, 1669, 1532, 24684, 741, 9598, 1669, 35246, 9046, 4062, 3050, 24684, 68603, 11, 296, 340, 25920, 1669, 68909, 127...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRegister(t *testing.T) { mockPGUserRepo := mocks.PGUserRepoMock{} t.Run("success", func(t *testing.T) { mockUser := mocks.SetMockUser() mockPGUserRepo.On( "Create", mock.Anything, mock.Anything, ).Return(mockUser, nil).Once() userServiceMock := NewUserService(&mockPGUserRepo) ctx := context.Background() user, err := userServiceMock.Register( ctx, &models.User{Username: "name", Password: "password12"}, ) assert.NoError(t, err) assert.Exactly(t, mockUser, user) }) t.Run("error-failed", func(t *testing.T) { mockPGUserRepo.On( "Create", mock.Anything, mock.Anything, ).Return(models.User{}, errors.New("Unexpexted Error")).Once() userServiceMock := NewUserService(&mockPGUserRepo) user, err := userServiceMock.Register( context.TODO(), &models.User{Username: "name", Password: "password12"}, ) assert.Error(t, err) assert.Exactly(t, models.User{}, user) }) }
explode_data.jsonl/51799
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 397 }
[ 2830, 3393, 8690, 1155, 353, 8840, 836, 8, 341, 77333, 11383, 1474, 25243, 1669, 68909, 1069, 38, 1474, 25243, 11571, 31483, 3244, 16708, 445, 5630, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 77333, 1474, 1669, 68909, 4202, 11571, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckRunnerPodCompletedStatus(t *testing.T) { tests := map[string]struct { isErr bool engine chaosTypes.EngineInfo }{ "Test Positive-1": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "engine-runner-p1", Namespace: "default", }, Spec: v1alpha1.ChaosEngineSpec{ Appinfo: v1alpha1.ApplicationParams{ Applabel: "app=nginx", AppKind: "deployment", }, EngineState: v1alpha1.EngineStateActive, AnnotationCheck: "false", Components: v1alpha1.ComponentParams{ Runner: v1alpha1.RunnerInfo{ Image: "fake-runner-image", }, }, Experiments: []v1alpha1.ExperimentList{ { Name: "exp-1", }, }, }, Status: v1alpha1.ChaosEngineStatus{ EngineStatus: v1alpha1.EngineStatusCompleted, }, }, }, isErr: false, }, "Test Positive-2": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "engine-runner-p2", Namespace: "default", }, Spec: v1alpha1.ChaosEngineSpec{ Appinfo: v1alpha1.ApplicationParams{ Applabel: "app=nginx", AppKind: "deployment", }, EngineState: v1alpha1.EngineStateActive, AnnotationCheck: "false", Components: v1alpha1.ComponentParams{ Runner: v1alpha1.RunnerInfo{ Image: "fake-runner-image", }, }, Experiments: []v1alpha1.ExperimentList{ { Name: "exp-1", }, }, }, Status: v1alpha1.ChaosEngineStatus{ EngineStatus: v1alpha1.EngineStatusCompleted, }, }, }, isErr: false, }, } for name, mock := range tests { t.Run(name, func(t *testing.T) { r := CreateFakeClient(t) err := r.client.Create(context.TODO(), mock.engine.Instance) if err != nil { fmt.Printf("Unable to create engine: %v", err) } val := r.checkRunnerContainerCompletedStatus(&mock.engine) if mock.isErr && val == false { t.Fatalf("Test %q failed: expected error not to be nil", name) } if !mock.isErr && val == true { t.Fatalf("Test %q failed: expected error to be nil", name) } }) } }
explode_data.jsonl/32132
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1111 }
[ 2830, 3393, 3973, 19486, 23527, 22724, 2522, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 19907, 7747, 220, 1807, 198, 197, 80118, 26915, 4173, 54424, 1731, 198, 197, 59403, 197, 197, 1, 2271, 43903, 12, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCustomer_Retrieve(t *testing.T) { key := "test api key" mockResponse := new(invdendpoint.Customer) mockResponse.Id = int64(1234) mockResponse.Name = "nomenclature" mockResponse.CreatedAt = time.Now().UnixNano() server, err := invdmockserver.New(200, mockResponse, "json", true) if err != nil { t.Fatal(err) } defer server.Close() conn := mockConnection(key, server) entity := conn.NewCustomer() retrievedPayment, err := entity.Retrieve(int64(1234)) if err != nil { t.Fatal("Error retrieving entity", err) } if !reflect.DeepEqual(retrievedPayment.Customer, mockResponse) { t.Fatal("Error messages do not match up") } }
explode_data.jsonl/15009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 12792, 2568, 295, 45004, 1155, 353, 8840, 836, 8, 341, 23634, 1669, 330, 1944, 6330, 1376, 1837, 77333, 2582, 1669, 501, 5900, 16598, 32540, 37293, 340, 77333, 2582, 6444, 284, 526, 21, 19, 7, 16, 17, 18, 19, 340, 77333, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestObjectsAllWithLimit(t *testing.T) { ctx := context.Background() c, rollback := makeConnectionWithObjectHeaders(t) defer rollback() objects, err := c.ObjectsAll(ctx, CONTAINER, &swift.ObjectsOpts{Limit: 1}) if err != nil { t.Fatal(err) } if len(objects) != 1 || objects[0].Name != OBJECT { t.Error("Incorrect listing", objects) } }
explode_data.jsonl/12702
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 11543, 2403, 2354, 16527, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 11, 60414, 1669, 1281, 4526, 2354, 1190, 10574, 1155, 340, 16867, 60414, 741, 197, 19210, 11, 1848, 1669, 272, 40314, 2403, 7502, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_fn(t *testing.T) { ast := assert.New(t) for _, tc := range tcs { fmt.Printf("~~%v~~\n", tc) ast.Equal(tc.ans, judgeSquareSum(tc.c), "输入:%v", tc) } }
explode_data.jsonl/61745
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 15246, 1155, 353, 8840, 836, 8, 341, 88836, 1669, 2060, 7121, 1155, 692, 2023, 8358, 17130, 1669, 2088, 259, 4837, 341, 197, 11009, 19367, 445, 5817, 4, 85, 5817, 59, 77, 497, 17130, 340, 197, 88836, 12808, 44415, 13, 596,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSeekFirst0(t *testing.T) { b := TreeNew(cmp) _, err := b.SeekFirst() if g, e := err, io.EOF; g != e { t.Fatal(g, e) } }
explode_data.jsonl/80508
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 39350, 5338, 15, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 8942, 3564, 98665, 340, 197, 6878, 1848, 1669, 293, 76465, 5338, 741, 743, 342, 11, 384, 1669, 1848, 11, 6399, 86492, 26, 342, 961, 384, 341, 197, 3244, 26133, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestGolastic_Bulk(t *testing.T) { golastic, err := New("http://localhost:9200") if err != nil { Error(t, err) } b := Bulk() for i := 1; i <= 10; i++ { b.Index(strconv.Itoa(i), TestProduct{i, fmt.Sprintf("Product %d", i)}) } errs := golastic.From("test", "words").Bulk(b) if len(errs) > 0 { t.Error("An error has ocurred: " + errs[0].Error()) } result, err := golastic.From("test", "product").Exec(POST_METHOD, Query("match_all")) if err != nil { t.Error("An error has ocurred: " + err.Error()) } totalHits := len(result.Hits.Hits) if totalHits < 2 { t.Errorf("Wrong number of hits: %d\n", totalHits) } result, err = golastic.From("test", "words").Exec(DELETE_METHOD, Query("match_all")) if err != nil { t.Error("An error has ocurred: " + err.Error()) } totalHits = len(result.Hits.Hits) if totalHits > 0 { t.Errorf("Wrong number of hits: %d\n", totalHits) } }
explode_data.jsonl/41761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 386 }
[ 2830, 3393, 38, 337, 5064, 1668, 21785, 1155, 353, 8840, 836, 8, 341, 3174, 337, 5064, 11, 1848, 1669, 1532, 445, 1254, 1110, 8301, 25, 24, 17, 15, 15, 1138, 743, 1848, 961, 2092, 341, 197, 58421, 1155, 11, 1848, 340, 197, 630, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestConnectionDefinition_IncludesConnectionAndEdgeFields(t *testing.T) { query := ` query FriendsQuery { user { friends(first: 2) { totalCount edges { friendshipTime node { name } } } } } ` expected := &graphql.Result{ Data: map[string]interface{}{ "user": map[string]interface{}{ "friends": map[string]interface{}{ "totalCount": 5, "edges": []interface{}{ map[string]interface{}{ "friendshipTime": "Yesterday", "node": map[string]interface{}{ "name": "Dan", }, }, map[string]interface{}{ "friendshipTime": "Yesterday", "node": map[string]interface{}{ "name": "Nick", }, }, }, }, }, }, } result := graphql.Graphql(graphql.Params{ Schema: connectionTestSchema, RequestString: query, }) if !reflect.DeepEqual(result, expected) { t.Fatalf("wrong result, graphql result diff: %v", testutil.Diff(expected, result)) } }
explode_data.jsonl/35754
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 553 }
[ 2830, 3393, 4526, 10398, 25972, 7396, 4526, 3036, 11656, 8941, 1155, 353, 8840, 836, 8, 341, 27274, 1669, 22074, 414, 3239, 22508, 2859, 341, 286, 1196, 341, 688, 4780, 17981, 25, 220, 17, 8, 341, 310, 59713, 198, 310, 12822, 341, 106...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDealStreamSendReceiveMultipleSuccessful(t *testing.T) { // send proposal, read in handler, send response back, // read response, bgCtx := context.Background() td := shared_testutil.NewLibp2pTestData(bgCtx, t) fromNetwork := network.NewFromLibp2pHost(td.Host1) toNetwork := network.NewFromLibp2pHost(td.Host2) toPeer := td.Host2.ID() // set up stream handler, channels, and response dr := shared_testutil.MakeTestStorageNetworkSignedResponse() done := make(chan bool) var resigningFunc network.ResigningFunc = func(ctx context.Context, data interface{}) (*crypto.Signature, error) { return nil, nil } tr2 := &testReceiver{t: t, dealStreamHandler: func(s network.StorageDealStream) { _, err := s.ReadDealProposal() require.NoError(t, err) require.NoError(t, s.WriteDealResponse(dr, resigningFunc)) done <- true }} require.NoError(t, toNetwork.SetDelegate(tr2)) ctx, cancel := context.WithTimeout(bgCtx, 10*time.Second) defer cancel() // start sending deal proposal ds1, err := fromNetwork.NewDealStream(ctx, toPeer) require.NoError(t, err) dp := shared_testutil.MakeTestStorageNetworkProposal() // write proposal require.NoError(t, ds1.WriteDealProposal(dp)) // read response and verify it's the one we told toNetwork to send responseReceived, _, err := ds1.ReadDealResponse() require.NoError(t, err) assert.Equal(t, dr, responseReceived) select { case <-ctx.Done(): t.Errorf("failed to receive messages") case <-done: } }
explode_data.jsonl/19994
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 515 }
[ 2830, 3393, 72841, 3027, 11505, 14742, 32089, 36374, 1155, 353, 8840, 836, 8, 341, 197, 322, 3624, 13734, 11, 1349, 304, 7013, 11, 3624, 2033, 1182, 345, 197, 322, 1349, 2033, 3554, 76131, 23684, 1669, 2266, 19047, 741, 76373, 1669, 609...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBucketRegion(t *testing.T) { maybeSkipManualTest(t) ctx := context.Background() region := getBucketRegion(t, ctx, "grail-ysaito") require.Equal(t, region, "us-west-2") region = getBucketRegion(t, ctx, "grail-test-us-east-1") require.Equal(t, region, "us-east-1") region = getBucketRegion(t, ctx, "grail-test-us-east-2") require.Equal(t, region, "us-east-2") }
explode_data.jsonl/41236
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 36018, 14091, 1155, 353, 8840, 836, 8, 341, 2109, 49791, 35134, 52092, 2271, 1155, 692, 20985, 1669, 2266, 19047, 741, 197, 3943, 1669, 633, 36018, 14091, 1155, 11, 5635, 11, 330, 901, 604, 12, 1047, 1315, 78, 1138, 17957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDocDelete(t *testing.T) { ctx := context.Background() c, srv := newMock(t) srv.addRPC( &pb.CommitRequest{ Database: "projects/projectID/databases/(default)", Writes: []*pb.Write{ {Operation: &pb.Write_Delete{"projects/projectID/databases/(default)/documents/C/d"}}, }, }, &pb.CommitResponse{ WriteResults: []*pb.WriteResult{{}}, }) wr, err := c.Collection("C").Doc("d").Delete(ctx) if err != nil { t.Fatal(err) } if !testEqual(wr, &WriteResult{}) { t.Errorf("got %+v, want %+v", wr, writeResultForSet) } }
explode_data.jsonl/15813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 9550, 6435, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 11, 43578, 1669, 501, 11571, 1155, 340, 1903, 10553, 1364, 29528, 1006, 197, 197, 5, 16650, 53036, 1900, 515, 298, 197, 5988, 25, 330, 17161, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFilteredGenMeta(t *testing.T) { var testConfig = common.NewConfig() filteredGen := &GenDefaultMeta{} podIndexer, err := NewPodNameIndexer(*testConfig, filteredGen) assert.Nil(t, err) podName := "testpod" ns := "testns" pod := Pod{ Metadata: ObjectMeta{ Name: podName, Namespace: ns, Labels: map[string]string{ "foo": "bar", "x": "y", }, Annotations: map[string]string{ "a": "b", "c": "d", }, }, Spec: PodSpec{}, } indexers := podIndexer.GetMetadata(&pod) assert.Equal(t, len(indexers), 1) rawLabels, _ := indexers[0].Data["labels"] assert.NotNil(t, rawLabels) labelMap, ok := rawLabels.(common.MapStr) assert.Equal(t, ok, true) assert.Equal(t, len(labelMap), 2) rawAnnotations := indexers[0].Data["annotations"] assert.Nil(t, rawAnnotations) filteredGen.labels = []string{"foo"} filteredGen.annotations = []string{"a"} podIndexer, err = NewPodNameIndexer(*testConfig, filteredGen) assert.Nil(t, err) indexers = podIndexer.GetMetadata(&pod) assert.Equal(t, len(indexers), 1) rawLabels, _ = indexers[0].Data["labels"] assert.NotNil(t, rawLabels) labelMap, ok = rawLabels.(common.MapStr) assert.Equal(t, ok, true) assert.Equal(t, len(labelMap), 1) ok, _ = labelMap.HasKey("foo") assert.Equal(t, ok, true) rawAnnotations = indexers[0].Data["annotations"] assert.NotNil(t, rawAnnotations) annotationsMap, ok := rawAnnotations.(common.MapStr) assert.Equal(t, ok, true) assert.Equal(t, len(annotationsMap), 1) ok, _ = annotationsMap.HasKey("a") assert.Equal(t, ok, true) }
explode_data.jsonl/80957
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 658 }
[ 2830, 3393, 67310, 9967, 12175, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 2648, 284, 4185, 7121, 2648, 2822, 50108, 291, 9967, 1669, 609, 9967, 3675, 12175, 16094, 3223, 347, 1552, 261, 11, 1848, 1669, 1532, 23527, 675, 1552, 261, 4071,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSeriesTable(t *testing.T) { tRef := time.Date(2017, 1, 1, 5, 30, 12, 0, time.UTC) // Create new series init := []Series{ NewSeriesFloat64("test", &SeriesInit{1, 0}, 1.0, 2.0, 3.0), NewSeriesInt64("test", &SeriesInit{1, 0}, 1, 2, 3), NewSeriesString("test", &SeriesInit{1, 0}, "1", "2", "3"), NewSeriesTime("test", &SeriesInit{1, 0}, tRef, tRef.Add(24*time.Hour), tRef.Add(2*24*time.Hour)), NewSeriesGeneric("test", civil.Date{}, &SeriesInit{0, 1}, civil.Date{2018, time.May, 01}, civil.Date{2018, time.May, 02}, civil.Date{2018, time.May, 03}), } expected := []string{ `+-----+---------+ | | TEST | +-----+---------+ | 0: | 1 | | 1: | 2 | | 2: | 3 | +-----+---------+ | 3X1 | FLOAT64 | +-----+---------+`, `+-----+-------+ | | TEST | +-----+-------+ | 0: | 1 | | 1: | 2 | | 2: | 3 | +-----+-------+ | 3X1 | INT64 | +-----+-------+`, `+-----+--------+ | | TEST | +-----+--------+ | 0: | 1 | | 1: | 2 | | 2: | 3 | +-----+--------+ | 3X1 | STRING | +-----+--------+`, `+-----+-------------------------------+ | | TEST | +-----+-------------------------------+ | 0: | 2017-01-01 05:30:12 +0000 UTC | | 1: | 2017-01-02 05:30:12 +0000 UTC | | 2: | 2017-01-03 05:30:12 +0000 UTC | +-----+-------------------------------+ | 3X1 | TIME | +-----+-------------------------------+`, `+-----+------------+ | | TEST | +-----+------------+ | 0: | 2018-05-01 | | 1: | 2018-05-02 | | 2: | 2018-05-03 | +-----+------------+ | 3X1 | CIVIL DATE | +-----+------------+`, } for i := range init { s := init[i] if v, ok := s.(Tabler); ok { if strings.TrimSpace(v.Table()) != strings.TrimSpace(expected[i]) { t.Errorf("wrong val: expected: %v actual: %v", expected[i], v.Table()) } } } }
explode_data.jsonl/10004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 865 }
[ 2830, 3393, 25544, 2556, 1155, 353, 8840, 836, 8, 1476, 3244, 3945, 1669, 882, 8518, 7, 17, 15, 16, 22, 11, 220, 16, 11, 220, 16, 11, 220, 20, 11, 220, 18, 15, 11, 220, 16, 17, 11, 220, 15, 11, 882, 87069, 692, 197, 322, 423...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestValidateJoinConfiguration exercises ValidateJoinConfiguration with a
// range of JoinConfiguration values and asserts only whether validation
// passes (no field errors) or fails, per each case's expected flag.
func TestValidateJoinConfiguration(t *testing.T) {
	var tests = []struct {
		s        *kubeadm.JoinConfiguration
		expected bool // true when the configuration must validate cleanly
	}{
		// Empty configuration: must fail.
		{&kubeadm.JoinConfiguration{}, false},
		// Discovery present but NodeRegistration missing: must fail.
		{&kubeadm.JoinConfiguration{
			CACertPath: "/some/cert.crt",
			Discovery: kubeadm.Discovery{
				BootstrapToken: &kubeadm.BootstrapTokenDiscovery{
					Token: "abcdef.1234567890123456@foobar",
				},
				File: &kubeadm.FileDiscovery{
					KubeConfigPath: "foo",
				},
			},
		}, false},
		{&kubeadm.JoinConfiguration{ // Pass without JoinControlPlane
			CACertPath: "/some/cert.crt",
			Discovery: kubeadm.Discovery{
				BootstrapToken: &kubeadm.BootstrapTokenDiscovery{
					Token:             "abcdef.1234567890123456",
					APIServerEndpoint: "1.2.3.4:6443",
					CACertHashes:      []string{"aaaa"},
				},
				TLSBootstrapToken: "abcdef.1234567890123456",
			},
			NodeRegistration: kubeadm.NodeRegistrationOptions{
				Name:      "aaa",
				CRISocket: "/var/run/dockershim.sock",
			},
		}, true},
		{&kubeadm.JoinConfiguration{ // Pass with JoinControlPlane
			CACertPath: "/some/cert.crt",
			Discovery: kubeadm.Discovery{
				BootstrapToken: &kubeadm.BootstrapTokenDiscovery{
					Token:             "abcdef.1234567890123456",
					APIServerEndpoint: "1.2.3.4:6443",
					CACertHashes:      []string{"aaaa"},
				},
				TLSBootstrapToken: "abcdef.1234567890123456",
			},
			NodeRegistration: kubeadm.NodeRegistrationOptions{
				Name:      "aaa",
				CRISocket: "/var/run/dockershim.sock",
			},
			ControlPlane: &kubeadm.JoinControlPlane{
				LocalAPIEndpoint: kubeadm.APIEndpoint{
					AdvertiseAddress: "1.2.3.4",
					BindPort:         1234,
				},
			},
		}, true},
		{&kubeadm.JoinConfiguration{ // Fail JoinControlPlane.AdvertiseAddress validation
			CACertPath: "/some/cert.crt",
			Discovery: kubeadm.Discovery{
				BootstrapToken: &kubeadm.BootstrapTokenDiscovery{
					Token:             "abcdef.1234567890123456",
					APIServerEndpoint: "1.2.3.4:6443",
					CACertHashes:      []string{"aaaa"},
				},
				TLSBootstrapToken: "abcdef.1234567890123456",
			},
			NodeRegistration: kubeadm.NodeRegistrationOptions{
				Name:      "aaa",
				CRISocket: "/var/run/dockershim.sock",
			},
			ControlPlane: &kubeadm.JoinControlPlane{
				LocalAPIEndpoint: kubeadm.APIEndpoint{
					// "aaa" is not a parseable IP address.
					AdvertiseAddress: "aaa",
					BindPort:         1234,
				},
			},
		}, false},
		{&kubeadm.JoinConfiguration{ // Fail JoinControlPlane.BindPort validation
			CACertPath: "/some/cert.crt",
			Discovery: kubeadm.Discovery{
				BootstrapToken: &kubeadm.BootstrapTokenDiscovery{
					Token:             "abcdef.1234567890123456",
					APIServerEndpoint: "1.2.3.4:6443",
					CACertHashes:      []string{"aaaa"},
				},
				TLSBootstrapToken: "abcdef.1234567890123456",
			},
			NodeRegistration: kubeadm.NodeRegistrationOptions{
				Name:      "aaa",
				CRISocket: "/var/run/dockershim.sock",
			},
			ControlPlane: &kubeadm.JoinControlPlane{
				LocalAPIEndpoint: kubeadm.APIEndpoint{
					AdvertiseAddress: "1.2.3.4",
					// Negative port numbers are invalid.
					BindPort: -1,
				},
			},
		}, false},
	}
	for _, rt := range tests {
		actual := ValidateJoinConfiguration(rt.s)
		// Validation "passes" when the returned error list is empty.
		if (len(actual) == 0) != rt.expected {
			t.Errorf(
				"failed ValidateJoinConfiguration:\n\texpected: %t\n\t actual: %t",
				rt.expected,
				(len(actual) == 0),
			)
		}
	}
}
explode_data.jsonl/39228
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1599 }
[ 2830, 3393, 17926, 12292, 7688, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 1903, 286, 353, 74, 392, 3149, 76, 22363, 7688, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 90, 5, 74, 392, 3149, 76, 22363, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHas(t *testing.T) { var e1, e2, e3 float32 e := createRandomObject(e1) if v, ok := e.(float32); ok { e1 = v } e = createRandomObject(e2) if v, ok := e.(float32); ok { e2 = v } e = createRandomObject(e2) if v, ok := e.(float32); ok { e3 = v } s := New() if s.Has(e1) { t.Errorf("expected a new set to not contain %v", e1) } s.Add(e1) s.Add(e2) if !s.Has(e1) { t.Errorf("expected the set to contain %v", e1) } if !s.Has(e2) { t.Errorf("expected the set to contain %v", e2) } if s.Has(e3) { t.Errorf("did not expect the set to contain %v", e3) } }
explode_data.jsonl/35007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 293 }
[ 2830, 3393, 10281, 1155, 353, 8840, 836, 8, 341, 2405, 384, 16, 11, 384, 17, 11, 384, 18, 2224, 18, 17, 198, 7727, 1669, 1855, 13999, 1190, 2026, 16, 340, 743, 348, 11, 5394, 1669, 384, 12832, 3649, 18, 17, 1215, 5394, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestCancelMultipleSessions verifies that CANCEL SESSIONS terminates all
// matching sessions cluster-wide: two sessions opened on node 1 are
// canceled from a control connection on node 2, after which both original
// connections must observe a bad-connection error.
func TestCancelMultipleSessions(t *testing.T) {
	defer leaktest.AfterTest(t)()
	ctx := context.TODO()

	tc := serverutils.StartTestCluster(t, 2, /* numNodes */
		base.TestClusterArgs{
			ReplicationMode: base.ReplicationManual,
		})
	defer tc.Stopper().Stop(ctx)

	// Open two connections on node 1.
	var conns [2]*gosql.Conn
	for i := 0; i < 2; i++ {
		var err error
		if conns[i], err = tc.ServerConn(0).Conn(ctx); err != nil {
			t.Fatal(err)
		}
		// Tag the sessions so the control connection can target them by name.
		if _, err := conns[i].ExecContext(ctx, "SET application_name = 'killme'"); err != nil {
			t.Fatal(err)
		}
	}
	// Open a control connection on node 2.
	ctlconn, err := tc.ServerConn(1).Conn(ctx)
	if err != nil {
		t.Fatal(err)
	}

	// Cancel the sessions on node 1.
	if _, err = ctlconn.ExecContext(ctx,
		`CANCEL SESSIONS SELECT session_id FROM [SHOW CLUSTER SESSIONS] WHERE application_name = 'killme'`,
	); err != nil {
		t.Fatal(err)
	}

	// Verify that the connections on node 1 are closed.
	for i := 0; i < 2; i++ {
		_, err := conns[i].ExecContext(ctx, "SELECT 1")
		// The driver surfaces a canceled session as ErrBadConn.
		if err != gosqldriver.ErrBadConn {
			t.Fatalf("session %d not canceled; actual error: %s", i, err)
		}
	}
}
explode_data.jsonl/54861
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 9269, 32089, 59062, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 20985, 1669, 2266, 90988, 2822, 78255, 1669, 3538, 6031, 12101, 2271, 28678, 1155, 11, 220, 17, 11, 1391, 1629, 12288, 735, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestJsCommonPlugin checks that the builtin CommonJS protoc-gen-js plugin
// predicts the same output file (test_pb.js, relative to the package dir)
// and options for a variety of proto inputs.
func TestJsCommonPlugin(t *testing.T) {
	// All cases share the same directive set and plugin name; only the
	// proto input, the relative directory, and the predicted output vary.
	sharedDirectives := plugintest.WithDirectives(
		"proto_plugin", "js implementation builtin:js:common",
	)
	mkCase := func(rel, input, output string) plugintest.Case {
		return plugintest.Case{
			Rel:        rel,
			Input:      input,
			Directives: sharedDirectives,
			PluginName: "js",
			Configuration: plugintest.WithConfiguration(
				plugintest.WithOutputs(output),
				plugintest.WithOptions("import_style=commonjs"),
			),
		}
	}

	plugintest.Cases(t, &builtin.JsCommonPlugin{}, map[string]plugintest.Case{
		"empty file":            mkCase("", "", "test_pb.js"),
		"only services":         mkCase("", "service S{}", "test_pb.js"),
		"single message & enum": mkCase("", "message M{}", "test_pb.js"),
		"with a package":        mkCase("", "package pkg;\n\nmessage M{}", "test_pb.js"),
		"relative directory":    mkCase("rel", "message M{}", "rel/test_pb.js"),
	})
}
explode_data.jsonl/69908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 723 }
[ 2830, 3393, 30480, 10839, 11546, 1155, 353, 8840, 836, 8, 341, 197, 47474, 396, 477, 727, 2264, 1155, 11, 609, 42457, 3503, 82, 10839, 11546, 22655, 2415, 14032, 60, 47474, 396, 477, 727, 519, 515, 197, 197, 1, 3194, 1034, 788, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuccessRevParse(t *testing.T) { testRepo := newTestRepo(t) defer testRepo.cleanup(t) mainRev, err := testRepo.sut.RevParse(git.DefaultBranch) require.Nil(t, err) require.Equal(t, testRepo.firstCommit, mainRev) branchRev, err := testRepo.sut.RevParse(testRepo.branchName) require.Nil(t, err) require.Equal(t, testRepo.thirdBranchCommit, branchRev) tagRev, err := testRepo.sut.RevParse(testRepo.firstTagName) require.Nil(t, err) require.Equal(t, testRepo.firstCommit, tagRev) tagRev, err = testRepo.sut.RevParse(testRepo.firstCommit) require.Nil(t, err) require.Equal(t, testRepo.firstCommit, tagRev) }
explode_data.jsonl/13985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 271 }
[ 2830, 3393, 7188, 36184, 14463, 1155, 353, 8840, 836, 8, 341, 18185, 25243, 1669, 501, 2271, 25243, 1155, 340, 16867, 1273, 25243, 87689, 1155, 692, 36641, 36184, 11, 1848, 1669, 1273, 25243, 514, 332, 2817, 85, 14463, 3268, 275, 13275, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestForInLoop checks JS for-in enumeration semantics: an own property
// ("y") and an own property that shadows a prototype property ("x") must
// each be visited exactly once — the shadowed prototype "x" must not cause
// a second visit. The script throws if either key repeats and evaluates to
// true only when both keys were seen.
func TestForInLoop(t *testing.T) {
	const SCRIPT = `
	function Proto() {}
	Proto.prototype.x = 42;
	var o = new Proto();
	o.y = 44;
	o.x = 45;
	var hasX = false;
	var hasY = false;
	for (var i in o) {
		switch(i) {
		case "x":
			if (hasX) {
				throw new Error("Already has X");
			}
			hasX = true;
			break;
		case "y":
			if (hasY) {
				throw new Error("Already has Y");
			}
			hasY = true;
			break;
		}
	}
	hasX && hasY;
	`
	// The script's final expression must evaluate to true.
	testScript1(SCRIPT, valueTrue, t)
}
explode_data.jsonl/75291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 323 }
[ 2830, 3393, 2461, 641, 14620, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 7527, 57677, 368, 5613, 197, 31549, 6003, 1993, 284, 220, 19, 17, 280, 2405, 297, 284, 501, 57677, 543, 22229, 2384, 284, 220, 19, 19, 280, 22229, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCopyImageInputToRepoWithLockOutput is an end-to-end test: it pushes a
// freshly-randomized image, copies it to a relocation repo with
// --lock-output, and asserts that the lock file references the relocated
// digest, that the digest-addressed image exists in the target repo, and
// that the original tag was NOT carried over by the copy.
func TestCopyImageInputToRepoWithLockOutput(t *testing.T) {
	env := BuildEnv(t)
	imgpkg := Imgpkg{t, Logger{}, env.ImgpkgPath}

	// create generic image
	assetsPath := filepath.Join("assets", "simple-app")

	// force digest to change so test is meaningful
	// (note: "unuique" typo is in the original message; left untouched)
	randFile, err := addRandomFile(assetsPath)
	if err != nil {
		t.Fatalf("failed to create unuique file: %v", err)
	}
	defer os.Remove(randFile)

	// Unique tag per run so parallel/repeated runs don't collide.
	tag := time.Now().UnixNano()
	out := imgpkg.Run([]string{"push", "--tty", "-i", fmt.Sprintf("%s:%v", env.Image, tag), "-f", assetsPath})
	imageDigestTag := fmt.Sprintf("@%s", extractDigest(out, t))

	lockOutputPath := filepath.Join(os.TempDir(), "image-relocate-lock.yml")
	defer os.Remove(lockOutputPath)

	// copy via create ref
	imgpkg.Run([]string{"copy", "--image", fmt.Sprintf("%s:%v", env.Image, tag), "--to-repo", env.RelocationRepo, "--lock-output", lockOutputPath})

	iLockBytes, err := ioutil.ReadFile(lockOutputPath)
	if err != nil {
		t.Fatalf("could not read lock-output: %v", err)
	}
	var iLock cmd.ImageLock
	err = yaml.Unmarshal(iLockBytes, &iLock)
	if err != nil {
		t.Fatalf("could not unmarshal lock output: %v", err)
	}

	// The lock must point at the relocated, digest-addressed reference.
	expectedRef := fmt.Sprintf("%s%s", env.RelocationRepo, imageDigestTag)
	if iLock.Spec.Images[0].Image != expectedRef {
		t.Fatalf("expected lock output to contain relocated ref '%s', got '%s'", expectedRef, iLock.Spec.Images[0].Image)
	}
	if err := validateImageLockApiVersionAndKind(iLock); err != nil {
		t.Fatal(err.Error())
	}
	// Digest ref must exist in the relocation repo...
	if err := validateImagePresence([]string{env.RelocationRepo + imageDigestTag}); err != nil {
		t.Fatalf("could not validate image presence: %v", err)
	}
	// ...but the original tag must not have been copied.
	if err := validateImagePresence([]string{fmt.Sprintf("%s:%v", env.RelocationRepo, tag)}); err == nil {
		t.Fatalf("expected not to find image with tag '%v', but did", tag)
	}
}
explode_data.jsonl/23216
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 675 }
[ 2830, 3393, 12106, 1906, 2505, 1249, 25243, 2354, 11989, 5097, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 7854, 14359, 1155, 340, 39162, 30069, 1669, 2362, 21888, 7351, 90, 83, 11, 9514, 22655, 6105, 13, 13033, 30069, 1820, 630, 197, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestHelpShowHelp is a smoke test for help rendering: it builds an app
// with flags, commands, authors and see-also links, then renders both the
// app-level and a command-level help screen. It makes no assertions — it
// only ensures showHelp does not panic on a fully-populated App.
func TestHelpShowHelp(t *testing.T) {
	app := NewApp()
	app.Name = "app"
	app.Version = "1.1.1"
	app.Usage = "demo app"
	app.Authors = "Guoqiang Chen <subchen@gmail.com>"
	app.Flags = []*Flag{
		{
			Name:        "i, input",
			Usage:       "input file",
			Placeholder: "file",
		},
		{
			Name:  "o, output",
			Usage: "output file",
		},
	}
	app.Commands = []*Command{
		{
			Name:  "build",
			Usage: "build project",
			Flags: []*Flag{
				{
					Name:   "debug",
					Usage:  "enable debug",
					IsBool: true,
				},
			},
			SeeAlso: "https://github.com/subchen/go-cli#build\nhttps://github.com/subchen/go-cli#build2",
		},
		{
			Name:  "release",
			Usage: "release project",
		},
	}
	app.SeeAlso = `https://github.com/subchen
https://github.com/yingzhuo`

	// reset
	// Redirect help output into a buffer so the test stays silent.
	helpWriter = new(bytes.Buffer)

	// Render app-level help, then help for the "build" subcommand.
	ctx1 := newAppHelpContext("app", app)
	showHelp(ctx1)

	ctx2 := newCommandHelpContext("app build", app.Commands[0], app)
	showHelp(ctx2)
}
explode_data.jsonl/19614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 441 }
[ 2830, 3393, 12689, 7812, 12689, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1532, 2164, 741, 28236, 2967, 284, 330, 676, 698, 28236, 35842, 284, 330, 16, 13, 16, 13, 16, 698, 28236, 85900, 284, 330, 25762, 906, 698, 28236, 25233, 1087,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSuccessPushHandler is an end-to-end smoke test of the /api/push
// endpoint against real FCM: it sends one notification to a valid and an
// intentionally bogus Android token and expects HTTP 200 for the batch.
// Skipped unconditionally (t.Skip) because it needs the ANDROID_API_KEY
// and ANDROID_TEST_TOKEN secrets from the environment.
func TestSuccessPushHandler(t *testing.T) {
	t.Skip()
	cfg := initTest()
	cfg.Android.Enabled = true
	cfg.Android.APIKey = os.Getenv("ANDROID_API_KEY")
	androidToken := os.Getenv("ANDROID_TEST_TOKEN")

	r := gofight.New()
	r.POST("/api/push").
		SetJSON(gofight.D{
			"notifications": []gofight.D{
				{
					// "bbbbb" is a deliberately invalid token; the handler
					// should still answer 200 for the request as a whole.
					"tokens":   []string{androidToken, "bbbbb"},
					"platform": core.PlatFormAndroid,
					"message":  "Welcome Android",
				},
			},
		}).
		Run(routerEngine(cfg, q), func(r gofight.HTTPResponse, rq gofight.HTTPRequest) {
			assert.Equal(t, http.StatusOK, r.Code)
		})
}
explode_data.jsonl/67614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 7188, 16644, 3050, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 741, 50286, 1669, 2930, 2271, 2822, 50286, 52924, 13690, 284, 830, 198, 50286, 52924, 24922, 1592, 284, 2643, 64883, 445, 72997, 11415, 6600, 5130, 197, 5954, 3323,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUser_change table-drives construction of a User from raw field
// values (name, age, email, note, password, authority) and checks the
// resulting struct matches the expected User via reflect.DeepEqual.
// Case names are Japanese descriptions of each scenario.
func TestUser_change(t *testing.T) {
	// args mirrors the raw inputs a caller would supply for a User.
	type args struct {
		name  string
		age   uint8
		email string
		note  string
		pass  string
		auth  uint8
	}
	tests := map[string]struct {
		args args
		want *User
		err  error // declared for symmetry; no case sets it
	}{
		"氏名にhoge fugaを指定してユーザーのインスタンスが作成されること": {
			args: args{
				name: "hoge fuga",
			},
			want: &User{Name: "hoge fuga"},
		},
		"氏名にhoge fugaを指定してNameからhoge fugaが取得できること": {
			args: args{name: "hoge fuga"},
			want: &User{Name: "hoge fuga"},
		},
		"氏名にfoo barを指定してNameからfoo barが取得できること": {
			args: args{name: "foo bar"},
			want: &User{Name: "foo bar"},
		},
		"年齢に18を指定してAgeから18が取得できること": {
			args: args{name: "foo bar", age: 18},
			want: &User{Name: "foo bar", Age: 18},
		},
		"年齢に20を指定してAgeから20が取得できること": {
			args: args{name: "foo bar", age: 20},
			want: &User{Name: "foo bar", Age: 20},
		},
		"メールアドレスにhoge@hoge.comを指定してEmailからhoge@hoge.comを取得できること": {
			args: args{name: "foo bar", email: "hoge@hoge.com"},
			want: &User{Name: "foo bar", EMail: "hoge@hoge.com"},
		},
		"メールアドレスにfoo@foo.co.jpを指定してEmailからfoo@foo.co.jpを取得できること": {
			args: args{name: "foo bar", email: "foo@foo.co.jp"},
			want: &User{Name: "foo bar", EMail: "foo@foo.co.jp"},
		},
		"備考にxxxxxxを指定してNoteからxxxxxが取得できること": {
			args: args{name: "foo bar", note: "xxxxxx"},
			want: &User{Name: "foo bar", Note: "xxxxxx"},
		},
		"備考にyyyyyyを指定してNoteからyyyyyが取得できること": {
			args: args{name: "foo bar", note: "yyyyyy"},
			want: &User{Name: "foo bar", Note: "yyyyyy"},
		},
		"パスワードにpasswordを指定してPasswordからpasswordが取得できること": {
			args: args{name: "foo bar", pass: "password"},
			want: &User{Name: "foo bar", Password: "password"},
		},
		"パスワードにpasspassを指定してPasswordからpasspassが取得できること": {
			args: args{name: "foo bar", pass: "passpass"},
			want: &User{Name: "foo bar", Password: "passpass"},
		},
		"権限にAdmin(1)を指定してAuthorityからAdmin(1)が取得できること": {
			args: args{name: "foo bar", auth: 1},
			want: &User{Name: "foo bar", Auth: 1},
		},
		"権限にWorker(2)を指定してAuthorityからWorker(2)が取得できること": {
			args: args{name: "foo bar", auth: 2},
			want: &User{Name: "foo bar", Auth: 2},
		},
	}
	for testName, arg := range tests {
		t.Run(testName, func(t *testing.T) {
			// Build the User directly from the raw inputs; note that
			// email maps to the EMail field and pass to Password.
			sut := &User{
				Name:     arg.args.name,
				Password: arg.args.pass,
				EMail:    arg.args.email,
				Auth:     arg.args.auth,
				Age:      arg.args.age,
				Note:     arg.args.note,
			}
			if reflect.DeepEqual(sut, arg.want) == false {
				t.Errorf("Not equals actual: %v, expected: %v", sut, arg.want)
			}
		})
	}
}
explode_data.jsonl/9501
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1394 }
[ 2830, 3393, 1474, 15947, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 11609, 220, 914, 198, 197, 197, 424, 256, 2622, 23, 198, 197, 57549, 914, 198, 197, 9038, 1272, 220, 914, 198, 197, 41431, 220, 914, 198, 197, 78011...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewRGB(t *testing.T) { type args struct { r uint8 g uint8 b uint8 } tests := []struct { name string args args want RGB }{ {name: "1", args: args{0, 0, 0}, want: RGB{0, 0, 0}}, {name: "3", args: args{255, 255, 255}, want: RGB{255, 255, 255}}, {name: "4", args: args{127, 127, 127}, want: RGB{127, 127, 127}}, {name: "5", args: args{1, 2, 3}, want: RGB{1, 2, 3}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := NewRGB(tt.args.r, tt.args.g, tt.args.b); !reflect.DeepEqual(got, tt.want) { t.Errorf("NewRGB() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/62943
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 305 }
[ 2830, 3393, 3564, 18184, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 7000, 2622, 23, 198, 197, 3174, 2622, 23, 198, 197, 2233, 2622, 23, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestClusterServiceClassClient runs the ClusterServiceClass client suite
// against a fresh apiserver. It was originally parameterized over all
// storage types; the loops are commented out pending CRD support, so only
// the etcd storage type is exercised today.
func TestClusterServiceClassClient(t *testing.T) {
	// rootTestFunc returns a subtest bound to one storage type: it spins up
	// a fresh apiserver+client and runs the shared client test against it.
	rootTestFunc := func(sType server.StorageType) func(t *testing.T) {
		return func(t *testing.T) {
			const name = "test-serviceclass"
			client, _, shutdownServer := getFreshApiserverAndClient(t, sType.String(), func() runtime.Object {
				return &servicecatalog.ClusterServiceClass{}
			})
			defer shutdownServer()
			if err := testClusterServiceClassClient(sType, client, name); err != nil {
				t.Fatal(err)
			}
		}
	}
	// TODO: Fix this for CRD.
	// https://github.com/kubernetes-incubator/service-catalog/issues/1256
	// for _, sType := range storageTypes {
	//	if !t.Run(sType.String(), rootTestFunc(sType)) {
	//		t.Errorf("%q test failed", sType)
	//	}
	// }
	// for _, sType := range storageTypes {
	//	if !t.Run(sType.String(), rootTestFunc(sType)) {
	//		t.Errorf("%q test failed", sType)
	//	}
	// }
	sType := server.StorageTypeEtcd
	if !t.Run(sType.String(), rootTestFunc(sType)) {
		t.Errorf("%q test failed", sType)
	}
}
explode_data.jsonl/7399
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 410 }
[ 2830, 3393, 28678, 1860, 1957, 2959, 1155, 353, 8840, 836, 8, 341, 33698, 2271, 9626, 1669, 2915, 1141, 929, 3538, 43771, 929, 8, 2915, 1155, 353, 8840, 836, 8, 341, 197, 853, 2915, 1155, 353, 8840, 836, 8, 341, 298, 4777, 829, 284,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCoreChecker table-drives segreq.CoreChecker.IsCore: a wildcard IA is
// core without consulting the inspector; otherwise the result (and any
// error) is forwarded from the mocked trust-store inspector.
func TestCoreChecker(t *testing.T) {
	tests := map[string]struct {
		IA               addr.IA
		PrepareInspector func(i *mock_infra.MockASInspector)
		ErrorAssertion   require.ErrorAssertionFunc
		ExpectedCore     bool
	}{
		// Wildcard AS (1-0): treated as core, inspector never called.
		"Wildcard": {
			IA:               xtest.MustParseIA("1-0"),
			PrepareInspector: func(i *mock_infra.MockASInspector) {},
			ErrorAssertion:   require.NoError,
			ExpectedCore:     true,
		},
		// Inspector failure propagates as an error, non-core result.
		"InspectorError": {
			IA: xtest.MustParseIA("1-ff00:0:110"),
			PrepareInspector: func(i *mock_infra.MockASInspector) {
				i.EXPECT().HasAttributes(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(false, errors.New("test error"))
			},
			ErrorAssertion: require.Error,
			ExpectedCore:   false,
		},
		"Core": {
			IA: xtest.MustParseIA("1-ff00:0:110"),
			PrepareInspector: func(i *mock_infra.MockASInspector) {
				i.EXPECT().HasAttributes(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(true, nil)
			},
			ErrorAssertion: require.NoError,
			ExpectedCore:   true,
		},
		"Non-Core": {
			IA: xtest.MustParseIA("1-ff00:0:110"),
			PrepareInspector: func(i *mock_infra.MockASInspector) {
				i.EXPECT().HasAttributes(gomock.Any(), gomock.Any(), gomock.Any()).
					Return(false, nil)
			},
			ErrorAssertion: require.NoError,
			ExpectedCore:   false,
		},
	}
	for name, test := range tests {
		t.Run(name, func(t *testing.T) {
			ctrl := gomock.NewController(t)
			defer ctrl.Finish()
			i := mock_infra.NewMockASInspector(ctrl)
			test.PrepareInspector(i)
			c := segreq.CoreChecker{Inspector: i}
			core, err := c.IsCore(context.Background(), test.IA)
			test.ErrorAssertion(t, err)
			assert.Equal(t, test.ExpectedCore, core)
		})
	}
}
explode_data.jsonl/24666
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 800 }
[ 2830, 3393, 5386, 35188, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 197, 5863, 2290, 10789, 2447, 32, 198, 197, 197, 50590, 46230, 2915, 1956, 353, 16712, 26051, 956, 24664, 1911, 46230, 340, 197, 58421,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWholeCPUCores(t *testing.T) { for _, c := range []struct { v string n int }{ {v: "1", n: 1}, {v: "1m", n: 1}, {v: "1000m", n: 1}, {v: "1001m", n: 2}, } { q, err := k8sResource.ParseQuantity(c.v) if err != nil { t.Fatal(err) } n, err := ToWholeCPUCores(q) if err != nil { t.Fatal(err) } if n != int64(c.n) { t.Fatalf("Unexpected value: %v != %v", n, c.n) } } }
explode_data.jsonl/11845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 90582, 7123, 5459, 4589, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 272, 1669, 2088, 3056, 1235, 341, 197, 5195, 914, 198, 197, 9038, 526, 198, 197, 59403, 197, 197, 90, 85, 25, 330, 16, 497, 308, 25, 220, 16, 1583, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestFailedLastCommitForPathRequest verifies that LastCommitForPath
// rejects malformed requests — invalid repository, nil repository, and a
// missing revision — each with codes.InvalidArgument.
func TestFailedLastCommitForPathRequest(t *testing.T) {
	server, serverSocketPath := startTestServices(t)
	defer server.Stop()

	client, conn := newCommitServiceClient(t, serverSocketPath)
	defer conn.Close()

	testRepo, _, cleanupFn := testhelper.NewTestRepo(t)
	defer cleanupFn()

	// A repository whose storage name does not exist on the server.
	invalidRepo := &pb.Repository{StorageName: "fake", RelativePath: "path"}

	testCases := []struct {
		desc    string
		request *pb.LastCommitForPathRequest
		code    codes.Code
	}{
		{
			desc:    "Invalid repository",
			request: &pb.LastCommitForPathRequest{Repository: invalidRepo},
			code:    codes.InvalidArgument,
		},
		{
			desc:    "Repository is nil",
			request: &pb.LastCommitForPathRequest{Revision: []byte("some-branch")},
			code:    codes.InvalidArgument,
		},
		{
			desc:    "Revision is missing",
			request: &pb.LastCommitForPathRequest{Repository: testRepo, Path: []byte("foo/bar")},
			code:    codes.InvalidArgument,
		},
	}

	for _, testCase := range testCases {
		t.Run(testCase.desc, func(t *testing.T) {
			ctx, cancel := context.WithCancel(context.Background())
			defer cancel()
			_, err := client.LastCommitForPath(ctx, testCase.request)
			// Only the gRPC status code matters; the message is not checked.
			testhelper.AssertGrpcError(t, err, testCase.code, "")
		})
	}
}
explode_data.jsonl/53835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 481 }
[ 2830, 3393, 9408, 5842, 33441, 2461, 1820, 1900, 1155, 353, 8840, 836, 8, 341, 41057, 11, 3538, 10286, 1820, 1669, 1191, 2271, 11025, 1155, 340, 16867, 3538, 30213, 2822, 25291, 11, 4534, 1669, 501, 33441, 1860, 2959, 1155, 11, 3538, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChatSrvGetStaticConfig(t *testing.T) { ctc := makeChatTestContext(t, "GetStaticConfig", 2) defer ctc.cleanup() users := ctc.users() ctx := ctc.as(t, users[0]).startCtx tc := ctc.world.Tcs[users[0].Username] res, err := ctc.as(t, ctc.users()[0]).chatLocalHandler().GetStaticConfig(ctx) require.NoError(t, err) require.Equal(t, chat1.StaticConfig{ DeletableByDeleteHistory: chat1.DeletableMessageTypesByDeleteHistory(), BuiltinCommands: tc.Context().CommandsSource.GetBuiltins(ctx), }, res) }
explode_data.jsonl/63721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 15672, 50, 10553, 1949, 11690, 2648, 1155, 353, 8840, 836, 8, 341, 89216, 66, 1669, 1281, 15672, 2271, 1972, 1155, 11, 330, 1949, 11690, 2648, 497, 220, 17, 340, 16867, 272, 10413, 87689, 741, 90896, 1669, 272, 10413, 20653,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChangefeedNodeShutdown verifies that a changefeed survives the loss
// of the node its table feed is connected to: after server 0 is stopped,
// the job is adopted elsewhere and subsequent writes still appear in the
// feed. Currently skipped (see #32232).
func TestChangefeedNodeShutdown(t *testing.T) {
	defer leaktest.AfterTest(t)()
	t.Skip("#32232")

	// Shorten the job-adoption interval so the orphaned changefeed job is
	// picked up quickly after the node shutdown; restored on exit.
	defer func(oldInterval time.Duration) {
		jobs.DefaultAdoptInterval = oldInterval
	}(jobs.DefaultAdoptInterval)
	jobs.DefaultAdoptInterval = 100 * time.Millisecond

	flushCh := make(chan struct{}, 1)
	defer close(flushCh)
	// Signal (non-blockingly) every time the sink flushes.
	knobs := base.TestingKnobs{DistSQL: &execinfra.TestingKnobs{Changefeed: &TestingKnobs{
		AfterSinkFlush: func() error {
			select {
			case flushCh <- struct{}{}:
			default:
			}
			return nil
		},
	}}}

	tc := serverutils.StartTestCluster(t, 3, base.TestClusterArgs{
		ServerArgs: base.TestServerArgs{
			UseDatabase: "d",
			Knobs:       knobs,
		},
	})
	defer tc.Stopper().Stop(context.Background())

	db := tc.ServerConn(1)
	sqlDB := sqlutils.MakeSQLRunner(db)
	sqlDB.Exec(t, `SET CLUSTER SETTING changefeed.experimental_poll_interval = '0ns'`)
	sqlDB.Exec(t, `CREATE DATABASE d`)
	sqlDB.Exec(t, `CREATE TABLE foo (a INT PRIMARY KEY, b STRING)`)
	sqlDB.Exec(t, `INSERT INTO foo VALUES (0, 'initial')`)

	// Create a factory which uses server 1 as the output of the Sink, but
	// executes the CREATE CHANGEFEED statement on server 0.
	sink, cleanup := sqlutils.PGUrl(
		t, tc.Server(0).ServingSQLAddr(), t.Name(), url.User(security.RootUser))
	defer cleanup()
	f := cdctest.MakeTableFeedFactory(tc.Server(1), tc.ServerConn(0), flushCh, sink)

	foo := feed(t, f, "CREATE CHANGEFEED FOR foo")
	defer closeFeed(t, foo)

	sqlDB.Exec(t, `INSERT INTO foo VALUES (1, 'second')`)
	assertPayloads(t, foo, []string{
		`foo: [0]->{"after": {"a": 0, "b": "initial"}}`,
		`foo: [1]->{"after": {"a": 1, "b": "second"}}`,
	})

	// TODO(mrtracy): At this point we need to wait for a resolved timestamp,
	// in order to ensure that there isn't a repeat when the job is picked up
	// again. As an alternative, we could use a verifier instead of assertPayloads.

	// Wait for the high-water mark on the job to be updated after the initial
	// scan, to make sure we don't get the initial scan data again.

	// Stop server 0, which is where the table feed connects.
	tc.StopServer(0)

	sqlDB.Exec(t, `UPSERT INTO foo VALUES(0, 'updated')`)
	sqlDB.Exec(t, `INSERT INTO foo VALUES (3, 'third')`)

	// Rows written after the shutdown must still reach the feed once the
	// job has been re-adopted by a surviving node.
	assertPayloads(t, foo, []string{
		`foo: [0]->{"after": {"a": 0, "b": "updated"}}`,
		`foo: [3]->{"after": {"a": 3, "b": "third"}}`,
	})
}
explode_data.jsonl/7061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 896 }
[ 2830, 3393, 1143, 524, 823, 12051, 1955, 62004, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 3244, 57776, 3584, 18, 17, 17, 18, 17, 5130, 16867, 2915, 21972, 10256, 882, 33795, 8, 341, 197, 12428, 5481,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUserDefined(t *testing.T) { var flags FlagSet flags.Init("test", ContinueOnError) var v flagVar flags.Var(&v, "v", "usage") if err := flags.Parse([]string{"-v", "1", "-v", "2", "-v=3"}); err != nil { t.Error(err) } if len(v) != 3 { t.Fatal("expected 3 args; got ", len(v)) } expect := "[1 2 3]" if v.String() != expect { t.Errorf("expected value %q got %q", expect, v.String()) } }
explode_data.jsonl/53993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 1474, 29361, 1155, 353, 8840, 836, 8, 341, 2405, 8042, 22666, 1649, 198, 59516, 26849, 445, 1944, 497, 15003, 74945, 340, 2405, 348, 5181, 3962, 198, 59516, 87968, 2099, 85, 11, 330, 85, 497, 330, 17698, 1138, 743, 1848, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMergeCniResults(t *testing.T) { syncher := setupTest(len(totalSuccessTestConsts), totalSuccessTestConsts) cniResult := syncher.MergeCniResults() var expectedNumberOfCniInterfaces int for _, result := range totalSuccessTestConsts { if result.cniRes != nil { expectedNumberOfCniInterfaces = expectedNumberOfCniInterfaces + len(result.cniRes.Interfaces) } } if len(cniResult.Interfaces) != expectedNumberOfCniInterfaces { t.Errorf("Number of interfaces inside the aggregated CNI result:%d does not match with the expected:%d", len(cniResult.Interfaces), expectedNumberOfCniInterfaces) } if cniResult.Interfaces[0].Name != physicalEth0Name { t.Errorf("Name of the first interface in the merged CNI result:%s does not match with the expected eth0", cniResult.Interfaces[0].Name) } }
explode_data.jsonl/69739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 271 }
[ 2830, 3393, 52096, 34, 7751, 9801, 1155, 353, 8840, 836, 8, 341, 220, 6782, 9034, 1669, 6505, 2271, 6901, 22842, 7188, 2271, 19167, 82, 701, 2790, 7188, 2271, 19167, 82, 340, 220, 272, 7751, 2077, 1669, 6782, 9034, 93855, 34, 7751, 98...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestTiDBReadStaleness exercises the @@tidb_read_staleness session variable:
// bare negative integers are accepted, while values carrying a unit suffix or
// non-numeric text are rejected.
func TestTiDBReadStaleness(t *testing.T) {
	store, clean := realtikvtest.CreateMockStoreAndSetup(t)
	defer clean()
	tk := testkit.NewTestKit(t, store)
	// Valid: bare negative integers (seconds of staleness).
	tk.MustExec("set @@tidb_read_staleness='-5'")
	tk.MustExec("set @@tidb_read_staleness='-100'")
	// Invalid: duration suffix such as "s" is not allowed.
	err := tk.ExecToErr("set @@tidb_read_staleness='-5s'")
	require.Error(t, err)
	// Invalid: non-numeric input.
	err = tk.ExecToErr("set @@tidb_read_staleness='foo'")
	require.Error(t, err)
	// Valid: empty string and zero are both accepted.
	tk.MustExec("set @@tidb_read_staleness=''")
	tk.MustExec("set @@tidb_read_staleness='0'")
}
explode_data.jsonl/5717
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 45351, 3506, 4418, 623, 278, 23709, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1931, 83, 1579, 85, 1944, 7251, 11571, 6093, 3036, 21821, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOCMProvider_Create drives the OCM provider's Create method through a
// mocked OCM client, covering both the success path (a ClusterSpec mirroring
// the mock cluster's IDs) and the failure path (OCM returns an error).
func TestOCMProvider_Create(t *testing.T) {
	type fields struct {
		ocmClient ocm.Client
	}
	type args struct {
		clusterReq types.ClusterRequest
	}
	// Minimal AWS credentials; the values are irrelevant to the mocked calls.
	awsConfig := &config.AWSConfig{
		AccountID:       "",
		AccessKey:       "",
		SecretAccessKey: "",
	}
	osdCreateConfig := &config.DataplaneClusterConfig{
		OpenshiftVersion: "4.7",
	}
	cb := NewClusterBuilder(awsConfig, osdCreateConfig)
	internalId := "test-internal-id"
	externalId := "test-external-id"
	cr := types.ClusterRequest{
		CloudProvider:  "aws",
		Region:         "east-1",
		MultiAZ:        true,
		AdditionalSpec: nil,
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    *types.ClusterSpec
		wantErr bool
	}{
		{
			name: "should return created cluster",
			fields: fields{
				ocmClient: &ocm.ClientMock{
					// Return a cluster carrying the fixed internal/external
					// IDs so the produced spec can be asserted exactly.
					CreateClusterFunc: func(cluster *clustersmgmtv1.Cluster) (*clustersmgmtv1.Cluster, error) {
						return clustersmgmtv1.NewCluster().ID(internalId).ExternalID(externalId).Build()
					},
				},
			},
			args: args{
				clusterReq: cr,
			},
			want: &types.ClusterSpec{
				InternalID:     internalId,
				ExternalID:     externalId,
				Status:         api.ClusterProvisioning,
				AdditionalInfo: nil,
			},
			wantErr: false,
		},
		{
			name: "should return error when create cluster failed from OCM",
			fields: fields{
				ocmClient: &ocm.ClientMock{
					CreateClusterFunc: func(cluster *clustersmgmtv1.Cluster) (*clustersmgmtv1.Cluster, error) {
						return nil, errors.Errorf("failed to create cluster")
					},
				},
			},
			args:    args{clusterReq: cr},
			want:    nil,
			wantErr: true,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			RegisterTestingT(t)
			p := newOCMProvider(test.fields.ocmClient, cb, &ocm.OCMConfig{})
			resp, err := p.Create(&test.args.clusterReq)
			Expect(resp).To(Equal(test.want))
			if test.wantErr {
				Expect(err).NotTo(BeNil())
			}
		})
	}
}
explode_data.jsonl/4830
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 871 }
[ 2830, 3393, 7612, 44, 5179, 34325, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 197, 509, 76, 2959, 297, 6226, 11716, 198, 197, 532, 13158, 2827, 2036, 341, 197, 197, 18855, 27234, 4494, 72883, 1900, 198, 197, 532, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBearerPolicy_ClientCertificateCredential(t *testing.T) { srv, close := mock.NewTLSServer() defer close() srv.AppendResponse(mock.WithBody([]byte(accessTokenRespSuccess))) srv.AppendResponse(mock.WithStatusCode(http.StatusOK)) options := ClientCertificateCredentialOptions{} options.AuthorityHost = srv.URL() options.HTTPClient = srv cred, err := NewClientCertificateCredential(tenantID, clientID, certificatePath, &options) if err != nil { t.Fatalf("Did not expect an error but received: %v", err) } pipeline := defaultTestPipeline(srv, cred, scope) req, err := runtime.NewRequest(context.Background(), http.MethodGet, srv.URL()) if err != nil { t.Fatal(err) } _, err = pipeline.Do(req) if err != nil { t.Fatalf("Expected nil error but received one") } }
explode_data.jsonl/26264
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 26399, 13825, 46102, 33202, 48265, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 11, 3265, 1669, 7860, 7121, 13470, 1220, 2836, 741, 16867, 3265, 741, 1903, 10553, 8982, 2582, 30389, 26124, 5444, 10556, 3782, 7, 41167, 36555, 718...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRetrySucceeded(t *testing.T) { assert := internal.NewAssert(t, "TestRetrySucceeded") var number int increaseNumber := func() error { number++ if number == DefaultRetryTimes { return nil } return errors.New("error occurs") } err := Retry(increaseNumber, RetryDuration(time.Microsecond*50)) assert.IsNil(err) assert.Equal(DefaultRetryTimes, number) }
explode_data.jsonl/30739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 51560, 50, 43805, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 5306, 7121, 8534, 1155, 11, 330, 2271, 51560, 50, 43805, 5130, 2405, 1372, 526, 198, 17430, 19947, 2833, 1669, 2915, 368, 1465, 341, 197, 57135, 22940, 197, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestUnretrievableAttributes checks that ExtractUnretrievableAttributes
// pulls the UnretrievableAttributes option out of a generic option list, and
// that the extracted option survives a JSON marshal/unmarshal round trip.
func TestUnretrievableAttributes(t *testing.T) {
	for _, c := range []struct {
		opts     []interface{}
		expected *opt.UnretrievableAttributesOption
	}{
		{
			// A nil option falls back to an empty attribute list.
			opts:     []interface{}{nil},
			expected: opt.UnretrievableAttributes([]string{}...),
		},
		{
			opts:     []interface{}{opt.UnretrievableAttributes("value1")},
			expected: opt.UnretrievableAttributes("value1"),
		},
		{
			opts:     []interface{}{opt.UnretrievableAttributes("value1", "value2", "value3")},
			expected: opt.UnretrievableAttributes("value1", "value2", "value3"),
		},
	} {
		var (
			in  = ExtractUnretrievableAttributes(c.opts...)
			out opt.UnretrievableAttributesOption
		)
		// Round-trip through JSON; the decoded option must expose the same
		// attribute set (order-insensitive, hence ElementsMatch).
		data, err := json.Marshal(&in)
		require.NoError(t, err)
		err = json.Unmarshal(data, &out)
		require.NoError(t, err)
		require.ElementsMatch(t, c.expected.Get(), out.Get())
	}
}
explode_data.jsonl/20622
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 355 }
[ 2830, 3393, 1806, 265, 8927, 23760, 10516, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 272, 1669, 2088, 3056, 1235, 341, 197, 64734, 257, 3056, 4970, 16094, 197, 42400, 353, 2912, 10616, 265, 8927, 23760, 10516, 5341, 198, 197, 59403, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestNoOnExitWhenSkipped ensures the workflow operator does not create an
// onExit hook node for a step that was skipped.
func TestNoOnExitWhenSkipped(t *testing.T) {
	wf := unmarshalWF(noOnExitWhenSkipped)
	ctx := context.Background()
	woc := newWoc(*wf)
	woc.operate(ctx)
	// The exit-handler node of the skipped step "B" must not exist.
	assert.Nil(t, woc.wf.GetNodeByName("B.onExit"))
}
explode_data.jsonl/71010
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 2753, 1925, 15339, 4498, 19290, 6450, 1155, 353, 8840, 836, 8, 341, 6692, 69, 1669, 650, 27121, 32131, 39205, 1925, 15339, 4498, 19290, 6450, 692, 20985, 1669, 2266, 19047, 741, 6692, 509, 1669, 501, 54, 509, 4071, 43083, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReference(t *testing.T) { w := new(bytes.Buffer) opt := &Option{ referenceFlag: true, } assert.NotNil(t, reference(w, []string{}, opt)) assert.NotNil(t, reference(w, []string{"date.go", "date.go"}, opt)) assert.Nil(t, reference(w, []string{"date.go"}, opt)) }
explode_data.jsonl/82631
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 8856, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 501, 23158, 22622, 692, 64838, 1669, 609, 5341, 515, 197, 197, 16291, 12135, 25, 830, 345, 197, 532, 6948, 93882, 1155, 11, 5785, 3622, 11, 3056, 917, 22655, 3387, 1171, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSpanProcessor verifies the zPages span processor's bookkeeping: spans
// started through an instrumented tracer appear as active spans, and once
// ended they move into the error and/or latency sample buckets.
func TestSpanProcessor(t *testing.T) {
	zsp := NewSpanProcessor()
	tracerProvider := sdktrace.NewTracerProvider(
		sdktrace.WithSampler(sdktrace.AlwaysSample()),
		sdktrace.WithSpanProcessor(zsp),
	)
	const spanName = "testSpan"
	const numSpans = 9
	tracer := tracerProvider.Tracer("test")
	spans := createActiveSpans(tracer, spanName, numSpans)
	// Sort the spans by the address pointer so we can compare.
	sort.Slice(spans, func(i, j int) bool {
		return reflect.ValueOf(spans[i]).Pointer() < reflect.ValueOf(spans[j]).Pointer()
	})
	require.Len(t, spans, numSpans)
	activeSpans := zsp.activeSpans(spanName)
	assert.Len(t, activeSpans, numSpans)
	// Sort the activeSpans by the address pointer so we can compare.
	sort.Slice(activeSpans, func(i, j int) bool {
		return reflect.ValueOf(activeSpans[i]).Pointer() < reflect.ValueOf(activeSpans[j]).Pointer()
	})
	// The processor must track the identical span objects, not copies.
	for i := range spans {
		assert.Same(t, spans[i], activeSpans[i])
	}
	// No ended spans so there will be no error, no latency samples.
	assert.Len(t, zsp.errorSpans(spanName), 0)
	for i := 0; i < defaultBoundaries.numBuckets(); i++ {
		assert.Len(t, zsp.spansByLatency(spanName, i), 0)
	}
	spansPM := zsp.spansPerMethod()
	require.Equal(t, 1, len(spansPM))
	assert.Equal(t, numSpans, spansPM[spanName].activeSpans)
	// End all Spans, they will end pretty fast, so we can only check that we have at least one in
	// errors and one in latency samples.
	for _, s := range spans {
		s.End()
	}
	// Test that no more active spans.
	assert.Len(t, zsp.activeSpans(spanName), 0)
	assert.LessOrEqual(t, 1, len(zsp.errorSpans(spanName)))
	numLatencySamples := 0
	for i := 0; i < defaultBoundaries.numBuckets(); i++ {
		numLatencySamples += len(zsp.spansByLatency(spanName, i))
	}
	assert.LessOrEqual(t, 1, numLatencySamples)
}
explode_data.jsonl/34887
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 678 }
[ 2830, 3393, 12485, 22946, 1155, 353, 8840, 836, 8, 341, 20832, 2154, 1669, 1532, 12485, 22946, 741, 25583, 9584, 5179, 1669, 45402, 15067, 7121, 1282, 9584, 5179, 1006, 197, 1903, 7584, 15067, 26124, 66048, 1141, 7584, 15067, 9636, 2284, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestValidate table-drives WebhookParameters validation: each scenario
// mutates one field of an otherwise-valid parameter set via wrapFunc and
// states the exact error message expected ("" means the config is valid).
func TestValidate(t *testing.T) {
	scenarios := map[string]scenario{
		"valid": {
			wrapFunc:      func(args *WebhookParameters) {},
			expectedError: "",
		},
		"invalid deployment namespace": {
			wrapFunc:      func(args *WebhookParameters) { args.DeploymentAndServiceNamespace = "_/invalid" },
			expectedError: `invalid deployment namespace: "_/invalid"`,
		},
		"invalid deployment name": {
			wrapFunc:      func(args *WebhookParameters) { args.DeploymentName = "_/invalid" },
			expectedError: `invalid deployment name: "_/invalid"`,
		},
		"invalid service name": {
			wrapFunc:      func(args *WebhookParameters) { args.ServiceName = "_/invalid" },
			expectedError: `invalid service name: "_/invalid"`,
		},
		"missing deployment namespace": {
			wrapFunc:      func(args *WebhookParameters) { args.DeploymentAndServiceNamespace = "" },
			expectedError: `invalid deployment namespace: ""`,
		},
		"missing deployment name": {
			wrapFunc:      func(args *WebhookParameters) { args.DeploymentName = "" },
			expectedError: `invalid deployment name: ""`,
		},
		"missing service name": {
			wrapFunc:      func(args *WebhookParameters) { args.ServiceName = "" },
			expectedError: `invalid service name: ""`,
		},
		"webhook unset": {
			wrapFunc:      func(args *WebhookParameters) { args.WebhookConfigFile = "" },
			expectedError: "webhookConfigFile not specified",
		},
		"cert unset": {
			wrapFunc:      func(args *WebhookParameters) { args.CertFile = "" },
			expectedError: "cert file not specified",
		},
		"key unset": {
			wrapFunc:      func(args *WebhookParameters) { args.KeyFile = "" },
			expectedError: "key file not specified",
		},
		"ca cert unset": {
			wrapFunc:      func(args *WebhookParameters) { args.CACertFile = "" },
			expectedError: "CA cert file not specified",
		},
		"invalid port": {
			// Ports must fit the unprivileged range 1024..65535.
			wrapFunc:      func(args *WebhookParameters) { args.Port = 100000 },
			expectedError: "port number 100000 must be in the range 1024..65535",
		},
	}
	for name, scenario := range scenarios {
		t.Run(name, func(tt *testing.T) {
			runTestCode(name, tt, scenario)
		})
	}
}
explode_data.jsonl/6686
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 795 }
[ 2830, 3393, 17926, 1155, 353, 8840, 836, 8, 341, 29928, 60494, 1669, 2415, 14032, 60, 61422, 515, 197, 197, 1, 1891, 788, 341, 298, 6692, 4611, 9626, 25, 414, 2915, 7356, 353, 5981, 20873, 9706, 8, 14573, 298, 42400, 1454, 25, 8324, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAccDataSourceLookup_metadata is a Terraform acceptance test: it applies
// the jerakia_lookup data-source config and asserts that result_json holds
// the expected JSON-encoded payload.
func TestAccDataSourceLookup_metadata(t *testing.T) {
	expectedPayload := []interface{}{"bob", "lucy", "david"}
	expectedJSON, err := json.Marshal(expectedPayload)
	if err != nil {
		t.Fatalf("Unable to marshal JSON: %s", err)
	}
	resource.Test(t, resource.TestCase{
		PreCheck:  func() { testAccPreCheck(t) },
		Providers: testAccProviders,
		Steps: []resource.TestStep{
			resource.TestStep{
				Config: testAccDataSourceLookup_metadata,
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr(
						"data.jerakia_lookup.lookup_1", "result_json", string(expectedJSON)),
				),
			},
		},
	})
}
explode_data.jsonl/43729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 14603, 17173, 34247, 22220, 1155, 353, 8840, 836, 8, 341, 42400, 29683, 1669, 3056, 4970, 6257, 4913, 47086, 497, 330, 75, 1754, 88, 497, 330, 67, 15212, 16707, 42400, 5370, 11, 1848, 1669, 2951, 37271, 15253, 29683, 340, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDataHash(t *testing.T) { t.Parallel() fabBlock, err := getBlock("./mock/sampleblock.pb") assert.NoError(t, err) block, _ := FromFabricBlock(fabBlock) hash := block.DataHash() assert.Equal( t, "db7a04bfca3b18b7cc6f6544863bec7f6b8d863bf8488bd92e25c71ffe04769b", hex.EncodeToString(hash), ) }
explode_data.jsonl/40042
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 93200, 6370, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1166, 370, 4713, 11, 1848, 1669, 633, 4713, 13988, 16712, 69851, 4574, 37916, 1138, 6948, 35699, 1155, 11, 1848, 692, 47996, 11, 716, 1669, 5542, 81731, 4713, 955...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChangeParentWithConflict checks the hierarchy admission webhook: moving
// a namespace under a new parent must be denied when the namespace (or any
// descendant) propagates an object whose name collides with one propagated by
// the new ancestry.
func TestChangeParentWithConflict(t *testing.T) {
	f := foresttest.Create("-a-c") // a <- b; c <- d
	// Set secret to "Propagate" mode. (Use Secret in this test because the test
	// forest doesn't have Role or RoleBinding by default either. We can also create
	// secret by existing `createSecret()` function.)
	or := &reconcilers.ObjectReconciler{
		GVK:  schema.GroupVersionKind{Group: "", Version: "v1", Kind: "Secret"},
		Mode: api.Propagate,
	}
	f.AddTypeSyncer(or)
	// Create secrets with the same name in namespace 'a' and 'd'.
	createSecret("conflict", "a", f)
	createSecret("conflict", "d", f)
	h := &Hierarchy{Forest: f}
	l := zap.Logger(false)
	tests := []struct {
		name string
		nnm  string
		pnm  string
		fail bool
	}{
		{name: "conflict in itself and the new parent", nnm: "a", pnm: "d", fail: true},
		{name: "conflict in itself and a new ancestor (not the parent)", nnm: "d", pnm: "b", fail: true},
		{name: "ok: no conflict in ancestors", nnm: "a", pnm: "c"},
		{name: "conflict in subtree leaf and the new parent", nnm: "c", pnm: "a", fail: true},
		{name: "conflict in subtree leaf and a new ancestor (not the parent)", nnm: "c", pnm: "b", fail: true},
		{name: "ok: set a namespace as root", nnm: "d", pnm: ""},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			// Setup: build a HierarchyConfiguration that re-parents tc.nnm
			// under tc.pnm ("" makes it a root).
			g := NewGomegaWithT(t)
			hc := &api.HierarchyConfiguration{Spec: api.HierarchyConfigurationSpec{Parent: tc.pnm}}
			hc.ObjectMeta.Name = api.Singleton
			hc.ObjectMeta.Namespace = tc.nnm
			req := &request{hc: hc}
			// Test
			got := h.handle(context.Background(), l, req)
			// Report: admission must be denied exactly when a conflict exists.
			logResult(t, got.AdmissionResponse.Result)
			g.Expect(got.AdmissionResponse.Allowed).ShouldNot(Equal(tc.fail))
		})
	}
}
explode_data.jsonl/73411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 672 }
[ 2830, 3393, 4072, 8387, 2354, 57974, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 13638, 1944, 7251, 13645, 64, 1786, 899, 442, 264, 9119, 293, 26, 272, 9119, 294, 271, 197, 322, 2573, 6234, 311, 330, 2008, 46836, 1, 3856, 13, 320, 102...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseDuration(t *testing.T) { var tests = []struct { input string fail bool result time.Duration }{ { input: "", fail: true, }, { input: "abc", fail: true, }, { input: "2015-06-03T13:21:58.555Z", fail: true, }, { // Internal int64 overflow. input: "-148966367200.372", fail: true, }, { // Internal int64 overflow. input: "148966367200.372", fail: true, }, { input: "123", result: 123 * time.Second, }, { input: "123.333", result: 123*time.Second + 333*time.Millisecond, }, { input: "15s", result: 15 * time.Second, }, { input: "5m", result: 5 * time.Minute, }, } for _, test := range tests { d, err := parseDuration(test.input) if err != nil && !test.fail { t.Errorf("Unexpected error for %q: %s", test.input, err) continue } if err == nil && test.fail { t.Errorf("Expected error for %q but got none", test.input) continue } if !test.fail && d != test.result { t.Errorf("Expected duration %v for input %q but got %v", test.result, test.input, d) } } }
explode_data.jsonl/72407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 502 }
[ 2830, 3393, 14463, 12945, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 22427, 220, 914, 198, 197, 63052, 256, 1807, 198, 197, 9559, 882, 33795, 198, 197, 59403, 197, 197, 515, 298, 22427, 25, 8324, 298, 63052, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestImplicitXeger constructs a Xeger via a bare struct literal (rather than
// a constructor) and checks that a fixed seed produces the expected output.
func TestImplicitXeger(t *testing.T) {
	myRegex, _ := syntax.Parse("[0-9]+", syntax.Perl) // handle this error in the real code
	// Positional fields: compiled regex, deterministic rand source, limit.
	myXeger := &Xeger{
		myRegex,
		rand.NewSource(1234567),
		15,
	}
	if res := myXeger.Generate(); res != "9712160" { // since it's set seed, I know the result
		t.Errorf("Result is wrong when creating Xeger implicitly: %s\n", res)
	}
}
explode_data.jsonl/1413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 59558, 55, 1878, 1155, 353, 8840, 836, 8, 341, 13624, 32464, 11, 716, 1669, 19482, 8937, 10937, 15, 12, 24, 7574, 497, 19482, 14834, 75, 8, 442, 3705, 419, 1465, 304, 279, 1931, 2038, 198, 13624, 55, 1878, 1669, 609, 55,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestAlgorithmProviderCompatibility pins the plugin sets produced by the
// scheduler's algorithm providers (none, DefaultProvider,
// ClusterAutoscalerProvider) so that releases stay config-compatible.
func TestAlgorithmProviderCompatibility(t *testing.T) {
	// Add serialized versions of scheduler config that exercise available options to ensure compatibility between releases
	defaultPlugins := map[string][]config.Plugin{
		"PreFilterPlugin": {
			{Name: "NodeResourcesFit"},
			{Name: "NodePorts"},
			{Name: "InterPodAffinity"},
		},
		"FilterPlugin": {
			{Name: "NodeUnschedulable"},
			{Name: "NodeResourcesFit"},
			{Name: "NodeName"},
			{Name: "NodePorts"},
			{Name: "NodeAffinity"},
			{Name: "VolumeRestrictions"},
			{Name: "TaintToleration"},
			{Name: "EBSLimits"},
			{Name: "GCEPDLimits"},
			{Name: "NodeVolumeLimits"},
			{Name: "AzureDiskLimits"},
			{Name: "VolumeBinding"},
			{Name: "VolumeZone"},
			{Name: "InterPodAffinity"},
		},
		"PostFilterPlugin": {
			{Name: "InterPodAffinity"},
			{Name: "DefaultPodTopologySpread"},
			{Name: "TaintToleration"},
		},
		"ScorePlugin": {
			{Name: "NodeResourcesBalancedAllocation", Weight: 1},
			{Name: "ImageLocality", Weight: 1},
			{Name: "InterPodAffinity", Weight: 1},
			{Name: "NodeResourcesLeastAllocated", Weight: 1},
			{Name: "NodeAffinity", Weight: 1},
			{Name: "NodePreferAvoidPods", Weight: 10000},
			{Name: "DefaultPodTopologySpread", Weight: 1},
			{Name: "TaintToleration", Weight: 1},
		},
	}
	testcases := []struct {
		name        string
		provider    string
		wantPlugins map[string][]config.Plugin
	}{
		{
			name:        "No Provider specified",
			wantPlugins: defaultPlugins,
		},
		{
			name:        "DefaultProvider",
			provider:    config.SchedulerDefaultProviderName,
			wantPlugins: defaultPlugins,
		},
		{
			// Identical to the default set except the score plugin
			// NodeResourcesMostAllocated replaces NodeResourcesLeastAllocated.
			name:     "ClusterAutoscalerProvider",
			provider: algorithmprovider.ClusterAutoscalerProvider,
			wantPlugins: map[string][]config.Plugin{
				"PreFilterPlugin": {
					{Name: "NodeResourcesFit"},
					{Name: "NodePorts"},
					{Name: "InterPodAffinity"},
				},
				"FilterPlugin": {
					{Name: "NodeUnschedulable"},
					{Name: "NodeResourcesFit"},
					{Name: "NodeName"},
					{Name: "NodePorts"},
					{Name: "NodeAffinity"},
					{Name: "VolumeRestrictions"},
					{Name: "TaintToleration"},
					{Name: "EBSLimits"},
					{Name: "GCEPDLimits"},
					{Name: "NodeVolumeLimits"},
					{Name: "AzureDiskLimits"},
					{Name: "VolumeBinding"},
					{Name: "VolumeZone"},
					{Name: "InterPodAffinity"},
				},
				"PostFilterPlugin": {
					{Name: "InterPodAffinity"},
					{Name: "DefaultPodTopologySpread"},
					{Name: "TaintToleration"},
				},
				"ScorePlugin": {
					{Name: "NodeResourcesBalancedAllocation", Weight: 1},
					{Name: "ImageLocality", Weight: 1},
					{Name: "InterPodAffinity", Weight: 1},
					{Name: "NodeResourcesMostAllocated", Weight: 1},
					{Name: "NodeAffinity", Weight: 1},
					{Name: "NodePreferAvoidPods", Weight: 10000},
					{Name: "DefaultPodTopologySpread", Weight: 1},
					{Name: "TaintToleration", Weight: 1},
				},
			},
		},
	}
	for _, tc := range testcases {
		t.Run(tc.name, func(t *testing.T) {
			var opts []scheduler.Option
			if len(tc.provider) != 0 {
				opts = append(opts, scheduler.WithAlgorithmSource(config.SchedulerAlgorithmSource{
					Provider: &tc.provider,
				}))
			}
			// Build a scheduler against a fake clientset and compare its
			// actual plugin registry with the pinned expectation.
			client := fake.NewSimpleClientset()
			informerFactory := informers.NewSharedInformerFactory(client, 0)
			sched, err := scheduler.New(
				client,
				informerFactory,
				informerFactory.Core().V1().Pods(),
				nil,
				make(chan struct{}),
				opts...,
			)
			if err != nil {
				t.Fatalf("Error constructing: %v", err)
			}
			gotPlugins := sched.Framework.ListPlugins()
			if diff := cmp.Diff(tc.wantPlugins, gotPlugins); diff != "" {
				t.Errorf("unexpected plugins diff (-want, +got): %s", diff)
			}
		})
	}
}
explode_data.jsonl/27546
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1579 }
[ 2830, 3393, 27847, 5179, 85880, 1155, 353, 8840, 836, 8, 341, 197, 322, 2691, 32916, 10795, 315, 28809, 2193, 429, 10158, 2500, 2606, 311, 5978, 24748, 1948, 19232, 198, 11940, 45378, 1669, 2415, 14032, 45725, 1676, 64378, 515, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestVolumeSnapshotUpdateAction(t *testing.T) { var args []string args = append(args, "3769855c-a102-11e7-b772-17b880d2f537") volumeSnapshotUpdateAction(volumeSnapshotDeleteCommand, args) }
explode_data.jsonl/13557
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 18902, 15009, 4289, 2512, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 3056, 917, 198, 31215, 284, 8737, 7356, 11, 330, 18, 22, 21, 24, 23, 20, 20, 66, 7409, 16, 15, 17, 12, 16, 16, 68, 22, 1455, 22, 22, 17, 12, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSkipToEnd(t *testing.T) { testcases := []struct { input string output string }{{ // This is the case where the partial ddl will be reset // because of a premature ';'. input: "create table a(id; select * from t", output: "syntax error at position 19", }, { // Partial DDL should get reset for valid DDLs also. input: "create table a(id int); select * from t", output: "syntax error at position 31 near 'select'", }, { // Partial DDL does not get reset here. But we allow the // DDL only if there are no new tokens after skipping to end. input: "create table a bb cc; select * from t", output: "extra characters encountered after end of DDL: 'select'", }, { // Test that we don't step at ';' inside strings. input: "create table a bb 'a;'; select * from t", output: "extra characters encountered after end of DDL: 'select'", }} for _, tcase := range testcases { _, err := Parse(tcase.input) if err == nil || err.Error() != tcase.output { t.Errorf("%s: %v, want %s", tcase.input, err, tcase.output) } } }
explode_data.jsonl/27192
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 377 }
[ 2830, 3393, 35134, 66573, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 22427, 220, 914, 198, 197, 21170, 914, 198, 197, 15170, 515, 197, 197, 322, 1096, 374, 279, 1142, 1380, 279, 7130, 55780, 686, 387, 7585...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestBlock verifies that {{block}} both renders in place (equivalent to a
// {{template}} invocation) and registers the inner template in the supplied
// tree set.
func TestBlock(t *testing.T) {
	const (
		input = `a{{block "inner" .}}bar{{.}}baz{{end}}b`
		// {{block "name" .}} is shorthand for defining "name" and invoking it
		// via {{template "name" .}} at the same spot.
		outer = `a{{template "inner" .}}b`
		inner = `bar{{.}}baz`
	)
	treeSet := make(map[string]*Tree)
	tmpl, err := New("outer").Parse(input, "", "", treeSet, nil)
	if err != nil {
		t.Fatal(err)
	}
	if g, w := tmpl.Root.String(), outer; g != w {
		t.Errorf("outer template = %q, want %q", g, w)
	}
	inTmpl := treeSet["inner"]
	if inTmpl == nil {
		t.Fatal("block did not define template")
	}
	if g, w := inTmpl.Root.String(), inner; g != w {
		t.Errorf("inner template = %q, want %q", g, w)
	}
}
explode_data.jsonl/68975
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 4713, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 22427, 284, 1565, 64, 2979, 4574, 330, 4382, 1, 659, 3417, 2257, 2979, 13, 3417, 42573, 2979, 408, 3417, 65, 3989, 197, 197, 2676, 284, 1565, 64, 2979, 4214, 330, 4382...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// Test_appendInvalidFile runs `new --append=ruby.yml` and expects the command
// output to match the InvalidFile error pattern.
func Test_appendInvalidFile(t *testing.T) {
	const functionName = "samplefunc"
	const functionLang = "ruby"
	templatePullLocalTemplateRepo(t)
	defer tearDownFetchTemplates(t)
	// Create function
	parameters := []string{
		"new",
		functionName,
		"--lang=" + functionLang,
		// "<lang>.yml" — presumably invalid as an append target (file does
		// not exist / is not a valid stack file); confirm against InvalidFile.
		"--append=" + functionLang + ".yml",
	}
	faasCmd.SetArgs(parameters)
	stdOut := faasCmd.Execute().Error()
	if found, err := regexp.MatchString(InvalidFile, stdOut); err != nil || !found {
		t.Fatalf("Output is not as expected: %s\n", stdOut)
	}
}
explode_data.jsonl/47232
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 26041, 7928, 1703, 1155, 353, 8840, 836, 8, 341, 4777, 90519, 284, 330, 13611, 2830, 698, 4777, 729, 26223, 284, 330, 46275, 1837, 22832, 36068, 7319, 7275, 25243, 1155, 340, 16867, 32825, 20714, 51195, 1155, 692, 197, 322, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateChannel(t *testing.T) { client := NewClient(mocks.NewMockConfig()) configTx, err := ioutil.ReadFile("../../test/fixtures/channel/mychannel.tx") if err != nil { t.Fatalf(err.Error()) } // Setup mock orderer verifyBroadcast := make(chan *fab.SignedEnvelope) orderer := mocks.NewMockOrderer(fmt.Sprintf("0.0.0.0:1234"), verifyBroadcast) // Create channel without envelope _, err = client.CreateChannel(fab.CreateChannelRequest{ Orderer: orderer, Name: "mychannel", }) if err == nil { t.Fatalf("Expected error creating channel without envelope") } // Create channel without orderer _, err = client.CreateChannel(fab.CreateChannelRequest{ Envelope: configTx, Name: "mychannel", }) if err == nil { t.Fatalf("Expected error creating channel without orderer") } // Create channel without name _, err = client.CreateChannel(fab.CreateChannelRequest{ Envelope: configTx, Orderer: orderer, }) if err == nil { t.Fatalf("Expected error creating channel without name") } // Test with valid cofiguration request := fab.CreateChannelRequest{ Envelope: configTx, Orderer: orderer, Name: "mychannel", } _, err = client.CreateChannel(request) if err != nil { t.Fatalf("Did not expect error from create channel. Got error: %v", err) } select { case b := <-verifyBroadcast: logger.Debugf("Verified broadcast: %v", b) case <-time.After(time.Second): t.Fatalf("Expected broadcast") } }
explode_data.jsonl/36724
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 527 }
[ 2830, 3393, 4021, 9629, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 1532, 2959, 1255, 25183, 7121, 11571, 2648, 12367, 25873, 31584, 11, 1848, 1669, 43144, 78976, 36800, 1944, 94275, 73121, 34198, 10119, 33807, 1138, 743, 1848, 961, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func Test_parsePackage(t *testing.T) { data := []struct { text string expected []int expectedMessage string }{ {"2x3x4", []int{2, 3, 4}, ""}, {"3x4x2", []int{2, 3, 4}, ""}, {"3x1", nil, "bad package spec"}, {"3xAx4", nil, "strconv.Atoi: parsing \"A\": invalid syntax"}, } for _, d := range data { dims, err := parsePackage(d.text) if !reflect.DeepEqual(dims, d.expected) { t.Errorf("for '%s' expected %d but got %d", d.text, d.expected, dims) } actualMsg := "" if err != nil { actualMsg = err.Error() } if actualMsg != d.expectedMessage { t.Errorf("for '%s' expected error '%s' but got '%s'", d.text, d.expectedMessage, actualMsg) } } }
explode_data.jsonl/30719
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 315 }
[ 2830, 3393, 21039, 13100, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 1235, 341, 197, 15425, 310, 914, 198, 197, 42400, 286, 3056, 396, 198, 197, 42400, 2052, 914, 198, 197, 59403, 197, 197, 4913, 17, 87, 18, 87, 19, 497, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestMultiSigScript checks MultiSigScript output for valid m-of-n compressed
// pubkey combinations and its error cases: a threshold larger than the number
// of supplied keys (ErrTooManyRequiredSigs) and uncompressed keys
// (ErrPubKeyType).
func TestMultiSigScript(t *testing.T) {
	t.Parallel()

	// mainnet p2pk 13CG6SJ3yHUXo4Cr2RY4THLLJrNFuG3gUg
	p2pkCompressedMain := hexToBytes("02192d74d0cb94344c9569c2e77901573d8d790" +
		"3c3ebec3a957724895dca52c6b4")
	p2pkCompressed2Main := hexToBytes("03b0bd634234abbb1ba1e986e884185c61cf43" +
		"e001f9137f23c2c409273eb16e65")
	p2pkUncompressedMain := hexToBytes("0411db93e1dcdb8a016b49840f8c53bc1eb68" +
		"a382e97b1482ecad7b148a6909a5cb2e0eaddfb84ccf9744464f82e160bfa9b8b64f" +
		"9d4c03f999b8643f656b412a3")

	tests := []struct {
		name      string
		threshold int
		pubKeys   [][]byte
		expected  string // script in short-form notation; parsed below
		err       error
	}{{
		name:      "normal 1-of-2",
		threshold: 1,
		pubKeys:   [][]byte{p2pkCompressedMain, p2pkCompressed2Main},
		expected: "1 DATA_33 0x02192d74d0cb94344c9569c2e77901573d8d7903c" +
			"3ebec3a957724895dca52c6b4 DATA_33 0x03b0bd634" +
			"234abbb1ba1e986e884185c61cf43e001f9137f23c2c4" +
			"09273eb16e65 2 CHECKMULTISIG",
	}, {
		name:      "normal 2-of-2",
		threshold: 2,
		pubKeys:   [][]byte{p2pkCompressedMain, p2pkCompressed2Main},
		expected: "2 DATA_33 0x02192d74d0cb94344c9569c2e77901573d8d7903c" +
			"3ebec3a957724895dca52c6b4 DATA_33 0x03b0bd634" +
			"234abbb1ba1e986e884185c61cf43e001f9137f23c2c4" +
			"09273eb16e65 2 CHECKMULTISIG",
	}, {
		name:      "threshold 3 > 2 pubkeys",
		pubKeys:   [][]byte{p2pkCompressedMain, p2pkCompressed2Main},
		threshold: 3,
		expected:  "",
		err:       ErrTooManyRequiredSigs,
	}, {
		name:      "threshold 2 > 1 pubkey",
		pubKeys:   [][]byte{p2pkCompressedMain},
		threshold: 2,
		expected:  "",
		err:       ErrTooManyRequiredSigs,
	}, {
		name:      "reject uncompressed pubkeys",
		pubKeys:   [][]byte{p2pkUncompressedMain},
		threshold: 1,
		expected:  "",
		err:       ErrPubKeyType,
	}}

	t.Logf("Running %d tests", len(tests))
	for _, test := range tests {
		script, err := MultiSigScript(test.threshold, test.pubKeys...)
		if !errors.Is(err, test.err) {
			t.Errorf("%q: unexpected error - got %v, want %v", test.name, err, test.err)
			continue
		}

		expected := mustParseShortForm(test.expected)
		if !bytes.Equal(script, expected) {
			t.Errorf("%q: unexpected result -- got: %x\nwant: %x", test.name, script, expected)
			continue
		}
	}
}
explode_data.jsonl/29684
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1144 }
[ 2830, 3393, 20358, 47246, 5910, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 220, 1887, 4711, 281, 17, 20819, 220, 16, 18, 8798, 21, 97489, 18, 88, 39, 13401, 78, 19, 16001, 17, 11242, 19, 3617, 4086, 41, 81, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestByzantine runs a 4-validator consensus network in which validator 0 is
// byzantine: it equivocates, proposing different blocks to two network
// partitions (A = peer 0, B = peers 1 and 2). The test verifies that the
// honest majority commits a block while partitioned and that every honest
// validator commits once the partition is healed.
func TestByzantine(t *testing.T) {
	N := 4
	logger := consensusLogger().With("test", "byzantine")
	css, cleanup := randConsensusNet(N, "consensus_byzantine_test", newMockTickerFunc(false), newCounter)
	defer cleanup()

	// give the byzantine validator a normal ticker
	ticker := NewTimeoutTicker()
	ticker.SetLogger(css[0].Logger)
	css[0].SetTimeoutTicker(ticker)

	switches := make([]*p2p.Switch, N)
	p2pLogger := logger.With("module", "p2p")
	for i := 0; i < N; i++ {
		switches[i] = p2p.MakeSwitch(
			config.P2P,
			i,
			"foo", "1.0.0",
			func(i int, sw *p2p.Switch) *p2p.Switch {
				return sw
			})
		switches[i].SetLogger(p2pLogger.With("validator", i))
	}

	blocksSubs := make([]types.Subscription, N)
	reactors := make([]p2p.Reactor, N)
	for i := 0; i < N; i++ {
		// make first val byzantine
		if i == 0 {
			// NOTE: Now, test validators are MockPV, which by default doesn't
			// do any safety checks.
			css[i].privValidator.(*types.MockPV).DisableChecks()
			// Replace the proposal logic with the equivocating variant and
			// suppress prevotes entirely for the byzantine node.
			css[i].decideProposal = func(j int) func(int64, int) {
				return func(height int64, round int) {
					byzantineDecideProposalFunc(t, height, round, css[j], switches[j])
				}
			}(i)
			css[i].doPrevote = func(height int64, round int) {}
		}

		eventBus := css[i].eventBus
		eventBus.SetLogger(logger.With("module", "events", "validator", i))

		var err error
		blocksSubs[i], err = eventBus.Subscribe(context.Background(), testSubscriber, types.EventQueryNewBlock)
		require.NoError(t, err)

		conR := NewConsensusReactor(css[i], true) // so we don't start the consensus states
		conR.SetLogger(logger.With("validator", i))
		conR.SetEventBus(eventBus)

		var conRI p2p.Reactor = conR

		// make first val byzantine
		if i == 0 {
			conRI = NewByzantineReactor(conR)
		}

		reactors[i] = conRI
		sm.SaveState(css[i].blockExec.DB(), css[i].state) //for save height 1's validators info
	}

	defer func() {
		for _, r := range reactors {
			if rr, ok := r.(*ByzantineReactor); ok {
				rr.reactor.Switch.Stop()
			} else {
				r.(*ConsensusReactor).Switch.Stop()
			}
		}
	}()

	p2p.MakeConnectedSwitches(config.P2P, N, func(i int, s *p2p.Switch) *p2p.Switch {
		// ignore new switch s, we already made ours
		switches[i].AddReactor("CONSENSUS", reactors[i])
		return switches[i]
	}, func(sws []*p2p.Switch, i, j int) {
		// the network starts partitioned with globally active adversary
		if i != 0 {
			return
		}
		p2p.Connect2Switches(sws, i, j)
	})

	// start the non-byz state machines.
	// note these must be started before the byz
	for i := 1; i < N; i++ {
		cr := reactors[i].(*ConsensusReactor)
		cr.SwitchToConsensus(cr.conS.GetState(), 0)
	}

	// start the byzantine state machine
	byzR := reactors[0].(*ByzantineReactor)
	s := byzR.reactor.conS.GetState()
	byzR.reactor.SwitchToConsensus(s, 0)

	// byz proposer sends one block to peers[0]
	// and the other block to peers[1] and peers[2].
	// note peers and switches order don't match.
	peers := switches[0].Peers().List()

	// partition A
	ind0 := getSwitchIndex(switches, peers[0])

	// partition B
	ind1 := getSwitchIndex(switches, peers[1])
	ind2 := getSwitchIndex(switches, peers[2])
	p2p.Connect2Switches(switches, ind1, ind2)

	// wait for someone in the big partition (B) to make a block
	<-blocksSubs[ind2].Out()

	t.Log("A block has been committed. Healing partition")
	p2p.Connect2Switches(switches, ind0, ind1)
	p2p.Connect2Switches(switches, ind0, ind2)

	// wait till everyone makes the first new block
	// (one of them already has)
	wg := new(sync.WaitGroup)
	wg.Add(2)
	for i := 1; i < N-1; i++ {
		go func(j int) {
			<-blocksSubs[j].Out()
			wg.Done()
		}(i)
	}

	done := make(chan struct{})
	go func() {
		wg.Wait()
		close(done)
	}()

	// Fail loudly (with per-reactor state dumps) instead of hanging if the
	// network never converges.
	tick := time.NewTicker(time.Second * 10)
	select {
	case <-done:
	case <-tick.C:
		for i, reactor := range reactors {
			t.Log(fmt.Sprintf("Consensus Reactor %v", i))
			t.Log(fmt.Sprintf("%v", reactor))
		}
		t.Fatalf("Timed out waiting for all validators to commit first block")
	}
}
explode_data.jsonl/30990
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1633 }
[ 2830, 3393, 1359, 89, 38357, 1155, 353, 8840, 836, 8, 341, 18317, 1669, 220, 19, 198, 17060, 1669, 23869, 7395, 1005, 2354, 445, 1944, 497, 330, 1694, 89, 38357, 1138, 1444, 778, 11, 21290, 1669, 10382, 15220, 13626, 6954, 8204, 11, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMonthRetention(t *testing.T) { secs, err := RetentionSeconds("1M") if err != nil { t.Errorf("Unable to calculate retention") } if secs != uint32(60*60*24*31) { t.Errorf("Incorrect retention") } }
explode_data.jsonl/74563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 11318, 86329, 1155, 353, 8840, 836, 8, 341, 197, 53281, 11, 1848, 1669, 10392, 2939, 15343, 445, 16, 44, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 17075, 311, 11047, 37131, 1138, 197, 630, 743, 49749, 961, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestEcEngineConstructor verifies that ecEngineConstructor accepts a minimal
// object-server configuration together with a valid erasure-coded ("hec")
// storage policy.
func TestEcEngineConstructor(t *testing.T) {
	configString := "[app:object-server]\nmount_check=false\n"
	// Policy 0: "hec" type with 2 data shards and 1 parity shard.
	pol := conf.Policy{Index: 0, Type: "hec", Name: "gold",
		Aliases: []string{}, Default: true, Deprecated: false,
		Config: map[string]string{"policy_type": "hec",
			"default": "yes", "name": "gold", "data_shards": "2",
			"parity_shards": "1"}}
	config, _ := conf.StringConfig(configString)
	_, err := ecEngineConstructor(
		config, &pol, &flag.FlagSet{})
	require.Nil(t, err)
}
explode_data.jsonl/58664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 50730, 4571, 13288, 1155, 353, 8840, 836, 8, 341, 25873, 703, 1669, 10545, 676, 25, 1700, 26300, 17960, 77, 16557, 7200, 12219, 1699, 698, 3223, 337, 1669, 2335, 1069, 8018, 90, 1552, 25, 220, 15, 11, 3990, 25, 330, 41650,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_filter(t *testing.T) { tbl := []struct { input bot.Message output bool }{ {bot.Message{Text: " +1"}, true}, {bot.Message{Text: " -1"}, true}, {bot.Message{Text: ":+1:"}, true}, {bot.Message{Text: ":-1:"}, true}, {bot.Message{Text: "+1 blah"}, false}, {bot.Message{Text: "blah +1 blah"}, false}, } for i, tt := range tbl { t.Run(strconv.Itoa(i), func(t *testing.T) { filtered := filter(tt.input) assert.Equal(t, tt.output, filtered) }) } }
explode_data.jsonl/37337
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 8727, 1155, 353, 8840, 836, 8, 341, 3244, 2024, 1669, 3056, 1235, 341, 197, 22427, 220, 10924, 8472, 198, 197, 21170, 1807, 198, 197, 59403, 197, 197, 90, 6331, 8472, 90, 1178, 25, 330, 488, 16, 14345, 830, 1583, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRandomizedGenState seeds the staking-module genesis simulation with a
// fixed source (seed 1) and pins every derived value — params, delegation and
// validator counts, addresses, shares and commission rates — so any change to
// the randomization logic is caught immediately.
func TestRandomizedGenState(t *testing.T) {
	interfaceRegistry := codectypes.NewInterfaceRegistry()
	cryptocodec.RegisterInterfaces(interfaceRegistry)
	cdc := codec.NewProtoCodec(interfaceRegistry)

	// Deterministic RNG: all expectations below are golden values for seed 1.
	s := rand.NewSource(1)
	r := rand.New(s)

	simState := module.SimulationState{
		AppParams:    make(simtypes.AppParams),
		Cdc:          cdc,
		Rand:         r,
		NumBonded:    3,
		Accounts:     simtypes.RandomAccounts(r, 3),
		InitialStake: 1000,
		GenState:     make(map[string]json.RawMessage),
	}

	simulation.RandomizedGenState(&simState)

	var stakingGenesis types.GenesisState
	simState.Cdc.MustUnmarshalJSON(simState.GenState[types.ModuleName], &stakingGenesis)

	// check params
	require.Equal(t, uint32(207), stakingGenesis.Params.MaxValidators)
	require.Equal(t, uint32(7), stakingGenesis.Params.MaxEntries)
	require.Equal(t, uint32(8687), stakingGenesis.Params.HistoricalEntries)
	require.Equal(t, "stake", stakingGenesis.Params.BondDenom)
	require.Equal(t, float64(238280), stakingGenesis.Params.UnbondingTime.Seconds())
	require.Equal(t, sdk.DefaultPowerReduction, stakingGenesis.Params.PowerReduction)
	// check numbers of Delegations and Validators
	require.Len(t, stakingGenesis.Delegations, 3)
	require.Len(t, stakingGenesis.Validators, 3)
	// check Delegations
	require.Equal(t, "cosmos1tnh2q55v8wyygtt9srz5safamzdengsnqeycj3", stakingGenesis.Delegations[0].DelegatorAddress)
	require.Equal(t, "cosmosvaloper1tnh2q55v8wyygtt9srz5safamzdengsn9dsd7z", stakingGenesis.Delegations[0].ValidatorAddress)
	require.Equal(t, "1000.000000000000000000", stakingGenesis.Delegations[0].Shares.String())
	// check validators
	require.Equal(t, "cosmosvaloper1ghekyjucln7y67ntx7cf27m9dpuxxemnsvnaes", stakingGenesis.Validators[2].GetOperator().String())
	require.Equal(t, []byte{0xa, 0x20, 0x51, 0xde, 0xbd, 0xe8, 0xfa, 0xdf, 0x4e, 0xfc, 0x33, 0xa5, 0x16, 0x94, 0xf6, 0xee, 0xd3, 0x69, 0x7a, 0x7a, 0x1c, 0x2d, 0x50, 0xb6, 0x2, 0xf7, 0x16, 0x4e, 0x66, 0x9f, 0xff, 0x38, 0x91, 0x9b}, stakingGenesis.Validators[2].ConsensusPubkey.Value)
	require.Equal(t, false, stakingGenesis.Validators[2].Jailed)
	require.Equal(t, "BOND_STATUS_UNBONDED", stakingGenesis.Validators[2].Status.String())
	require.Equal(t, "1000", stakingGenesis.Validators[2].Tokens.String())
	require.Equal(t, "1000.000000000000000000", stakingGenesis.Validators[2].DelegatorShares.String())
	require.Equal(t, "0.292059246265731326", stakingGenesis.Validators[2].Commission.CommissionRates.Rate.String())
	require.Equal(t, "0.330000000000000000", stakingGenesis.Validators[2].Commission.CommissionRates.MaxRate.String())
	require.Equal(t, "0.038337453731274481", stakingGenesis.Validators[2].Commission.CommissionRates.MaxChangeRate.String())
	require.Equal(t, "1", stakingGenesis.Validators[2].MinSelfDelegation.String())
}
explode_data.jsonl/25484
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1134 }
[ 2830, 3393, 13999, 1506, 9967, 1397, 1155, 353, 8840, 836, 8, 341, 58915, 1564, 15603, 1669, 20329, 439, 1804, 7121, 5051, 15603, 741, 1444, 3571, 43688, 66, 19983, 41066, 75487, 15603, 340, 1444, 7628, 1669, 34647, 7121, 31549, 36913, 75...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAutoUnsubscribePropagationOnClientDisconnect verifies that an
// auto-unsubscribe (UNSUB <sid> <max>) subscription still propagates across
// the two-server cluster, and that all subscriptions are removed from the
// cluster once the client disconnects.
func TestAutoUnsubscribePropagationOnClientDisconnect(t *testing.T) {
	srvA, srvB, optsA, _ := runServers(t)
	defer srvA.Shutdown()
	defer srvB.Shutdown()

	cluster := []*server.Server{srvA, srvB}

	clientA := createClientConn(t, optsA.Host, optsA.Port)
	defer clientA.Close()
	sendA, expectA := setupConn(t, clientA)

	// No subscriptions. Ready to test.
	if err := checkExpectedSubs(0, cluster...); err != nil {
		t.Fatalf("%v", err)
	}

	// Subscribe, then auto-unsubscribe after 1 message; PING/PONG flushes.
	sendA("SUB foo 1\r\n")
	sendA("UNSUB 1 1\r\n")
	sendA("PING\r\n")
	expectA(pongRe)

	// Waiting cluster subs propagation
	if err := checkExpectedSubs(1, cluster...); err != nil {
		t.Fatalf("%v", err)
	}

	clientA.Close()

	// No subs should be on the cluster when all clients is disconnected
	if err := checkExpectedSubs(0, cluster...); err != nil {
		t.Fatalf("%v", err)
	}
}
explode_data.jsonl/5076
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 332 }
[ 2830, 3393, 13253, 1806, 9384, 35172, 1925, 2959, 60651, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 32, 11, 43578, 33, 11, 12185, 32, 11, 716, 1669, 1598, 78139, 1155, 340, 16867, 43578, 32, 10849, 18452, 741, 16867, 43578, 33, 10849, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// Test_checkHealth_WhenBrokerInMetadataAndProducedMessageIsConsumed_ReportsHealthy
// wires up a fully working mock broker (healthy metadata, produced probe
// message consumed) and expects CheckHealth to report the broker as in-sync
// and the cluster as green.
func Test_checkHealth_WhenBrokerInMetadataAndProducedMessageIsConsumed_ReportsHealthy(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	stop := make(chan struct{})
	awaitCheck := sync.WaitGroup{}

	check, zk := newZkTestCheck(ctrl)
	connection := workingBroker(check, ctrl, stop)
	connection.EXPECT().Dial(gomock.Any(), gomock.Any()).Return(nil)
	connection.EXPECT().Consumer(gomock.Any()).Return(check.consumer, nil)
	connection.EXPECT().Producer(gomock.Any()).Return(check.producer)
	connection.EXPECT().Metadata().Return(healthyMetadata(check.config.topicName, check.config.replicationTopicName), nil).AnyTimes()
	connection.EXPECT().Close()
	zk.mockHealthyMetadata(check.config.topicName, check.config.replicationTopicName)

	brokerUpdates := make(chan Update)
	defer close(brokerUpdates)
	clusterUpdates := make(chan Update)
	defer close(clusterUpdates)

	// Run the health check in the background; WaitGroup lets us join it
	// after signalling stop.
	awaitCheck.Add(1)
	go func() {
		check.CheckHealth(brokerUpdates, clusterUpdates, stop)
		awaitCheck.Done()
	}()

	brokerStatus := <-brokerUpdates
	clusterStatus := <-clusterUpdates
	close(stop)
	awaitCheck.Wait()

	if brokerStatus.Status != insync {
		t.Errorf("CheckHealth reported broker status as %s, expected %s", brokerStatus.Status, insync)
	}
	if clusterStatus.Status != green {
		t.Errorf("CheckHealth reported cluster status as %v, expected %s", clusterStatus.Status, green)
	}
}
explode_data.jsonl/43291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 466 }
[ 2830, 3393, 7200, 14542, 62, 4498, 65545, 641, 14610, 3036, 18510, 1998, 2052, 3872, 41966, 291, 62, 23748, 96113, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 62644, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVolumeGetAccessModes(t *testing.T) { plugMgr, tmpDir := newPluginMgr(t, makeScaleIOSecret(testSecret, testns)) defer os.RemoveAll(tmpDir) plug, err := plugMgr.FindPersistentPluginByName(sioPluginName) if err != nil { t.Errorf("Can't find the plugin %v", sioPluginName) } if !containsMode(plug.GetAccessModes(), api.ReadWriteOnce) { t.Errorf("Expected two AccessModeTypes: %s or %s", api.ReadWriteOnce, api.ReadOnlyMany) } }
explode_data.jsonl/29493
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 18902, 1949, 6054, 70035, 1155, 353, 8840, 836, 8, 341, 197, 47474, 25567, 11, 4174, 6184, 1669, 501, 11546, 25567, 1155, 11, 1281, 6947, 28136, 50856, 8623, 19773, 11, 1273, 4412, 1171, 16867, 2643, 84427, 10368, 6184, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// Test_DAOKey exercises the application-key DAO round trip: insert an SSH key,
// verify the private part is redacted on plain loads but retrievable via the
// WithPrivateContent variant, and finally delete it.
func Test_DAOKey(t *testing.T) {
	db, cache, end := test.SetupPG(t)
	defer end()

	key := sdk.RandomString(10)
	proj := assets.InsertTestProject(t, db, cache, key, key)
	app := sdk.Application{
		Name: "my-app",
	}
	require.NoError(t, application.Insert(db, cache, *proj, &app))

	k := &sdk.ApplicationKey{
		Name:          "mykey-ssh",
		Type:          sdk.KeyTypeSSH,
		ApplicationID: app.ID,
	}

	kssh, err := keys.GenerateSSHKey(k.Name)
	require.NoError(t, err)
	k.Public = kssh.Public
	k.Private = kssh.Private
	k.KeyID = kssh.KeyID
	require.NoError(t, application.InsertKey(db, k))
	// InsertKey masks the private part on the in-memory struct.
	assert.Equal(t, sdk.PasswordPlaceholder, k.Private)

	// Plain load: private content must stay redacted.
	ks, err := application.LoadAllKeys(db, app.ID)
	require.NoError(t, err)
	assert.Equal(t, sdk.PasswordPlaceholder, ks[0].Private)

	// Explicit private-content load: original private key comes back.
	ks, err = application.LoadAllKeysWithPrivateContent(db, app.ID)
	require.NoError(t, err)
	assert.Equal(t, kssh.Private, ks[0].Private)

	require.NoError(t, application.DeleteKey(db, app.ID, k.Name))
}
explode_data.jsonl/3964
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 1557, 18746, 1592, 1155, 353, 8840, 836, 8, 341, 20939, 11, 6500, 11, 835, 1669, 1273, 39820, 11383, 1155, 340, 16867, 835, 2822, 23634, 1669, 45402, 26709, 703, 7, 16, 15, 340, 197, 30386, 1669, 11770, 23142, 2271, 7849, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCharsetDifferenceRange checks Charset.DifferenceRange: each charset is
// built from an escaped byte-range spec, the given numeric Range is
// subtracted, and the remaining members plus cardinality are compared.
func TestCharsetDifferenceRange(t *testing.T) {
	testdata := []struct {
		c    string // charset spec, e.g. "\\x03-\\x0a" for bytes 3..10
		r    Range  // inclusive-start range of byte values to subtract
		str  string // expected remaining members as StringAsInt() output
		size uint32 // expected resulting cardinality
	}{
		{"", Range{1, 2}, "", 0},
		{"\\x01-\\x02", Range{1, 3}, "", 0},
		{"\\x03-\\x0a", Range{2, 6}, "6-10", 5},
		{"\\x03-\\x0a", Range{2, 11}, "", 0},
		{"\\x03-\\x0a", Range{3, 12}, "", 0},
		{"\\x03-\\x0a", Range{4, 11}, "3", 1},
		{"\\x03-\\x0a", Range{5, 8}, "3-4, 8-10", 5},
		{"\\x03-\\x0a", Range{1, 4}, "4-10", 7},
		{"\\x03-\\x0a", Range{10, 13}, "3-9", 7},
		{"\\x01-\\x02\\x0a-\\x0f\\x1a-\\x1f", Range{1, 3}, "10-15, 26-31", 12},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{2, 5}, "1, 5-10, 26-31", 13},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{2, 11}, "1, 26-31", 7},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{3, 12}, "1-2, 26-31", 8},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{5, 11}, "1-2, 4, 26-31", 9},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{5, 8}, "1-2, 4, 8-10, 26-31", 12},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{1, 4}, "4-10, 26-31", 13},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{10, 13}, "1-2, 4-9, 26-31", 14},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{2, 29}, "1, 29-31", 4},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{2, 32}, "1", 1},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{5, 29}, "1-2, 4, 29-31", 6},
		{"\\x01-\\x02\\x04-\\x0a\\x1a-\\x1f", Range{5, 32}, "1-2, 4", 3},
	}
	for i, v := range testdata {
		v := v // capture per-iteration copy for the parallel subtest
		t.Run(fmt.Sprintf("%d", i), func(t *testing.T) {
			t.Parallel()
			c := NewCharset()
			c.MakeFromBytes([]byte(v.c))
			c.DifferenceRange(&v.r)
			str := c.StringAsInt()
			test.EXPECT_EQ(t, str, v.str, "")
			test.EXPECT_EQ(t, c.Size(), v.size, "")
		})
	}
}
explode_data.jsonl/51964
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1043 }
[ 2830, 3393, 78172, 62707, 6046, 1155, 353, 8840, 836, 8, 972, 18185, 691, 1669, 3056, 1235, 972, 197, 1444, 262, 914, 319, 197, 7000, 262, 16437, 319, 197, 11355, 220, 914, 319, 197, 13832, 2622, 18, 17, 319, 197, 92, 1666, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUpdatableApps table-tests the `updatable: true` android_app property:
// every stable SDK flavor is accepted, while platform APIs, core-platform,
// an unspecified SDK, and a missing min_sdk_version must each produce the
// matching build error.
func TestUpdatableApps(t *testing.T) {
	testCases := []struct {
		name          string
		bp            string
		expectedError string // empty means the blueprint must build cleanly
	}{
		{
			name: "Stable public SDK",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					sdk_version: "29",
					min_sdk_version: "29",
					updatable: true,
				}`,
		},
		{
			name: "Stable system SDK",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					sdk_version: "system_29",
					min_sdk_version: "29",
					updatable: true,
				}`,
		},
		{
			name: "Current public SDK",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					sdk_version: "current",
					min_sdk_version: "29",
					updatable: true,
				}`,
		},
		{
			name: "Current system SDK",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					sdk_version: "system_current",
					min_sdk_version: "29",
					updatable: true,
				}`,
		},
		{
			name: "Current module SDK",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					sdk_version: "module_current",
					min_sdk_version: "29",
					updatable: true,
				}`,
		},
		{
			name: "Current core SDK",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					sdk_version: "core_current",
					min_sdk_version: "29",
					updatable: true,
				}`,
		},
		{
			name: "No Platform APIs",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					platform_apis: true,
					min_sdk_version: "29",
					updatable: true,
				}`,
			expectedError: "Updatable apps must use stable SDKs",
		},
		{
			name: "No Core Platform APIs",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					sdk_version: "core_platform",
					min_sdk_version: "29",
					updatable: true,
				}`,
			expectedError: "Updatable apps must use stable SDKs",
		},
		{
			name: "No unspecified APIs",
			bp: `android_app {
					name: "foo",
					srcs: ["a.java"],
					updatable: true,
					min_sdk_version: "29",
				}`,
			expectedError: "Updatable apps must use stable SDK",
		},
		{
			name: "Must specify min_sdk_version",
			bp: `android_app {
					name: "app_without_min_sdk_version",
					srcs: ["a.java"],
					sdk_version: "29",
					updatable: true,
				}`,
			expectedError: "updatable apps must set min_sdk_version.",
		},
	}

	for _, test := range testCases {
		t.Run(test.name, func(t *testing.T) {
			errorHandler := android.FixtureExpectsNoErrors
			if test.expectedError != "" {
				errorHandler = android.FixtureExpectsAtLeastOneErrorMatchingPattern(test.expectedError)
			}
			android.GroupFixturePreparers(
				prepareForJavaTest, FixtureWithPrebuiltApis(map[string][]string{
					"29": {"foo"},
				})).
				ExtendWithErrorHandler(errorHandler).RunTestWithBp(t, test.bp)
		})
	}
}
explode_data.jsonl/58477
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1360 }
[ 2830, 3393, 2324, 88831, 53602, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 2233, 79, 310, 914, 198, 197, 42400, 1454, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeleteClusterStep_Name(t *testing.T) { s := &DeleteClusterStep{} if name := s.Name(); name != DeleteClusterStepName { t.Errorf("Wrong step name expected %s actual %s", DeleteClusterStepName, name) } }
explode_data.jsonl/30785
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 6435, 28678, 8304, 19015, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 6435, 28678, 8304, 31483, 743, 829, 1669, 274, 2967, 2129, 829, 961, 10428, 28678, 8304, 675, 341, 197, 3244, 13080, 445, 29185, 3019, 829, 3601, 1018,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSplitURLPath(t *testing.T) { var table = map[string][2][]string{ "/*name": {{"/", "*"}, {"name"}}, "/users/:name": {{"/users/", ":"}, {"name"}}, "/users/:name/put": {{"/users/", ":", "/put"}, {"name"}}, "/users/:name/put/:section": {{"/users/", ":", "/put/", ":"}, {"name", "section"}}, "/customers/:name/put/:section": {{"/customers/", ":", "/put/", ":"}, {"name", "section"}}, "/customers/groups/:name/put/:section": {{"/customers/groups/", ":", "/put/", ":"}, {"name", "section"}}, } for path, result := range table { parts, names := splitURLpath(path) if !assert_equals_string(parts, result[0]) { t.Errorf("Expected %v %v: %v %v", result[0], result[1], parts, names) } } }
explode_data.jsonl/45453
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 397 }
[ 2830, 3393, 20193, 3144, 1820, 1155, 353, 8840, 836, 8, 1476, 2405, 1965, 284, 2415, 14032, 1457, 17, 45725, 917, 515, 197, 197, 1, 1057, 606, 788, 7561, 5867, 3115, 497, 15630, 14345, 5212, 606, 48085, 197, 197, 3115, 4218, 11315, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestService_GetLinkList checks that Service.GetLinkList returns a non-nil
// page of short links without error for an empty pager.
func TestService_GetLinkList(t *testing.T) {
	type fields struct {
		ctx context.Context
	}
	type args struct {
		pager *app.Pager
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		want    []*model.Link
		wantErr bool
	}{
		{
			// Case name is Chinese for "query the short-link list".
			name: "查询短链接列表",
			fields: fields{
				ctx: context.Background(),
			},
			args: args{
				&app.Pager{},
			},
			wantErr: false,
		},
	}
	setup()
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			svc := &Service{
				ctx: tt.fields.ctx,
			}
			got, err := svc.GetLinkList(tt.args.pager)
			if (err != nil) != tt.wantErr {
				t.Errorf("GetLinkList() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			assert.NotNil(t, got)
			for _, l := range got {
				t.Logf("link: %+v", l)
			}
		})
	}
}
explode_data.jsonl/81477
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 404 }
[ 2830, 3393, 1860, 13614, 3939, 852, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 20985, 2266, 9328, 198, 197, 532, 13158, 2827, 2036, 341, 197, 3223, 1409, 353, 676, 1069, 1409, 198, 197, 532, 78216, 1669, 3056, 1235, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestRequestHandlerGetResourceLinksDevice onboards a simulated device and
// exercises the HTTP gateway endpoint that lists a device's resource links:
// no filter (all links), an unknown type filter (no links), and a filter
// matching collection + binary-switch resource types.
func TestRequestHandlerGetResourceLinksDevice(t *testing.T) {
	deviceID := test.MustFindDeviceByName(test.TestDeviceName)
	ctx, cancel := context.WithTimeout(context.Background(), time.Second*20)
	defer cancel()

	tearDown := service.SetUp(ctx, t)
	defer tearDown()

	token := oauthTest.GetDefaultAccessToken(t)
	ctx = kitNetGrpc.CtxWithToken(ctx, token)

	conn, err := grpc.Dial(config.GRPC_HOST, grpc.WithTransportCredentials(credentials.NewTLS(&tls.Config{
		RootCAs: test.GetRootCertificatePool(t),
	})))
	require.NoError(t, err)
	defer func() {
		_ = conn.Close()
	}()
	c := pb.NewGrpcGatewayClient(conn)

	resourceLinks := test.GetAllBackendResourceLinks()
	_, shutdownDevSim := test.OnboardDevSim(ctx, t, c, deviceID, config.GW_HOST, resourceLinks)
	defer shutdownDevSim()
	resourceLinks = append(resourceLinks, test.AddDeviceSwitchResources(ctx, t, deviceID, c, "1", "2", "3")...)
	// Give the backend a moment to publish the freshly added switch resources.
	time.Sleep(200 * time.Millisecond)

	shutdownHttp := httpgwTest.SetUp(t)
	defer shutdownHttp()

	type args struct {
		typeFilter []string
	}
	tests := []struct {
		name string
		args args
		want []*events.ResourceLinksPublished
	}{
		{
			name: "valid",
			args: args{},
			want: []*events.ResourceLinksPublished{
				{
					DeviceId:     deviceID,
					Resources:    test.ResourceLinksToResources(deviceID, resourceLinks),
					AuditContext: commands.NewAuditContext(oauthService.DeviceUserID, ""),
				},
			},
		},
		{
			name: "invalid typefilter",
			args: args{
				typeFilter: []string{"unknown"},
			},
			want: []*events.ResourceLinksPublished{
				{
					DeviceId:     deviceID,
					AuditContext: commands.NewAuditContext(oauthService.DeviceUserID, ""),
				},
			},
		},
		{
			name: "valid typefilter",
			args: args{
				typeFilter: []string{collection.ResourceType, types.BINARY_SWITCH},
			},
			want: []*events.ResourceLinksPublished{
				{
					DeviceId: deviceID,
					Resources: test.ResourceLinksToResources(deviceID, test.FilterResourceLink(func(rl schema.ResourceLink) bool {
						return strings.Contains(rl.ResourceTypes, collection.ResourceType) || strings.Contains(rl.ResourceTypes, types.BINARY_SWITCH)
					}, resourceLinks)),
					AuditContext: commands.NewAuditContext(oauthService.DeviceUserID, ""),
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rb := httpgwTest.NewRequest(http.MethodGet, uri.AliasDeviceResourceLinks, nil).AuthToken(token).DeviceId(deviceID).AddTypeFilter(tt.args.typeFilter)
			resp := httpgwTest.HTTPDo(t, rb.Build())
			defer func() {
				_ = resp.Body.Close()
			}()

			// The endpoint streams one JSON document per published link;
			// read until EOF.
			var links []*events.ResourceLinksPublished
			for {
				var v events.ResourceLinksPublished
				err = Unmarshal(resp.StatusCode, resp.Body, &v)
				if err == io.EOF {
					break
				}
				require.NoError(t, err)
				links = append(links, pbTest.CleanUpResourceLinksPublished(&v, true))
			}
			test.CheckProtobufs(t, tt.want, links, test.RequireToCheckFunc(require.Equal))
		})
	}
}
explode_data.jsonl/56488
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1172 }
[ 2830, 3393, 1900, 3050, 1949, 4783, 24089, 6985, 1155, 353, 8840, 836, 8, 341, 54719, 915, 1669, 1273, 50463, 9885, 6985, 16898, 8623, 8787, 6985, 675, 692, 20985, 11, 9121, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 882, 32435, 9, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAccUser_full is a Terraform acceptance test: it applies the userFull
// configuration and verifies every attribute of artifactory_user.foobar,
// tearing the user down again via testAccCheckUserDestroy.
func TestAccUser_full(t *testing.T) {
	resource.Test(t, resource.TestCase{
		PreCheck:     func() { testAccPreCheck(t) },
		CheckDestroy: testAccCheckUserDestroy("artifactory_user.foobar"),
		Providers:    testAccProviders,
		Steps: []resource.TestStep{
			{
				Config: userFull,
				Check: resource.ComposeTestCheckFunc(
					resource.TestCheckResourceAttr("artifactory_user.foobar", "name", "dummy_user"),
					resource.TestCheckResourceAttr("artifactory_user.foobar", "email", "dummy@a.com"),
					resource.TestCheckResourceAttr("artifactory_user.foobar", "admin", "true"),
					resource.TestCheckResourceAttr("artifactory_user.foobar", "profile_updatable", "true"),
					resource.TestCheckResourceAttr("artifactory_user.foobar", "groups.#", "1"),
				),
			},
		},
	})
}
explode_data.jsonl/68365
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 14603, 1474, 16372, 1155, 353, 8840, 836, 8, 341, 50346, 8787, 1155, 11, 5101, 31363, 515, 197, 197, 4703, 3973, 25, 257, 2915, 368, 314, 1273, 14603, 4703, 3973, 1155, 8, 1153, 197, 69472, 14245, 25, 1273, 14603, 3973, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApplicationContext_Profile(t *testing.T) { t.Run("bean:_c:", func(t *testing.T) { c, ch := container() c.Object(&BeanZero{5}) err := c.Refresh() assert.Nil(t, err) p := <-ch var b *BeanZero err = p.BeanRegistry().Get(&b) assert.Nil(t, err) }) t.Run("bean:_c:test", func(t *testing.T) { c, ch := container() c.Property(gs.SpringProfilesActive, "test") c.Object(&BeanZero{5}) err := c.Refresh() assert.Nil(t, err) p := <-ch var b *BeanZero err = p.BeanRegistry().Get(&b) assert.Nil(t, err) }) }
explode_data.jsonl/17406
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 19736, 77294, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 17479, 22035, 66, 12147, 2915, 1155, 353, 8840, 836, 8, 1476, 197, 1444, 11, 521, 1669, 5476, 741, 197, 1444, 8348, 2099, 10437, 17999, 90, 20, 3518, 197, 9859...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTermIsGround(t *testing.T) { tests := []struct { note string term string expected bool }{ {"null", "null", true}, {"string", `"foo"`, true}, {"number", "42.1", true}, {"boolean", "false", true}, {"var", "x", false}, {"ref ground", "a.b[0]", true}, {"ref non-ground", "a.b[i].x", false}, {"array ground", "[1,2,3]", true}, {"array non-ground", "[1,2,x]", false}, {"set ground", "{1,2,3}", true}, {"Set non-ground", "{1,2,x}", false}, {"object ground", `{"a": 1}`, true}, {"object non-ground key", `{"x": 1, y: 2}`, false}, {"object non-ground value", `{"x": 1, "y": y}`, false}, {"array compr ground", `["a" | true]`, true}, {"array compr non-ground", `[x | x = a[i]]`, false}, } for i, tc := range tests { term := MustParseTerm(tc.term) if term.IsGround() != tc.expected { expected := "ground" if !tc.expected { expected = "non-ground" } t.Errorf("Expected term %v to be %s (test case %d: %v)", term, expected, i, tc.note) } } }
explode_data.jsonl/2913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 443 }
[ 2830, 3393, 17249, 3872, 30714, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 9038, 1272, 257, 914, 198, 197, 197, 4991, 257, 914, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 4913, 2921, 497, 330, 2921, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestList(t *testing.T) { tcs := []struct { name string namespace string path string resp []byte want *unstructured.UnstructuredList }{ { name: "normal_list", path: "/apis/gtest/vtest/rtest", resp: getListJSON("vTest", "rTestList", getJSON("vTest", "rTest", "item1"), getJSON("vTest", "rTest", "item2")), want: &unstructured.UnstructuredList{ Object: map[string]interface{}{ "apiVersion": "vTest", "kind": "rTestList", }, Items: []unstructured.Unstructured{ *getObject("vTest", "rTest", "item1"), *getObject("vTest", "rTest", "item2"), }, }, }, { name: "namespaced_list", namespace: "nstest", path: "/apis/gtest/vtest/namespaces/nstest/rtest", resp: getListJSON("vTest", "rTestList", getJSON("vTest", "rTest", "item1"), getJSON("vTest", "rTest", "item2")), want: &unstructured.UnstructuredList{ Object: map[string]interface{}{ "apiVersion": "vTest", "kind": "rTestList", }, Items: []unstructured.Unstructured{ *getObject("vTest", "rTest", "item1"), *getObject("vTest", "rTest", "item2"), }, }, }, } for _, tc := range tcs { gv := &schema.GroupVersion{Group: "gtest", Version: "vtest"} resource := &metav1.APIResource{Name: "rtest", Namespaced: len(tc.namespace) != 0} cl, srv, err := getClientServer(gv, func(w http.ResponseWriter, r *http.Request) { if r.Method != "GET" { t.Errorf("List(%q) got HTTP method %s. wanted GET", tc.name, r.Method) } if r.URL.Path != tc.path { t.Errorf("List(%q) got path %s. wanted %s", tc.name, r.URL.Path, tc.path) } w.Header().Set("Content-Type", runtime.ContentTypeJSON) w.Write(tc.resp) }) if err != nil { t.Errorf("unexpected error when creating client: %v", err) continue } defer srv.Close() got, err := cl.Resource(resource, tc.namespace).List(metav1.ListOptions{}) if err != nil { t.Errorf("unexpected error when listing %q: %v", tc.name, err) continue } if !reflect.DeepEqual(got, tc.want) { t.Errorf("List(%q) want: %v\ngot: %v", tc.name, tc.want, got) } } }
explode_data.jsonl/37540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1005 }
[ 2830, 3393, 852, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 56623, 914, 198, 197, 26781, 414, 914, 198, 197, 34653, 414, 3056, 3782, 198, 197, 50780, 414, 353, 359, 51143, 10616, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTargetQueue_DepsBuilt(t *testing.T) { f := newTargetQueueFixture(t) fooTarget := model.NewImageTarget(container.MustParseSelector("foo")) s1 := store.BuildState{LastSuccessfulResult: store.NewImageBuildResult(fooTarget.ID(), container.MustParseNamedTagged("foo:1234"))} barTarget := model.NewImageTarget(container.MustParseSelector("bar")).WithDependencyIDs([]model.TargetID{fooTarget.ID()}) s2 := store.BuildState{} targets := []model.ImageTarget{fooTarget, barTarget} buildStateSet := store.BuildStateSet{ fooTarget.ID(): s1, barTarget.ID(): s2, } f.run(targets, buildStateSet) barCall := newFakeBuildHandlerCall(barTarget, s2, 1, []store.BuildResult{ store.NewImageBuildResult(fooTarget.ID(), store.ImageFromBuildResult(s1.LastSuccessfulResult)), }) // foo has a valid last result, so only bar gets rebuilt expectedCalls := map[model.TargetID]fakeBuildHandlerCall{ barTarget.ID(): barCall, } assert.Equal(t, expectedCalls, f.handler.calls) }
explode_data.jsonl/2247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 6397, 7554, 1557, 7124, 54300, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 6397, 7554, 18930, 1155, 692, 197, 7975, 6397, 1669, 1614, 7121, 1906, 6397, 28168, 50463, 14463, 5877, 445, 7975, 5455, 1903, 16, 1669, 3553, 252...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGripInterceptors(t *testing.T) { for _, test := range []struct { name string unaryInfo *grpc.UnaryServerInfo streamInfo *grpc.StreamServerInfo action string err bool }{ { name: "ValidLogging", unaryInfo: &grpc.UnaryServerInfo{}, }, { name: "ErrorLogging", unaryInfo: &grpc.UnaryServerInfo{}, action: "error", err: true, }, { name: "PanicLogging", unaryInfo: &grpc.UnaryServerInfo{}, action: "panic", err: true, }, } { t.Run(test.name, func(t *testing.T) { t.Run("Unary", func(t *testing.T) { sender, err := send.NewInternalLogger("test", grip.GetSender().Level()) require.NoError(t, err) journaler := logging.MakeGrip(sender) startAt := getNumber() interceptor := MakeGripUnaryInterceptor(journaler) _, err = interceptor(context.TODO(), test.action, &grpc.UnaryServerInfo{}, mockUnaryHandler) if test.err { assert.Error(t, err) } else { assert.NoError(t, err) } assert.Equal(t, startAt+2, getNumber()) if assert.True(t, sender.HasMessage()) { require.Equal(t, 2, sender.Len()) msg := sender.GetMessage() assert.Equal(t, level.Debug, msg.Priority) msg = sender.GetMessage() assert.Equal(t, expectedPriority(test.action), msg.Priority) } }) t.Run("Streaming", func(t *testing.T) { sender, err := send.NewInternalLogger("test", grip.GetSender().Level()) require.NoError(t, err) journaler := logging.MakeGrip(sender) startAt := getNumber() interceptor := MakeGripStreamInterceptor(journaler) err = interceptor(test.action, &mockServerStream{}, &grpc.StreamServerInfo{}, mockStreamHandler) if test.err { assert.Error(t, err) } else { assert.NoError(t, err) } assert.Equal(t, startAt+2, getNumber()) if assert.True(t, sender.HasMessage()) { require.Equal(t, 2, sender.Len()) msg := sender.GetMessage() assert.Equal(t, level.Debug, msg.Priority) msg = sender.GetMessage() assert.Equal(t, expectedPriority(test.action), msg.Priority) } }) }) } }
explode_data.jsonl/70540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 976 }
[ 2830, 3393, 38, 4561, 3306, 57550, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 20479, 658, 1731, 220, 353, 56585, 10616, 658, 5475, 1731, 198, 197, 44440, 1731, 353, 565...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLimitRangerIgnoresSubresource(t *testing.T) { client := fake.NewSimpleClientset() indexer := cache.NewIndexer(cache.MetaNamespaceKeyFunc, cache.Indexers{"namespace": cache.MetaNamespaceIndexFunc}) handler := &limitRanger{ Handler: admission.NewHandler(admission.Create, admission.Update), client: client, limitFunc: Limit, indexer: indexer, } limitRange := validLimitRangeNoDefaults() testPod := validPod("testPod", 1, api.ResourceRequirements{}) indexer.Add(&limitRange) err := handler.Admit(admission.NewAttributesRecord(&testPod, api.Kind("Pod"), limitRange.Namespace, "testPod", api.Resource("pods"), "", admission.Update, nil)) if err == nil { t.Errorf("Expected an error since the pod did not specify resource limits in its update call") } err = handler.Admit(admission.NewAttributesRecord(&testPod, api.Kind("Pod"), limitRange.Namespace, "testPod", api.Resource("pods"), "status", admission.Update, nil)) if err != nil { t.Errorf("Should have ignored calls to any subresource of pod %v", err) } }
explode_data.jsonl/16885
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 342 }
[ 2830, 3393, 16527, 49, 4003, 40, 70, 2152, 416, 3136, 9233, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 12418, 7121, 16374, 2959, 746, 741, 26327, 261, 1669, 6500, 7121, 1552, 261, 31933, 58806, 22699, 1592, 9626, 11, 6500, 18338, 388, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestCondBreakpointError first installs a breakpoint whose condition refers
// to an undefined symbol and verifies that Continue reports the evaluation
// failure, then repairs the condition (n == 7) and verifies the target stops
// where it holds.
func TestCondBreakpointError(t *testing.T) {
	skipOn(t, "broken", "freebsd")
	protest.AllowRecording(t)
	withTestProcess("parallel_next", t, func(p *proc.Target, fixture protest.Fixture) {
		bp := setFileBreakpoint(p, t, fixture.Source, 9)
		// Condition references a symbol that does not exist in the target,
		// so hitting the breakpoint must surface an evaluation error.
		bp.UserBreaklet().Cond = &ast.BinaryExpr{
			Op: token.EQL,
			X:  &ast.Ident{Name: "nonexistentvariable"},
			Y:  &ast.BasicLit{Kind: token.INT, Value: "7"},
		}
		err := p.Continue()
		if err == nil {
			t.Fatalf("No error on first Continue()")
		}
		// Either error text is acceptable depending on how many breakpoint
		// conditions failed to evaluate.
		if err.Error() != "error evaluating expression: could not find symbol value for nonexistentvariable" && err.Error() != "multiple errors evaluating conditions" {
			t.Fatalf("Unexpected error on first Continue(): %v", err)
		}
		// Replace the condition with a valid one: n == 7.
		bp.UserBreaklet().Cond = &ast.BinaryExpr{
			Op: token.EQL,
			X:  &ast.Ident{Name: "n"},
			Y:  &ast.BasicLit{Kind: token.INT, Value: "7"},
		}
		err = p.Continue()
		if err != nil {
			// A clean process exit is tolerated; any other error fails.
			if _, exited := err.(proc.ErrProcessExited); !exited {
				t.Fatalf("Unexpected error on second Continue(): %v", err)
			}
		} else {
			// Stopped at the breakpoint: confirm the condition held.
			nvar := evalVariable(p, t, "n")
			n, _ := constant.Int64Val(nvar.Value)
			if n != 7 {
				t.Fatalf("Stopped on wrong goroutine %d\n", n)
			}
		}
	})
}
explode_data.jsonl/56239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 496 }
[ 2830, 3393, 49696, 22524, 2768, 1454, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 1925, 1155, 11, 330, 48909, 497, 330, 10593, 51835, 1138, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 46103, 11257, 497, 259, 11, 2915...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestSchedulerCreation registers a fit predicate, a priority function, and
// an algorithm provider referencing them, then constructs a scheduler via
// New with a full set of fake informers; the test fails if construction
// returns an error.
func TestSchedulerCreation(t *testing.T) {
	client := clientsetfake.NewSimpleClientset()
	informerFactory := informers.NewSharedInformerFactory(client, 0)
	testSource := "testProvider"
	eventBroadcaster := events.NewBroadcaster(&events.EventSinkImpl{Interface: client.EventsV1beta1().Events("")})
	defaultBindTimeout := int64(30)
	// Register the predicate/priority pair and expose them through a named
	// algorithm provider that New will look up via SchedulerAlgorithmSource.
	factory.RegisterFitPredicate("PredicateOne", PredicateOne)
	factory.RegisterPriorityFunction("PriorityOne", PriorityOne, 1)
	factory.RegisterAlgorithmProvider(testSource, sets.NewString("PredicateOne"), sets.NewString("PriorityOne"))
	stopCh := make(chan struct{})
	defer close(stopCh)
	_, err := New(client,
		informerFactory.Core().V1().Nodes(),
		factory.NewPodInformer(client, 0),
		informerFactory.Core().V1().PersistentVolumes(),
		informerFactory.Core().V1().PersistentVolumeClaims(),
		informerFactory.Core().V1().ReplicationControllers(),
		informerFactory.Apps().V1().ReplicaSets(),
		informerFactory.Apps().V1().StatefulSets(),
		informerFactory.Core().V1().Services(),
		informerFactory.Policy().V1beta1().PodDisruptionBudgets(),
		informerFactory.Storage().V1().StorageClasses(),
		informerFactory.Storage().V1beta1().CSINodes(),
		eventBroadcaster.NewRecorder(scheme.Scheme, "scheduler"),
		kubeschedulerconfig.SchedulerAlgorithmSource{Provider: &testSource},
		stopCh,
		emptyPluginRegistry,
		nil,
		emptyPluginConfig,
		WithBindTimeoutSeconds(defaultBindTimeout))
	if err != nil {
		t.Fatalf("Failed to create scheduler: %v", err)
	}
}
explode_data.jsonl/24708
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 500 }
[ 2830, 3393, 38878, 32701, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 2943, 746, 30570, 7121, 16374, 2959, 746, 741, 17430, 34527, 4153, 1669, 6051, 388, 7121, 16997, 641, 34527, 4153, 12805, 11, 220, 15, 692, 18185, 3608, 1669, 330, 194...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBindLiteral(t *testing.T) { expr, diags := BindExpressionText("false", nil, hcl.Pos{}) assert.Len(t, diags, 0) assert.Equal(t, BoolType, expr.Type()) lit, ok := expr.(*LiteralValueExpression) assert.True(t, ok) assert.Equal(t, cty.False, lit.Value) assert.Equal(t, "false", fmt.Sprintf("%v", expr)) expr, diags = BindExpressionText("true", nil, hcl.Pos{}) assert.Len(t, diags, 0) assert.Equal(t, BoolType, expr.Type()) lit, ok = expr.(*LiteralValueExpression) assert.True(t, ok) assert.Equal(t, cty.True, lit.Value) assert.Equal(t, "true", fmt.Sprintf("%v", expr)) expr, diags = BindExpressionText("0", nil, hcl.Pos{}) assert.Len(t, diags, 0) assert.Equal(t, NumberType, expr.Type()) lit, ok = expr.(*LiteralValueExpression) assert.True(t, ok) assert.True(t, cty.NumberIntVal(0).RawEquals(lit.Value)) assert.Equal(t, "0", fmt.Sprintf("%v", expr)) expr, diags = BindExpressionText("3.14", nil, hcl.Pos{}) assert.Len(t, diags, 0) assert.Equal(t, NumberType, expr.Type()) lit, ok = expr.(*LiteralValueExpression) assert.True(t, ok) assert.True(t, cty.MustParseNumberVal("3.14").RawEquals(lit.Value)) assert.Equal(t, "3.14", fmt.Sprintf("%v", expr)) expr, diags = BindExpressionText(`"foo"`, nil, hcl.Pos{}) assert.Len(t, diags, 0) assert.Equal(t, StringType, expr.Type()) template, ok := expr.(*TemplateExpression) assert.True(t, ok) assert.Len(t, template.Parts, 1) lit, ok = template.Parts[0].(*LiteralValueExpression) assert.True(t, ok) assert.Equal(t, cty.StringVal("foo"), lit.Value) assert.Equal(t, "\"foo\"", fmt.Sprintf("%v", expr)) }
explode_data.jsonl/42562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 674 }
[ 2830, 3393, 9950, 17350, 1155, 353, 8840, 836, 8, 341, 8122, 649, 11, 1853, 2032, 1669, 29189, 9595, 1178, 445, 3849, 497, 2092, 11, 305, 564, 44208, 37790, 6948, 65819, 1155, 11, 1853, 2032, 11, 220, 15, 340, 6948, 12808, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAttachment builds a plain-text message with one PDF attachment and
// compares the generated MIME output byte-for-byte against the expected
// multipart/mixed encoding. The expected string below is exact wire format;
// do not reformat it.
func TestAttachment(t *testing.T) {
	m := NewMessage()
	m.SetHeader("From", "from@example.com")
	m.SetHeader("To", "to@example.com")
	m.SetBody("text/plain", "Test")
	// mockCopyFile stubs the file read so no real /tmp/test.pdf is needed.
	m.Attach(mockCopyFile("/tmp/test.pdf"))
	want := &message{
		from: "from@example.com",
		to:   []string{"to@example.com"},
		// Two MIME parts separated by the mock boundary: the text body
		// (quoted-printable) and the base64-encoded attachment.
		content: "From: from@example.com\r\n" +
			"To: to@example.com\r\n" +
			"Content-Type: multipart/mixed;\r\n" +
			" boundary=_BOUNDARY_1_\r\n" +
			"\r\n" +
			"--_BOUNDARY_1_\r\n" +
			"Content-Type: text/plain; charset=UTF-8\r\n" +
			"Content-Transfer-Encoding: quoted-printable\r\n" +
			"\r\n" +
			"Test\r\n" +
			"--_BOUNDARY_1_\r\n" +
			"Content-Type: application/pdf; name=\"test.pdf\"\r\n" +
			"Content-Disposition: attachment; filename=\"test.pdf\"\r\n" +
			"Content-Transfer-Encoding: base64\r\n" +
			"\r\n" +
			base64.StdEncoding.EncodeToString([]byte("Content of test.pdf")) +
			"\r\n" +
			"--_BOUNDARY_1_--\r\n",
	}
	testMessage(t, m, 1, want)
}
explode_data.jsonl/31579
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 465 }
[ 2830, 3393, 33569, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 2052, 741, 2109, 4202, 4047, 445, 3830, 497, 330, 1499, 35487, 905, 1138, 2109, 4202, 4047, 445, 1249, 497, 330, 983, 35487, 905, 1138, 2109, 4202, 5444, 445, 1318, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIdemix is an end-to-end exercise of the idemix credential system:
// weak-BB signatures, issuer key generation and validation, credential
// issuance (including negative cases with broken proofs), revocation CRI
// creation/verification, and signing with full, none, and selective
// attribute disclosure plus pseudonym (Nym) signatures. The steps mutate
// shared state in sequence, so their order matters.
func TestIdemix(t *testing.T) {
	// Test weak BB sigs:
	// Test KeyGen
	rng, err := GetRand()
	require.NoError(t, err)
	wbbsk, wbbpk := WBBKeyGen(rng)
	// Get random message
	testmsg := RandModOrder(rng)
	// Test Signing
	wbbsig := WBBSign(wbbsk, testmsg)
	// Test Verification
	err = WBBVerify(wbbpk, wbbsig, testmsg)
	require.NoError(t, err)
	// Test idemix functionality
	AttributeNames := []string{"Attr1", "Attr2", "Attr3", "Attr4", "Attr5"}
	attrs := make([]*FP256BN.BIG, len(AttributeNames))
	for i := range AttributeNames {
		attrs[i] = FP256BN.NewBIGint(i)
	}
	// Test issuer key generation
	if err != nil {
		t.Fatalf("Error getting rng: \"%s\"", err)
		return
	}
	// Create a new key pair
	key, err := NewIssuerKey(AttributeNames, rng)
	if err != nil {
		t.Fatalf("Issuer key generation should have succeeded but gave error \"%s\"", err)
		return
	}
	// Check that the key is valid
	err = key.GetIpk().Check()
	if err != nil {
		t.Fatalf("Issuer public key should be valid")
		return
	}
	// Make sure Check() is invalid for a public key with invalid proof
	proofC := key.Ipk.GetProofC()
	key.Ipk.ProofC = BigToBytes(RandModOrder(rng))
	require.Error(t, key.Ipk.Check(), "public key with broken zero-knowledge proof should be invalid")
	// Make sure Check() is invalid for a public key with incorrect number of HAttrs
	hAttrs := key.Ipk.GetHAttrs()
	key.Ipk.HAttrs = key.Ipk.HAttrs[:0]
	require.Error(t, key.Ipk.Check(), "public key with incorrect number of HAttrs should be invalid")
	key.Ipk.HAttrs = hAttrs
	// Restore IPk to be valid
	key.Ipk.ProofC = proofC
	h := key.Ipk.GetHash()
	require.NoError(t, key.Ipk.Check(), "restored public key should be valid")
	require.Zero(t, bytes.Compare(h, key.Ipk.GetHash()), "IPK hash changed on ipk Check")
	// Create public with duplicate attribute names should fail
	_, err = NewIssuerKey([]string{"Attr1", "Attr2", "Attr1"}, rng)
	require.Error(t, err, "issuer key generation should fail with duplicate attribute names")
	// Test issuance
	sk := RandModOrder(rng)
	ni := RandModOrder(rng)
	m := NewCredRequest(sk, BigToBytes(ni), key.Ipk, rng)
	cred, err := NewCredential(key, m, attrs, rng)
	require.NoError(t, err, "Failed to issue a credential: \"%s\"", err)
	require.NoError(t, cred.Ver(sk, key.Ipk), "credential should be valid")
	// Issuing a credential with the incorrect amount of attributes should fail
	_, err = NewCredential(key, m, []*FP256BN.BIG{}, rng)
	require.Error(t, err, "issuing a credential with the incorrect amount of attributes should fail")
	// Breaking the ZK proof of the CredRequest should make it invalid
	proofC = m.GetProofC()
	m.ProofC = BigToBytes(RandModOrder(rng))
	require.Error(t, m.Check(key.Ipk), "CredRequest with broken ZK proof should not be valid")
	// Creating a credential from a broken CredRequest should fail
	_, err = NewCredential(key, m, attrs, rng)
	require.Error(t, err, "creating a credential from an invalid CredRequest should fail")
	m.ProofC = proofC
	// A credential with nil attribute should be invalid
	attrsBackup := cred.GetAttrs()
	cred.Attrs = [][]byte{nil, nil, nil, nil, nil}
	require.Error(t, cred.Ver(sk, key.Ipk), "credential with nil attribute should be invalid")
	cred.Attrs = attrsBackup
	// Generate a revocation key pair
	revocationKey, err := GenerateLongTermRevocationKey()
	require.NoError(t, err)
	// Create CRI that contains no revocation mechanism
	epoch := 0
	cri, err := CreateCRI(revocationKey, []*FP256BN.BIG{}, epoch, ALG_NO_REVOCATION, rng)
	require.NoError(t, err)
	err = VerifyEpochPK(&revocationKey.PublicKey, cri.EpochPk, cri.EpochPkSig, int(cri.Epoch), RevocationAlgorithm(cri.RevocationAlg))
	require.NoError(t, err)
	// make sure that epoch pk is not valid in future epoch
	err = VerifyEpochPK(&revocationKey.PublicKey, cri.EpochPk, cri.EpochPkSig, int(cri.Epoch)+1, RevocationAlgorithm(cri.RevocationAlg))
	require.Error(t, err)
	// Test bad input
	_, err = CreateCRI(nil, []*FP256BN.BIG{}, epoch, ALG_NO_REVOCATION, rng)
	require.Error(t, err)
	_, err = CreateCRI(revocationKey, []*FP256BN.BIG{}, epoch, ALG_NO_REVOCATION, nil)
	require.Error(t, err)
	// Test signing no disclosure
	Nym, RandNym := MakeNym(sk, key.Ipk, rng)
	disclosure := []byte{0, 0, 0, 0, 0}
	msg := []byte{1, 2, 3, 4, 5}
	rhindex := 4
	sig, _, err := NewSignature(cred, sk, Nym, RandNym, key.Ipk, disclosure, msg, rhindex, cri, rng)
	require.NoError(t, err)
	err = sig.Ver(disclosure, key.Ipk, msg, nil, 0, &revocationKey.PublicKey, epoch)
	if err != nil {
		t.Fatalf("Signature should be valid but verification returned error: %s", err)
		return
	}
	// Test signing selective disclosure
	disclosure = []byte{0, 1, 1, 1, 0}
	sig, _, err = NewSignature(cred, sk, Nym, RandNym, key.Ipk, disclosure, msg, rhindex, cri, rng)
	require.NoError(t, err)
	err = sig.Ver(disclosure, key.Ipk, msg, attrs, rhindex, &revocationKey.PublicKey, epoch)
	require.NoError(t, err)
	// Test NymSignatures
	nymsig, err := NewNymSignature(sk, Nym, RandNym, key.Ipk, []byte("testing"), rng)
	require.NoError(t, err)
	err = nymsig.Ver(Nym, key.Ipk, []byte("testing"))
	if err != nil {
		t.Fatalf("NymSig should be valid but verification returned error: %s", err)
		return
	}
}
explode_data.jsonl/23891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1978 }
[ 2830, 3393, 764, 336, 941, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 7469, 18270, 8366, 82, 510, 197, 322, 3393, 5309, 9967, 198, 7000, 968, 11, 1848, 1669, 2126, 56124, 741, 17957, 35699, 1155, 11, 1848, 340, 6692, 65, 1279, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRespondentSendState(t *testing.T) { s := GetSocket(t, NewSocket) MustBeError(t, s.Send([]byte{}), mangos.ErrProtoState) MustSucceed(t, s.Close()) }
explode_data.jsonl/57405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 65354, 306, 11505, 1397, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 2126, 10286, 1155, 11, 1532, 10286, 340, 9209, 590, 3430, 1454, 1155, 11, 274, 20176, 10556, 3782, 6257, 701, 50196, 436, 27862, 31549, 1397, 340, 9209, 590,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestStreamMigrateCancel verifies that when stopping the source vreplication
// streams fails during SwitchWrites, the migration is rolled back: source and
// target streams are restarted, reverse replication is deleted, and the
// original serving topology (shards -40/40- serving, -80/80- not primary-
// serving) is preserved. The injected failure is "intentionally failed".
func TestStreamMigrateCancel(t *testing.T) {
	ctx := context.Background()
	tme := newTestShardMigrater(ctx, t, []string{"-40", "40-"}, []string{"-80", "80-"})
	defer tme.stopTablets(t)

	tme.expectNoPreviousJournals()
	// Migrate reads
	_, err := tme.wr.SwitchReads(ctx, tme.targetKeyspace, "test", rdOnly, nil, workflow.DirectionForward, false)
	if err != nil {
		t.Fatal(err)
	}
	tme.expectNoPreviousJournals()
	_, err = tme.wr.SwitchReads(ctx, tme.targetKeyspace, "test", replica, nil, workflow.DirectionForward, false)
	if err != nil {
		t.Fatal(err)
	}
	tme.expectCheckJournals()

	// stopStreamsFail primes the fake source DB clients so that the final
	// "stop streams" query errors, forcing the migration to cancel.
	stopStreamsFail := func() {
		// sm.stopStreams->sm.readSourceStreams->readTabletStreams('Stopped')
		tme.dbSourceClients[0].addQuery("select id, workflow, source, pos from _vt.vreplication where db_name='vt_ks' and workflow != 'test_reverse' and state = 'Stopped' and message != 'FROZEN'", &sqltypes.Result{}, nil)
		tme.dbSourceClients[1].addQuery("select id, workflow, source, pos from _vt.vreplication where db_name='vt_ks' and workflow != 'test_reverse' and state = 'Stopped' and message != 'FROZEN'", &sqltypes.Result{}, nil)

		// pre-compute sourceRows because they're re-read multiple times.
		var sourceRows [][]string
		for _, sourceTargetShard := range tme.sourceShards {
			var rows []string
			for j, sourceShard := range tme.sourceShards {
				bls := &binlogdatapb.BinlogSource{
					Keyspace: "ks1",
					Shard:    sourceShard,
					Filter: &binlogdatapb.Filter{
						Rules: []*binlogdatapb.Rule{{
							Match:  "t1",
							Filter: fmt.Sprintf("select * from t1 where in_keyrange('%s')", sourceTargetShard),
						}},
					},
				}
				rows = append(rows, fmt.Sprintf("%d|t1|%v|MariaDB/5-456-888", j+1, bls))
			}
			sourceRows = append(sourceRows, rows)
		}

		for i, dbclient := range tme.dbSourceClients {
			// sm.stopStreams->sm.readSourceStreams->readTabletStreams('') and VReplicationExec(_vt.copy_state)
			dbclient.addQuery("select id, workflow, source, pos from _vt.vreplication where db_name='vt_ks' and workflow != 'test_reverse'", sqltypes.MakeTestResult(sqltypes.MakeTestFields(
				"id|workflow|source|pos",
				"int64|varbinary|varchar|varbinary"),
				sourceRows[i]...),
				nil)
			dbclient.addQuery("select vrepl_id from _vt.copy_state where vrepl_id in (1, 2)", &sqltypes.Result{}, nil)
			// sm.stopStreams->sm.stopSourceStreams->VReplicationExec('Stopped'): fail this
			dbclient.addQuery("select id from _vt.vreplication where id in (1, 2)", nil, fmt.Errorf("intentionally failed"))
		}
	}
	stopStreamsFail()

	// cancelMigration primes the queries expected while rolling back:
	// target streams are deleted and source streams restarted.
	cancelMigration := func() {
		// sm.migrateStreams->sm.deleteTargetStreams
		tme.dbTargetClients[0].addQuery("select id from _vt.vreplication where db_name = 'vt_ks' and workflow in ('t1')", resultid34, nil)
		tme.dbTargetClients[1].addQuery("select id from _vt.vreplication where db_name = 'vt_ks' and workflow in ('t1')", resultid34, nil)
		tme.dbTargetClients[0].addQuery("delete from _vt.vreplication where id in (3, 4)", &sqltypes.Result{}, nil)
		tme.dbTargetClients[1].addQuery("delete from _vt.vreplication where id in (3, 4)", &sqltypes.Result{}, nil)
		tme.dbTargetClients[0].addQuery("delete from _vt.copy_state where vrepl_id in (3, 4)", &sqltypes.Result{}, nil)
		tme.dbTargetClients[1].addQuery("delete from _vt.copy_state where vrepl_id in (3, 4)", &sqltypes.Result{}, nil)

		// sm.migrateStreams->->restart source streams
		tme.dbSourceClients[0].addQuery("select id from _vt.vreplication where db_name = 'vt_ks' and workflow != 'test_reverse'", resultid12, nil)
		tme.dbSourceClients[1].addQuery("select id from _vt.vreplication where db_name = 'vt_ks' and workflow != 'test_reverse'", resultid12, nil)
		tme.dbSourceClients[0].addQuery("update _vt.vreplication set state = 'Running', stop_pos = null, message = '' where id in (1, 2)", &sqltypes.Result{}, nil)
		tme.dbSourceClients[1].addQuery("update _vt.vreplication set state = 'Running', stop_pos = null, message = '' where id in (1, 2)", &sqltypes.Result{}, nil)
		tme.dbSourceClients[0].addQuery("select * from _vt.vreplication where id = 1", runningResult(1), nil)
		tme.dbSourceClients[1].addQuery("select * from _vt.vreplication where id = 1", runningResult(1), nil)
		tme.dbSourceClients[0].addQuery("select * from _vt.vreplication where id = 2", runningResult(2), nil)
		tme.dbSourceClients[1].addQuery("select * from _vt.vreplication where id = 2", runningResult(2), nil)

		// mi.cancelMigration->restart target streams
		tme.dbTargetClients[0].addQuery("select id from _vt.vreplication where db_name = 'vt_ks' and workflow = 'test'", &sqltypes.Result{}, nil)
		tme.dbTargetClients[1].addQuery("select id from _vt.vreplication where db_name = 'vt_ks' and workflow = 'test'", &sqltypes.Result{}, nil)
		tme.expectDeleteReverseVReplication()
	}
	cancelMigration()

	// SwitchWrites must fail with the injected error.
	_, _, err = tme.wr.SwitchWrites(ctx, tme.targetKeyspace, "test", 1*time.Second, false, false, true, false)
	want := "intentionally failed"
	if err == nil || !strings.Contains(err.Error(), want) {
		t.Errorf("SwitchWrites err: %v, want %s", err, want)
	}

	// Topology must be unchanged after the cancelled migration.
	checkServedTypes(t, tme.ts, "ks:-40", 1)
	checkServedTypes(t, tme.ts, "ks:40-", 1)
	checkServedTypes(t, tme.ts, "ks:-80", 2)
	checkServedTypes(t, tme.ts, "ks:80-", 2)
	checkIfPrimaryServing(t, tme.ts, "ks:-40", true)
	checkIfPrimaryServing(t, tme.ts, "ks:40-", true)
	checkIfPrimaryServing(t, tme.ts, "ks:-80", false)
	checkIfPrimaryServing(t, tme.ts, "ks:80-", false)

	verifyQueries(t, tme.allDBClients)
}
explode_data.jsonl/60569
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2117 }
[ 2830, 3393, 3027, 44, 34479, 9269, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 3244, 2660, 1669, 501, 2271, 2016, 567, 44, 5233, 962, 7502, 11, 259, 11, 3056, 917, 4913, 12, 19, 15, 497, 330, 19, 15, 12, 14345, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestManifestGenerateFlagsMinimalProfile(t *testing.T) { g := NewWithT(t) // Change profile from empty to minimal using flag. m, _, err := generateManifest("empty", "-s profile=minimal", liveCharts) if err != nil { t.Fatal(err) } objs, err := parseObjectSetFromManifest(m) if err != nil { t.Fatal(err) } // minimal profile always has istiod, empty does not. mustGetDeployment(g, objs, "istiod") }
explode_data.jsonl/47906
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 38495, 31115, 9195, 88328, 8526, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 340, 197, 322, 10388, 5526, 504, 4287, 311, 17377, 1667, 5181, 624, 2109, 11, 8358, 1848, 1669, 6923, 38495, 445, 3194, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParse(t *testing.T) { e, _ := parser.Parse("object_pairs('$1') AND x > 1 AND 1 < x") emitExpr(t, e) e, _ = parser.Parse("x = 1") emitExpr(t, e) e, _ = parser.Parse("x > 10 AND y > 20 AND (z > 30 OR w > 40)") emitExpr(t, e) e, _ = parser.Parse("x > 10 AND y > 20 AND (z > 30 OR w > 40) AND (zz > 300 OR ww > 400)") emitExpr(t, e) e, _ = parser.Parse("(x > 10 AND y > 20) AND (z > 30 OR w > 40) AND (zz > 300 OR ww > 400)") emitExpr(t, e) e, _ = parser.Parse("(x > 10 AND y > 20) AND (z > 30 OR w > 40) AND (zz > 300 OR ww > 400) OR true") emitExpr(t, e) e, _ = parser.Parse("x > 10 AND y > 20 AND IFNULL(z > 30 OR w > 40, NULL)") emitExpr(t, e) e, _ = parser.Parse("x > 10 AND y > 20 AND IFNULL(NULL, NULL)") emitExpr(t, e) e, _ = parser.Parse("x > 10 AND x > 11 AND x > 12 AND x > 13 AND x > 14") emitExpr(t, e) }
explode_data.jsonl/45723
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 402 }
[ 2830, 3393, 14463, 1155, 353, 8840, 836, 8, 341, 7727, 11, 716, 1669, 6729, 8937, 445, 1700, 36430, 29345, 16, 863, 3567, 856, 861, 220, 16, 3567, 220, 16, 366, 856, 1138, 81562, 16041, 1155, 11, 384, 692, 7727, 11, 716, 284, 6729, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestToIntOrDefault(t *testing.T) { type args struct { s string d int } tests := []struct { name string args args want int }{ { name: "", args: args{ s: "not_int", d: 10, }, want: 10, }, { name: "", args: args{ s: "5", d: 10, }, want: 5, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := ToIntOrDefault(tt.args.s, tt.args.d); got != tt.want { t.Errorf("ToIntOrDefault() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/5488
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 38544, 14188, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 914, 198, 197, 2698, 526, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 526, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAuthorizationUpdateRequest_String(t *testing.T) { v := AuthorizationUpdateRequest{ Note: String(""), NoteURL: String(""), Fingerprint: String(""), } want := `github.AuthorizationUpdateRequest{Note:"", NoteURL:"", Fingerprint:""}` if got := v.String(); got != want { t.Errorf("AuthorizationUpdateRequest.String = %v, want %v", got, want) } }
explode_data.jsonl/33220
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 18124, 4289, 1900, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 30562, 4289, 1900, 515, 197, 197, 9112, 25, 286, 923, 445, 4461, 197, 197, 9112, 3144, 25, 257, 923, 445, 4461, 197, 12727, 47918, 25, 923, 445, 4461, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_return_all_projects_when_query_is_empty(t *testing.T) { projects := NewProjects() project1 := Project{Name: "PROJECT_1"} project2 := Project{Name: "PROJECT_2"} project3 := Project{Name: "PROJECT_3"} project4 := Project{Name: "PROJECT_4"} projects.AddAll([]Project{project1, project2, project3, project4}) filteredProjects := FuzzyMatch(emptyQuery, projects) assert.Equal(t, []Project{project1, project2, project3, project4}, filteredProjects.List()) }
explode_data.jsonl/10977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 12511, 5705, 58774, 47636, 5738, 6892, 15124, 1155, 353, 8840, 836, 8, 341, 197, 17161, 1669, 1532, 29958, 741, 72470, 16, 1669, 5787, 63121, 25, 330, 41455, 62, 16, 16707, 72470, 17, 1669, 5787, 63121, 25, 330, 41455, 62, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLocationByName(t *testing.T) { result, _ := pokeapi.Location("canalave-city") assert.Equal(t, "canalave-city", result.Name, "Expect to receive Canalave City.") }
explode_data.jsonl/63733
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 4707, 16898, 1155, 353, 8840, 836, 8, 341, 9559, 11, 716, 1669, 51551, 2068, 4515, 445, 4814, 278, 523, 53329, 1138, 6948, 12808, 1155, 11, 330, 4814, 278, 523, 53329, 497, 1102, 2967, 345, 197, 197, 1, 17536, 311, 5258, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSkipList_GetFail(t *testing.T) { sl := newSkipList() if sl == nil { t.Fatalf("%v: got nil", t.Name()) } v, ok := sl.Get(-1) if ok { t.Fatalf("%v: suppose to fail, but got: %v, %v", t.Name(), v, ok) } }
explode_data.jsonl/54832
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 35134, 852, 13614, 19524, 1155, 353, 8840, 836, 8, 341, 78626, 1669, 501, 35134, 852, 741, 743, 1739, 621, 2092, 341, 197, 3244, 30762, 4430, 85, 25, 2684, 2092, 497, 259, 2967, 2398, 197, 630, 5195, 11, 5394, 1669, 1739, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCompileOverview(t *testing.T) { todos, _ := parse.File(tu.FullTestdataPath("overview.input")) overview := tu.ToJSON(compileTopLevelMomentsOverview(todos)) tu.AssertGoldenOutput(t, "TestCompileOverview", "overview.output.json", overview) }
explode_data.jsonl/42525
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 46126, 41044, 1155, 353, 8840, 836, 8, 341, 3244, 16385, 11, 716, 1669, 4715, 8576, 1155, 84, 32038, 2271, 691, 1820, 445, 49278, 10046, 28075, 197, 49278, 1669, 9765, 3274, 5370, 7, 20433, 5366, 4449, 58783, 805, 41044, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestReversiAnz41(t *testing.T) { r := NewReversiAnz() r.SetGoodPoint(1) if r.GetGoodPoint() != 1 { t.Errorf("NG") } }
explode_data.jsonl/23064
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 693, 3004, 72, 2082, 89, 19, 16, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 693, 3004, 72, 2082, 89, 741, 7000, 4202, 15216, 2609, 7, 16, 340, 743, 435, 2234, 15216, 2609, 368, 961, 220, 16, 341, 197, 3244, 13080,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2