text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestReadsDoNotCallPush(t *testing.T) { push := func(m []interface{}) { t.Errorf("Unexpected call to push!") } u := NewUndeltaStore(push, testUndeltaKeyFunc) // These should not call push. _ = u.List() _, _, _ = u.Get(testUndeltaObject{"a", ""}) }
explode_data.jsonl/69026
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 4418, 82, 5404, 2623, 7220, 16644, 1155, 353, 8840, 836, 8, 972, 43155, 1669, 2915, 1255, 3056, 4970, 28875, 972, 197, 3244, 13080, 445, 29430, 1618, 311, 4484, 93896, 197, 2570, 10676, 1669, 1532, 19957, 5964, 6093, 62982, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBitToByteWriter1(t *testing.T) { var source []Bit var is, should []byte source = []Bit{1, 0, 0, 1, 1, 1, 0, 1, 0, 1, 1, 0, 1, 1} should = []byte{0b_1001_1101, 0b_0110_1100} buf := bytes.NewBuffer(is) w := NewBitToByteWriter(buf) err := w.WriteBits(source...) // Before close, only firs byte ... if bytes.Compare(should[:1], buf.Bytes()) != 0 || err != nil { fmt.Println("\nsource:", source) fmt.Println("got before close:", buf.Bytes()) fmt.Println("want:", should) fmt.Println("error:", err) t.Fatal(err) } // close w.Close() if bytes.Compare(should, buf.Bytes()) != 0 || err != nil { fmt.Println("\nsource:", source) fmt.Println("got:", buf.Bytes()) fmt.Println("want:", should) fmt.Println("error:", err) t.Fatal(err) } }
explode_data.jsonl/49149
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 8344, 1249, 7153, 6492, 16, 1155, 353, 8840, 836, 8, 1476, 2405, 2530, 3056, 8344, 198, 2405, 374, 11, 1265, 3056, 3782, 271, 47418, 284, 3056, 8344, 90, 16, 11, 220, 15, 11, 220, 15, 11, 220, 16, 11, 220, 16, 11, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestExecuteParallel(t *testing.T) { err := Run(context.Background(), func() error { time.Sleep(time.Millisecond * 200) return errors.New("test") }, func() error { time.Sleep(time.Millisecond * 500) return errors.New("test2") }) if r := cmp.Diff(err.Error(), "test"); r != "" { t.Error(r) } }
explode_data.jsonl/28959
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 17174, 16547, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 6452, 5378, 19047, 3148, 197, 29244, 368, 1465, 341, 298, 21957, 31586, 9730, 71482, 353, 220, 17, 15, 15, 340, 298, 853, 5975, 7121, 445, 1944, 1138, 197, 197, 2137,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRandomTxExecuteConsistent(t *testing.T) { _, root := prepareTruck() defer closeTestCtx(root) randomTxs := generateRandomTx(&testcase.DumbSigner{}) for i := 100; i > 0; i-- { r1 := executeTxs(randomTxs, root.GetTrunkStore(1000).(*store.TrunkStore)) r2 := executeTxs(randomTxs, root.GetTrunkStore(1000).(*store.TrunkStore)) //check txs require.Equal(t, len(r1.standbyTxs), len(r2.standbyTxs)) for i, tx1 := range r1.standbyTxs { require.Equal(t, true, bytes.Equal(tx1.From.Bytes(), r2.standbyTxs[i].From.Bytes())) require.Equal(t, true, bytes.Equal(tx1.To.Bytes(), r2.standbyTxs[i].To.Bytes())) require.Equal(t, tx1.Nonce, r2.standbyTxs[i].Nonce) require.Equal(t, true, bytes.Equal(tx1.Value[:], r2.standbyTxs[i].Value[:])) fmt.Printf( ` from:%s to:%s nonce:%d `, tx1.From.String(), tx1.To.String(), tx1.Nonce) } //check balance require.Equal(t, r1.from1.Balance(), r2.from1.Balance()) require.Equal(t, r1.from2.Balance(), r2.from2.Balance()) require.Equal(t, r1.to1.Balance(), r2.to1.Balance()) require.Equal(t, r1.to2.Balance(), r2.to2.Balance()) //check nonce require.Equal(t, r1.from1.Nonce(), r2.from1.Nonce()) require.Equal(t, r1.from2.Nonce(), r2.from2.Nonce()) require.Equal(t, r1.to1.Nonce(), r2.to1.Nonce()) require.Equal(t, r1.to2.Nonce(), r2.to2.Nonce()) //check standby tx require.Equal(t, r1.txR.start, r2.txR.start) require.Equal(t, r1.txR.end, r2.txR.end) //check committed tx require.Equal(t, len(r1.committedTxs), len(r2.committedTxs)) require.Equal(t, len(r1.standbyTxs), len(r1.committedTxs)) for i, tx1 := range r1.committedTxs { require.Equal(t, true, bytes.Equal(tx1.From[:], r2.committedTxs[i].From[:])) require.Equal(t, true, bytes.Equal(tx1.To[:], r2.committedTxs[i].To[:])) require.Equal(t, tx1.Nonce, r2.committedTxs[i].Nonce) require.Equal(t, true, bytes.Equal(tx1.Value[:], r2.committedTxs[i].Value[:])) } } }
explode_data.jsonl/58971
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 921 }
[ 2830, 3393, 13999, 31584, 17174, 15220, 18128, 1155, 353, 8840, 836, 8, 341, 197, 6878, 3704, 1669, 10549, 1282, 1942, 741, 16867, 3265, 2271, 23684, 9206, 340, 83628, 51, 18561, 1669, 6923, 13999, 31584, 2099, 1944, 5638, 909, 3551, 7264...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIssue15743(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t0") tk.MustExec("CREATE TABLE t0(c0 int)") tk.MustExec("INSERT INTO t0 VALUES (1)") tk.MustQuery("SELECT * FROM t0 WHERE 1 AND 0.4").Check(testkit.Rows("1")) }
explode_data.jsonl/65521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 42006, 16, 20, 22, 19, 18, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFailedWrite(t *testing.T) { // Test that a write error during the handshake is returned. for _, breakAfter := range []int{0, 1} { c, s := localPipe(t) done := make(chan bool) go func() { Server(s, testConfig).Handshake() s.Close() done <- true }() brokenC := &brokenConn{Conn: c, breakAfter: breakAfter} err := Client(brokenC, testConfig).Handshake() if err != brokenConnErr { t.Errorf("#%d: expected error from brokenConn but got %q", breakAfter, err) } brokenC.Close() <-done } }
explode_data.jsonl/27729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 206 }
[ 2830, 3393, 9408, 7985, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 429, 264, 3270, 1465, 2337, 279, 57020, 374, 5927, 624, 2023, 8358, 1438, 6025, 1669, 2088, 3056, 396, 90, 15, 11, 220, 16, 92, 341, 197, 1444, 11, 274, 1669, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPlus(t *testing.T) { var ( result int64 err error ) service := NewCalculatorHandler() for _, testsFixture := range dataProviderPlusTests { result, err = service.Plus(testsFixture.a, testsFixture.b) assert.Equal(t, nil, err, "err should be nil") assert.Equal(t, testsFixture.expected, result, "they should be equal") } }
explode_data.jsonl/58103
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 21807, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 9559, 526, 21, 19, 198, 197, 9859, 262, 1465, 198, 197, 692, 52934, 1669, 1532, 55743, 3050, 2822, 2023, 8358, 7032, 18930, 1669, 2088, 821, 5179, 21807, 18200, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCataloger_DeleteEntry(t *testing.T) { ctx := context.Background() c := testCataloger(t) repository := testCatalogerRepo(t, ctx, c, "repository", "master") t.Run("delete file not exists", func(t *testing.T) { err := c.DeleteEntry(ctx, repository, "master", "/file1") wantErr := ErrEntryNotFound if !errors.As(err, &wantErr) { t.Errorf("DeleteEntry() error = %s, want = %s", err, wantErr) } }) t.Run("delete uncommitted", func(t *testing.T) { if err := c.CreateEntry(ctx, repository, "master", Entry{ Path: "/file2", Checksum: "ff", PhysicalAddress: "/addr2", Size: 2, Metadata: nil, }, CreateEntryParams{}); err != nil { t.Fatal("create entry for delete entry test:", err) } err := c.DeleteEntry(ctx, repository, "master", "/file2") if err != nil { t.Errorf("DeleteEntry() error = %s, expected no error", err) return } testDeleteEntryExpectNotFound(t, ctx, c, repository, "master", "/file2") // if we try to commit we should fail - there was no change _, err = c.Commit(ctx, repository, "master", "commit nothing", "tester", nil) if !errors.Is(err, ErrNothingToCommit) { t.Fatalf("Commit returned err=%s, expected=%s", err, ErrNothingToCommit) } }) t.Run("delete committed on branch", func(t *testing.T) { if err := c.CreateEntry(ctx, repository, "master", Entry{ Path: "/file3", Checksum: "ffff", PhysicalAddress: "/addr3", Size: 2, Metadata: nil, }, CreateEntryParams{}); err != nil { t.Fatal("create entry for delete entry test:", err) } if _, err := c.Commit(ctx, repository, "master", "commit file3", "tester", nil); err != nil { t.Fatal("Commit entry for delete entry test:", err) } err := c.DeleteEntry(ctx, repository, "master", "/file3") if err != nil { t.Errorf("DeleteEntry() error = %s, want no error", err) return } testDeleteEntryExpectNotFound(t, ctx, c, repository, "master", "/file3") testDeleteEntryCommitAndExpectNotFound(t, ctx, c, repository, "master", "/file3") }) t.Run("delete file committed on parent", func(t *testing.T) { if err := c.CreateEntry(ctx, repository, 
"master", Entry{ Path: "/file4", Checksum: "ffff", PhysicalAddress: "/addr4", Size: 4, Metadata: nil, }, CreateEntryParams{}); err != nil { t.Fatal("create entry for delete entry test:", err) } if _, err := c.Commit(ctx, repository, "master", "commit file4", "tester", nil); err != nil { t.Fatal("Commit entry for delete entry test:", err) } if _, err := c.CreateBranch(ctx, repository, "b1", "master"); err != nil { t.Fatal("create branch for delete entry test:", err) } err := c.DeleteEntry(ctx, repository, "b1", "/file4") if err != nil { t.Errorf("DeleteEntry() error = %s, want no error", err) return } testDeleteEntryExpectNotFound(t, ctx, c, repository, "b1", "/file4") testDeleteEntryCommitAndExpectNotFound(t, ctx, c, repository, "b1", "/file4") }) }
explode_data.jsonl/26628
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1256 }
[ 2830, 3393, 41606, 261, 57418, 5874, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 1669, 1273, 41606, 261, 1155, 340, 17200, 3099, 1669, 1273, 41606, 261, 25243, 1155, 11, 5635, 11, 272, 11, 330, 23319, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOrderCreateMetafield(t *testing.T) { setup() defer teardown() httpmock.RegisterResponder("POST", fmt.Sprintf("https://fooshop.myshopify.com/%s/orders/1/metafields.json", client.pathPrefix), httpmock.NewBytesResponder(200, loadFixture("metafield.json"))) metafield := Metafield{ Key: "app_key", Value: "app_value", ValueType: "string", Namespace: "affiliates", } returnedMetafield, err := client.Order.CreateMetafield(1, metafield) if err != nil { t.Errorf("Order.CreateMetafield() returned error: %v", err) } MetafieldTests(t, *returnedMetafield) }
explode_data.jsonl/17995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 4431, 4021, 12175, 2566, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 28080, 16712, 19983, 30884, 445, 2946, 497, 8879, 17305, 445, 2428, 1110, 824, 9267, 453, 12618, 8675, 1437, 905, 12627, 82, 82818, 14, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateForInsert(t *testing.T) { t.Run("Validate good", func(t *testing.T) { colColl := NewColCollection(allCols...) assert.NoError(t, ValidateForInsert(colColl)) }) t.Run("Name collision", func(t *testing.T) { cols := append(allCols, Column{titleColName, 100, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil}) colColl := NewColCollection(cols...) err := ValidateForInsert(colColl) assert.Error(t, err) assert.Equal(t, err, ErrColNameCollision) }) t.Run("Case insensitive collision", func(t *testing.T) { cols := append(allCols, Column{strings.ToUpper(titleColName), 100, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil}) colColl := NewColCollection(cols...) err := ValidateForInsert(colColl) assert.Error(t, err) assert.Equal(t, err, ErrColNameCollision) }) t.Run("Tag collision", func(t *testing.T) { cols := append(allCols, Column{"newCol", lnColTag, types.StringKind, false, typeinfo.StringDefaultType, "", false, "", nil}) colColl := NewColCollection(cols...) err := ValidateForInsert(colColl) assert.Error(t, err) assert.Equal(t, err, ErrColTagCollision) }) }
explode_data.jsonl/5970
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 17926, 2461, 13780, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 17926, 1661, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 46640, 15265, 1669, 1532, 6127, 6482, 20388, 37567, 31218, 197, 6948, 35699, 1155, 11, 23282, 246...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVersion(t *testing.T) { a, err := NewVersion("1.2") if err != nil { t.Fatal(err) } b, err := NewVersion("2.2.1") if err != nil { t.Fatal(err) } if !a.Less(b) { t.Error("A should be less than B") } if b.Less(a) { t.Error("B shouldn't be less than A") } v200 := Version{2, 0, 0} if !a.Less(v200) { t.Error("1.2.1 should not be less than 2.0.0") } if v200.Less(a) { t.Error("2.0.0 should not be less than 1.2.1") } }
explode_data.jsonl/5112
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 5637, 1155, 353, 8840, 836, 8, 341, 11323, 11, 1848, 1669, 1532, 5637, 445, 16, 13, 17, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 2233, 11, 1848, 1669, 1532, 5637, 445, 17, 13, 17, 13, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestSearcher_WIREFindCityOnly(t *testing.T) { s := searcher{} if err := s.helperLoadFEDWIREFile(t); err != nil { t.Fatal(err) } wireP := s.WIREFindCityOnly(hardResultsLimit, "IOWA CITY") if len(wireP) == 0 { t.Fatalf("%s", "No matches found for city") } for _, p := range wireP { if !strings.Contains(p.City, strings.ToUpper("IOWA CITY")) { t.Errorf("City=%s", p.City) } } }
explode_data.jsonl/71103
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 5890, 261, 2763, 40, 5996, 484, 12730, 7308, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 94674, 16094, 743, 1848, 1669, 274, 38922, 5879, 37, 1479, 54, 7466, 1703, 1155, 1215, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestBgpview(t *testing.T) { asn := GetAsn(5692) asnPrefixes := GetAsnPrefixes(5692) asnPeers := GetAsnPeers(5692) asnUpstreams := GetAsnUpstreams(5692) asnDownstreams := GetAsnDownstreams(10834) asnIxs := GetAsnIxs(5692) prefix := GetPrefix("192.209.63.0", 24) ix := GetIx(363) fmt.Println(asn) fmt.Println(asnPrefixes) fmt.Println(asnPeers) fmt.Println(asnUpstreams) fmt.Println(asnDownstreams) fmt.Println(asnIxs) fmt.Println(prefix) fmt.Println(ix) }
explode_data.jsonl/41336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 33, 21888, 1050, 1155, 353, 8840, 836, 8, 341, 60451, 77, 1669, 2126, 2121, 77, 7, 20, 21, 24, 17, 340, 60451, 77, 14335, 288, 1669, 2126, 2121, 77, 14335, 288, 7, 20, 21, 24, 17, 340, 60451, 77, 10197, 388, 1669, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintNetworkPolicy(t *testing.T) { tests := []struct { policy networking.NetworkPolicy expected []metav1.TableRow }{ // Basic network policy with empty spec. { policy: networking.NetworkPolicy{ ObjectMeta: metav1.ObjectMeta{ Name: "policy1", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Spec: networking.NetworkPolicySpec{}, }, // Columns: Name, Pod-Selector, Age expected: []metav1.TableRow{{Cells: []interface{}{"policy1", "<none>", "0s"}}}, }, // Basic network policy with pod selector. { policy: networking.NetworkPolicy{ ObjectMeta: metav1.ObjectMeta{ Name: "policy2", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Spec: networking.NetworkPolicySpec{ PodSelector: metav1.LabelSelector{MatchLabels: map[string]string{"foo": "bar"}}, }, }, // Columns: Name, Pod-Selector, Age expected: []metav1.TableRow{{Cells: []interface{}{"policy2", "foo=bar", "0s"}}}, }, } for i, test := range tests { rows, err := printNetworkPolicy(&test.policy, printers.GenerateOptions{}) if err != nil { t.Fatal(err) } for i := range rows { rows[i].Object.Object = nil } if !reflect.DeepEqual(test.expected, rows) { t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows)) } } }
explode_data.jsonl/21618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 582 }
[ 2830, 3393, 8994, 12320, 13825, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 3223, 8018, 256, 28030, 30149, 13825, 198, 197, 42400, 3056, 4059, 402, 16, 18257, 3102, 198, 197, 59403, 197, 197, 322, 14625, 3922, 4842,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDisclosureSession(t *testing.T) { id := irma.NewAttributeTypeIdentifier("irma-demo.RU.studentCard.studentID") request := getDisclosureRequest(id) sessionHelper(t, request, "verification", nil) }
explode_data.jsonl/69987
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 91065, 5283, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 6216, 1728, 7121, 3907, 929, 8714, 445, 44011, 58893, 2013, 52, 40113, 5770, 40113, 915, 1138, 23555, 1669, 633, 91065, 1900, 3724, 340, 25054, 5511, 1155, 11, 1681, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestZCashDecodeToString(t *testing.T) { tests := []TestcaseDecode{ { name: "Empty", input: "", err: errors.New("invalid data length"), }, { name: "Short", input: "06a1a1a7f2ff4762", err: errors.New("invalid opcode bytes"), }, { name: "Good", input: "76a91458e71790e51ab7558c05a6067cfc4926aa8c44dd88ac", output: "t1RygJmrLdNGgi98gUgEJDTVaELTAYWoMBy", }, { name: "Good2", input: "76a91469bf38acef973293c07f05c778eb1209748e8d5288ac", output: "t1TWk2mmvESDnE4dmCfT7MQ97ij6ZqLpNVU", }, { name: "Good3", input: "76a9148c6f453157897ce2e6de413f329d995fe0d8f90288ac", output: "t1Wg9uPPAfwhBWeRjtDPa5ZHNzyBx9rJVKY", }, { name: "Good5", input: "76a914f925b59e1dc043ad7f0b7e85ea05b06dfe83413888ac", output: "t1gaySCXCYtXE3ygP38YuWtVZczsEbdjG49", }, } RunTestsDecode(t, slip44.ZCASH, tests) }
explode_data.jsonl/9965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 519 }
[ 2830, 3393, 57, 47441, 32564, 5870, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 2271, 5638, 32564, 515, 197, 197, 515, 298, 11609, 25, 220, 330, 3522, 756, 298, 22427, 25, 8324, 298, 9859, 25, 256, 5975, 7121, 445, 11808, 821, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_InFromStdin(t *testing.T) { expected := []byte("foo") r, w, err := os.Pipe() if err != nil { t.Error(err) } _, err = w.Write(expected) if err != nil { t.Error(err) } w.Close() orig := os.Stdin defer func() { os.Stdin = orig }() os.Stdin = r actual, err := cmder.New("cat").In().Output() if err != nil { t.Error(err) } msg := fmt.Sprintf("Expected %s. Got %s.", expected, actual) assert.Equal(t, expected, actual, msg) }
explode_data.jsonl/70662
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 25972, 3830, 22748, 258, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 3056, 3782, 445, 7975, 5130, 7000, 11, 289, 11, 1848, 1669, 2643, 1069, 3444, 741, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 630, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapProxy_TryLock(t *testing.T) { mp.Put("testingKey", "testingValue") ok, err := mp.TryLockWithTimeoutAndLease("testingKey", 1, time.Second, 2, time.Second) AssertEqualf(t, err, ok, true, "Try Lock failed") time.Sleep(5 * time.Second) locked, err := mp.IsLocked("testingKey") AssertEqualf(t, err, locked, false, "Key should not be locked.") mp.ForceUnlock("testingKey") mp.Clear() }
explode_data.jsonl/56993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 2227, 16219, 1139, 884, 11989, 1155, 353, 8840, 836, 8, 341, 53230, 39825, 445, 8840, 1592, 497, 330, 8840, 1130, 1138, 59268, 11, 1848, 1669, 10490, 19824, 11989, 2354, 7636, 3036, 2304, 519, 445, 8840, 1592, 497, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSendTelemetry(t *testing.T) { err := reportManager.SendReport(tb) if err != nil { t.Errorf("SendTelemetry failed due to %v", err) } i := 3 rpMgr := &ReportManager{} rpMgr.Report = &i err = rpMgr.SendReport(tb) if err == nil { t.Errorf("SendTelemetry not failed for incorrect report type") } }
explode_data.jsonl/49195
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 11505, 6639, 35958, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 1895, 2043, 20176, 10361, 61414, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 11505, 6639, 35958, 4641, 4152, 311, 1018, 85, 497, 1848, 340, 197, 630, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSameFuncWrapper(t *testing.T) { a := func(_ string) {} // string argument to force wrapping b := func(_ string) {} // string argument to force wrapping if !dummys.Call("isEqual", a, a).Bool() || dummys.Call("isEqual", a, b).Bool() { t.Fail() } if !dummys.Call("isEqual", somePackageFunction, somePackageFunction).Bool() { t.Fail() } if !dummys.Call("isEqual", (*T).someMethod, (*T).someMethod).Bool() { t.Fail() } t1 := &T{} t2 := &T{} if !dummys.Call("isEqual", t1.someMethod, t1.someMethod).Bool() || dummys.Call("isEqual", t1.someMethod, t2.someMethod).Bool() { t.Fail() } }
explode_data.jsonl/56794
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 19198, 9626, 11542, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2915, 2490, 914, 8, 4687, 442, 914, 5693, 311, 5344, 41195, 198, 2233, 1669, 2915, 2490, 914, 8, 4687, 442, 914, 5693, 311, 5344, 41195, 198, 743, 753, 67, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_mime_MediaType_(t *testing.T) { zr.TBegin(t) // MediaType_(name string) string // /* name = strings.TrimSpace(strings.ToLower(name)) if i := strings.LastIndex(name, "."); i != -1 { name = strings.ToLower(name[i+1:]) } for _, iter := range MediaTypes { if name == iter.ext || name == iter.mimeType { return iter.mimeType } } return "" */ }
explode_data.jsonl/7184
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 83624, 1245, 4495, 929, 8361, 83, 353, 8840, 836, 8, 341, 20832, 81, 836, 11135, 1155, 340, 197, 322, 50423, 8361, 606, 914, 8, 914, 198, 197, 2289, 197, 3284, 197, 11609, 284, 9069, 90790, 51442, 29983, 3153, 1171, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBackupRestoreShowJob(t *testing.T) { defer leaktest.AfterTest(t)() const numAccounts = 1 _, _, sqlDB, _, cleanupFn := BackupRestoreTestSetup(t, singleNode, numAccounts, InitNone) defer cleanupFn() sqlDB.Exec(t, `BACKUP DATABASE data TO $1 WITH revision_history`, LocalFoo) sqlDB.Exec(t, `CREATE DATABASE "data 2"`) sqlDB.Exec(t, `RESTORE data.bank FROM $1 WITH skip_missing_foreign_keys, into_db = $2`, LocalFoo, "data 2") // The "updating privileges" clause in the SELECT statement is for excluding jobs // run by an unrelated startup migration. // TODO (lucy): Update this if/when we decide to change how these jobs queued by // the startup migration are handled. sqlDB.CheckQueryResults( t, "SELECT description FROM [SHOW JOBS] WHERE description != 'updating privileges' ORDER BY description", [][]string{ {"BACKUP DATABASE data TO 'nodelocal://0/foo' WITH revision_history"}, {"RESTORE TABLE data.bank FROM 'nodelocal://0/foo' WITH into_db = 'data 2', skip_missing_foreign_keys"}, }, ) }
explode_data.jsonl/57617
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 56245, 56284, 7812, 12245, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 4777, 1629, 41369, 284, 220, 16, 198, 197, 6878, 8358, 5704, 3506, 11, 8358, 21290, 24911, 1669, 43438, 56284, 2271, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfig_LoadSingleInputWithEnvVars(t *testing.T) { c := NewConfig() err := os.Setenv("MY_TEST_SERVER", "192.168.1.1") assert.NoError(t, err) err = os.Setenv("TEST_INTERVAL", "10s") assert.NoError(t, err) c.LoadConfig("./testdata/single_plugin_env_vars.toml") memcached := inputs.Inputs["memcached"]().(*memcached.Memcached) memcached.Servers = []string{"192.168.1.1"} filter := models.Filter{ NameDrop: []string{"metricname2"}, NamePass: []string{"metricname1", "ip_192.168.1.1_name"}, FieldDrop: []string{"other", "stuff"}, FieldPass: []string{"some", "strings"}, TagDrop: []models.TagFilter{ { Name: "badtag", Filter: []string{"othertag"}, }, }, TagPass: []models.TagFilter{ { Name: "goodtag", Filter: []string{"mytag"}, }, }, } assert.NoError(t, filter.Compile()) mConfig := &models.InputConfig{ Name: "memcached", Filter: filter, Interval: 10 * time.Second, } mConfig.Tags = make(map[string]string) assert.Equal(t, memcached, c.Inputs[0].Input, "Testdata did not produce a correct memcached struct.") assert.Equal(t, mConfig, c.Inputs[0].Config, "Testdata did not produce correct memcached metadata.") }
explode_data.jsonl/67101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 503 }
[ 2830, 3393, 2648, 19553, 10888, 2505, 2354, 14359, 28305, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 2648, 741, 9859, 1669, 2643, 4202, 3160, 445, 19159, 11641, 20012, 497, 330, 16, 24, 17, 13, 16, 21, 23, 13, 16, 13, 16, 1138,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInvokeNOKVSCCUnspecified(t *testing.T) { t.Run("1.2Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV12Capabilities(t) defer cleanup() testInvokeNOKVSCCUnspecified(t, l, v) }) t.Run("1.3Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV13Capabilities(t) defer cleanup() testInvokeNOKVSCCUnspecified(t, l, v) }) }
explode_data.jsonl/47814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 17604, 45, 3925, 53, 3540, 34, 1806, 53434, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16, 13, 17, 63746, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 8810, 11, 348, 11, 21290, 1669, 6505, 60850, 1389, 3036, 14256,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestArrayValuer(t *testing.T) { var v driver.Valuer = Array([]bool{}) if _, ok := v.(*BoolArray); !ok { t.Errorf("Expected *BoolArray, got %T", v) } v = Array([]float64{}) if _, ok := v.(*Float64Array); !ok { t.Errorf("Expected *Float64Array, got %T", v) } v = Array([]int64{}) if _, ok := v.(*Int64Array); !ok { t.Errorf("Expected *Int64Array, got %T", v) } v = Array([]string{}) if _, ok := v.(*StringArray); !ok { t.Errorf("Expected *StringArray, got %T", v) } for _, tt := range []interface{}{ nil, []driver.Value{}, [][]bool{}, [][]float64{}, [][]int64{}, [][]string{}, } { v = Array(tt) if _, ok := v.(GenericArray); !ok { t.Errorf("Expected GenericArray for %T, got %T", tt, v) } } }
explode_data.jsonl/5303
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 1857, 2208, 8801, 1155, 353, 8840, 836, 8, 341, 2405, 348, 5579, 77819, 8801, 284, 2910, 10556, 2641, 37790, 743, 8358, 5394, 1669, 348, 41399, 11233, 1857, 1215, 753, 562, 341, 197, 3244, 13080, 445, 18896, 353, 11233, 1857...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestUnlinkCompletionHandler builds a fake project containing one frontend
// component, two backend components, and two provisioned service instances,
// wires fake reactors for the DC/service/secret listings, and verifies that
// the unlink shell-completion suggestions contain only what is actually
// linked to the current component ("backend" and "postgresql-ephemeral") —
// the component itself and unlinked siblings must not be suggested.
func TestUnlinkCompletionHandler(t *testing.T) { tests := []struct { name string component string dcList appsv1.DeploymentConfigList serviceList scv1beta1.ServiceInstanceList output []string }{ { name: "Case 1: both components and services are present", component: "frontend", serviceList: scv1beta1.ServiceInstanceList{ Items: []scv1beta1.ServiceInstance{ { ObjectMeta: metav1.ObjectMeta{ Name: "mysql-persistent", Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "mysql-persistent", componentlabels.ComponentTypeLabel: "mysql-persistent", }, }, Spec: scv1beta1.ServiceInstanceSpec{ PlanReference: scv1beta1.PlanReference{ ClusterServiceClassExternalName: "mysql-persistent", ClusterServicePlanExternalName: "default", }, }, Status: scv1beta1.ServiceInstanceStatus{ Conditions: []scv1beta1.ServiceInstanceCondition{ { Reason: "ProvisionedSuccessfully", }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "postgresql-ephemeral", Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "postgresql-ephemeral", componentlabels.ComponentTypeLabel: "postgresql-ephemeral", }, }, Spec: scv1beta1.ServiceInstanceSpec{ PlanReference: scv1beta1.PlanReference{ ClusterServiceClassExternalName: "postgresql-ephemeral", ClusterServicePlanExternalName: "default", }, }, Status: scv1beta1.ServiceInstanceStatus{ Conditions: []scv1beta1.ServiceInstanceCondition{ { Reason: "ProvisionedSuccessfully", }, }, }, }, }, }, dcList: appsv1.DeploymentConfigList{ Items: []appsv1.DeploymentConfig{ { ObjectMeta: metav1.ObjectMeta{ Name: "backend-app", Namespace: "project", Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "backend", componentlabels.ComponentTypeLabel: "java", }, Annotations: map[string]string{ component.ComponentSourceTypeAnnotation: "local", }, }, Spec: appsv1.DeploymentConfigSpec{ Template: &corev1.PodTemplateSpec{ Spec: corev1.PodSpec{ Containers: []corev1.Container{ { 
Name: "dummyContainer", }, }, }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "backend2-app", Namespace: "project", Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "backend2", componentlabels.ComponentTypeLabel: "java", }, Annotations: map[string]string{ component.ComponentSourceTypeAnnotation: "local", }, }, Spec: appsv1.DeploymentConfigSpec{ Template: &corev1.PodTemplateSpec{ Spec: corev1.PodSpec{ Containers: []corev1.Container{ { Name: "dummyContainer", }, }, }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "frontend-app", Namespace: "project", Labels: map[string]string{ applabels.ApplicationLabel: "app", componentlabels.ComponentLabel: "frontend", componentlabels.ComponentTypeLabel: "nodejs", }, Annotations: map[string]string{ component.ComponentSourceTypeAnnotation: "local", }, }, Spec: appsv1.DeploymentConfigSpec{ Template: &corev1.PodTemplateSpec{ Spec: corev1.PodSpec{ Containers: []corev1.Container{ { Name: "dummyContainer", EnvFrom: []corev1.EnvFromSource{ { SecretRef: &corev1.SecretEnvSource{ LocalObjectReference: corev1.LocalObjectReference{Name: "postgresql-ephemeral"}, }, }, { SecretRef: &corev1.SecretEnvSource{ LocalObjectReference: corev1.LocalObjectReference{Name: "backend-8080"}, }, }, }, }, }, }, }, }, }, }, }, // make sure that the 'component' is not part of the suggestions and that only actually linked components/services show up output: []string{"backend", "postgresql-ephemeral"}, }, } p := corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "postgresql-ephemeral", }, } for _, tt := range tests { client, fakeClientSet := occlient.FakeNew() parsedArgs := parsedArgs{ commands: make(map[string]bool), } context := genericclioptions.NewFakeContext("project", "app", tt.component, client) //fake the services fakeClientSet.ServiceCatalogClientSet.PrependReactor("list", "serviceinstances", func(action ktesting.Action) (bool, runtime.Object, error) { return true, &tt.serviceList, nil }) //fake the dcs 
fakeClientSet.AppsClientset.PrependReactor("list", "deploymentconfigs", func(action ktesting.Action) (bool, runtime.Object, error) { return true, &tt.dcList, nil }) for i := range tt.dcList.Items { fakeClientSet.AppsClientset.PrependReactor("get", "deploymentconfigs", func(action ktesting.Action) (bool, runtime.Object, error) { return true, &tt.dcList.Items[i], nil }) } fakeClientSet.Kubernetes.PrependReactor("get", "secrets", func(action ktesting.Action) (bool, runtime.Object, error) { return true, &p, nil }) completions := UnlinkCompletionHandler(nil, parsedArgs, context) sort.Strings(completions) if !reflect.DeepEqual(tt.output, completions) { t.Errorf("expected output: %#v,got: %#v", tt.output, completions) } } }
explode_data.jsonl/3548
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2860 }
[ 2830, 3393, 1806, 2080, 33190, 3050, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 52228, 256, 914, 198, 197, 87249, 852, 414, 906, 3492, 16, 34848, 39130, 2648, 852, 198, 197, 52934, 852...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPanic executes a query whose resolver panics and verifies that the
// executor converts the panic into an error containing both the panic message
// ("test panic") and a stack trace referencing the originating file.
// NOTE(review): the query text is a raw-string literal — its exact whitespace
// is significant only to the parser, which tolerates it.
func TestPanic(t *testing.T) { query := makeQuery(nil) q := graphql.MustParse(` { panic } `, nil) if err := graphql.PrepareQuery(context.Background(), query, q.SelectionSet); err != nil { t.Error(err) } e := testgraphql.NewExecutorWrapperWithoutExactErrorMatch(t) _, err := e.Execute(context.Background(), query, nil, q) if err == nil || !strings.Contains(err.Error(), "test panic") { t.Error("expected test panic") } if !strings.Contains(err.Error(), "executor_test.go") { t.Error("expected stacktrace") } }
explode_data.jsonl/72162
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 47, 31270, 1155, 353, 8840, 836, 8, 341, 27274, 1669, 1281, 2859, 27907, 692, 18534, 1669, 48865, 50463, 14463, 61528, 197, 197, 515, 298, 30764, 198, 197, 197, 532, 197, 7808, 2092, 692, 743, 1848, 1669, 48865, 28770, 3380,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDescribeChangeLogs(t *testing.T) { client := NewTestClient() changeLogs, err := client.DescribeChangeLogs(&DescribeChangeLogsArgs{}) t.Logf("Change logs: %v, %v", changeLogs, err) }
explode_data.jsonl/68511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 74785, 4072, 51053, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 1532, 2271, 2959, 2822, 68380, 51053, 11, 1848, 1669, 2943, 23548, 3114, 4072, 51053, 2099, 74785, 4072, 51053, 4117, 6257, 692, 3244, 98954, 445, 4072, 18422, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestUseDatabase walks the engine through its database-selection state machine:
// UseDatabase before the catalog is ready must fail with ErrCatalogNotReady;
// after CREATE DATABASE but before selection, DatabaseInUse must report
// ErrNoDatabaseSelected; selecting an existing database ("db1") succeeds, while
// selecting a missing one ("db2") fails with ErrDatabaseDoesNotExist and must
// leave the previously selected database in effect. The same is then checked
// via the SQL "USE DATABASE" statement path. Each check depends on the state
// established by the previous one, so the order of operations is significant.
func TestUseDatabase(t *testing.T) { catalogStore, err := store.Open("catalog_use_db", store.DefaultOptions()) require.NoError(t, err) defer os.RemoveAll("catalog_use_db") dataStore, err := store.Open("sqldata_use_db", store.DefaultOptions()) require.NoError(t, err) defer os.RemoveAll("sqldata_use_db") engine, err := NewEngine(catalogStore, dataStore, DefaultOptions().WithPrefix(sqlPrefix)) require.NoError(t, err) err = engine.UseDatabase("db1") require.Equal(t, ErrCatalogNotReady, err) _, err = engine.DatabaseInUse() require.Equal(t, ErrCatalogNotReady, err) _, err = engine.ExecStmt("CREATE DATABASE db1", nil, true) require.NoError(t, err) _, err = engine.DatabaseInUse() require.Equal(t, ErrNoDatabaseSelected, err) err = engine.UseDatabase("db1") require.NoError(t, err) db, err := engine.DatabaseInUse() require.NoError(t, err) require.Equal(t, "db1", db.name) err = engine.UseDatabase("db2") require.Equal(t, ErrDatabaseDoesNotExist, err) db, err = engine.DatabaseInUse() require.NoError(t, err) require.Equal(t, "db1", db.name) _, err = engine.ExecStmt("USE DATABASE db1", nil, true) require.NoError(t, err) _, err = engine.ExecStmt("USE DATABASE db2", nil, true) require.Equal(t, ErrDatabaseDoesNotExist, err) }
explode_data.jsonl/64053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 486 }
[ 2830, 3393, 10253, 5988, 1155, 353, 8840, 836, 8, 341, 1444, 7750, 6093, 11, 1848, 1669, 3553, 12953, 445, 26539, 15951, 8685, 497, 3553, 13275, 3798, 2398, 17957, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 445, 26539, 15951, 8685, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHttpParser_Request_ContentLength_0(t *testing.T) { http := HttpModForTests() http.Send_headers = true http.Send_all_headers = true data := []byte("POST / HTTP/1.1\r\n" + "user-agent: curl/7.35.0\r\n" + "host: localhost:9000\r\n" + "accept: */*\r\n" + "authorization: Company 1\r\n" + "content-length: 0\r\n" + "connection: close\r\n" + "\r\n") stream := &HttpStream{data: data, message: new(HttpMessage)} ok, complete := http.messageParser(stream) if !ok { t.Errorf("Parsing returned error") } if !complete { t.Errorf("Expecting a complete message") } }
explode_data.jsonl/6831
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 2905, 6570, 44024, 78383, 4373, 62, 15, 1155, 353, 8840, 836, 8, 1476, 28080, 1669, 4823, 4459, 2461, 18200, 741, 28080, 20176, 26719, 284, 830, 198, 28080, 20176, 5705, 26719, 284, 830, 271, 8924, 1669, 3056, 3782, 445, 294...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestOutboundListenerTCPWithVS builds an outbound TCP listener for a service
// with a virtual service attached and asserts that the listener's filter-chain
// CIDR matches are exactly the expected set — in particular that no duplicate
// filter chains are generated when the virtual service destination shares the
// service's CIDR. When the RestrictPodIPTrafficLoops feature is enabled, an
// extra chain on the node IP (1.1.1.1) is expected first.
func TestOutboundListenerTCPWithVS(t *testing.T) { tests := []struct { name string CIDR string expectedChains []string }{ { name: "same CIDR", CIDR: "10.10.0.0/24", expectedChains: []string{"10.10.0.0"}, }, { name: "different CIDR", CIDR: "10.10.10.0/24", expectedChains: []string{"10.10.0.0", "10.10.10.0"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if features.RestrictPodIPTrafficLoops.Get() { // Expect a filter chain on the node IP tt.expectedChains = append([]string{"1.1.1.1"}, tt.expectedChains...) } services := []*model.Service{ buildService("test.com", tt.CIDR, protocol.TCP, tnow), } p := &fakePlugin{} virtualService := model.Config{ ConfigMeta: model.ConfigMeta{ Type: collections.IstioNetworkingV1Alpha3Virtualservices.Resource().Kind(), Version: collections.IstioNetworkingV1Alpha3Virtualservices.Resource().Version(), Name: "test_vs", Namespace: "default", }, Spec: virtualServiceSpec, } listeners := buildOutboundListeners(p, &proxy, nil, &virtualService, services...) if len(listeners) != 1 { t.Fatalf("expected %d listeners, found %d", 1, len(listeners)) } var chains []string for _, fc := range listeners[0].FilterChains { for _, cidr := range fc.FilterChainMatch.PrefixRanges { chains = append(chains, cidr.AddressPrefix) } } // There should not be multiple filter chains with same CIDR match if !reflect.DeepEqual(chains, tt.expectedChains) { t.Fatalf("expected filter chains %v, found %v", tt.expectedChains, chains) } }) } }
explode_data.jsonl/61267
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 761 }
[ 2830, 3393, 2662, 10891, 2743, 49896, 2354, 26050, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 6258, 915, 49, 1843, 914, 198, 197, 42400, 1143, 1735, 3056, 917, 198, 197, 59403, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTimeEncoders(t *testing.T) { moment := time.Unix(100, 50005000).UTC() tests := []struct { name string expected interface{} // output of serializing moment }{ {"iso8601", "1970-01-01T00:01:40.050Z"}, {"ISO8601", "1970-01-01T00:01:40.050Z"}, {"millis", 100050.005}, {"nanos", int64(100050005000)}, {"", 100.050005}, {"something-random", 100.050005}, } for _, tt := range tests { var te TimeEncoder require.NoError(t, te.UnmarshalText([]byte(tt.name)), "Unexpected error unmarshaling %q.", tt.name) assertAppended( t, tt.expected, func(arr ArrayEncoder) { te(moment, arr) }, "Unexpected output serializing %v with %q.", moment, tt.name, ) } }
explode_data.jsonl/51370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 1462, 7408, 52498, 1155, 353, 8840, 836, 8, 341, 2109, 12913, 1669, 882, 10616, 941, 7, 16, 15, 15, 11, 220, 20, 15, 15, 15, 20, 15, 15, 15, 568, 21183, 741, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApp_CommandWithDash(t *testing.T) { var args Args app := &App{ Commands: []*Command{ { Name: "cmd", Action: func(c *Context) error { args = c.Args() return nil }, }, }, } app.Run([]string{"", "cmd", "my-arg", "-"}) expect(t, args.Get(0), "my-arg") expect(t, args.Get(1), "-") }
explode_data.jsonl/52568
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 2164, 66751, 2354, 42263, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 17693, 271, 28236, 1669, 609, 2164, 515, 197, 197, 30479, 25, 29838, 4062, 515, 298, 197, 515, 571, 21297, 25, 330, 8710, 756, 571, 67607, 25, 2915, 1337,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPopulateCluster_KubeController_High_Enough_Version(t *testing.T) { c := buildMinimalCluster() c.Spec.KubernetesVersion = "v1.9.0" cloud, err := BuildCloud(c) if err != nil { t.Fatalf("error from BuildCloud: %v", err) } err = PerformAssignments(c, cloud) if err != nil { t.Fatalf("error from PerformAssignments: %v", err) } full, err := mockedPopulateClusterSpec(c) if err != nil { t.Fatalf("Unexpected error from PopulateCluster: %v", err) } if full.Spec.KubeControllerManager.AttachDetachReconcileSyncPeriod == nil { t.Fatalf("AttachDetachReconcileSyncPeriod not set correctly") } }
explode_data.jsonl/75050
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 11598, 6334, 28678, 10102, 3760, 2051, 2039, 1090, 62, 95801, 85217, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1936, 88328, 28678, 741, 1444, 36473, 11352, 29827, 5637, 284, 330, 85, 16, 13, 24, 13, 15, 1837, 197, 12361, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestExecPipeNils(t *testing.T) { data := "" resp := &types.HijackedResponse{Reader: bufio.NewReader(strings.NewReader(data))} err := execPipe(*resp, nil, nil, nil) if err != nil { t.Error(err) } }
explode_data.jsonl/50538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 10216, 34077, 45, 8669, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 8389, 34653, 1669, 609, 9242, 3839, 3172, 11191, 2582, 90, 5062, 25, 96917, 68587, 51442, 68587, 2592, 73031, 9859, 1669, 3883, 34077, 4071, 18243, 11, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestHealthCheckVerifiesTabletAlias(t *testing.T) { ts := memorytopo.NewServer("cell") hc := createTestHc(ts) defer hc.Close() tablet := topo.NewTablet(1, "cell", "a") tablet.PortMap["vt"] = 1 input := make(chan *querypb.StreamHealthResponse, 1) fc := createFakeConn(tablet, input) resultChan := hc.Subscribe() hc.AddTablet(tablet) // Immediately after AddTablet() there will be the first notification. want := &TabletHealth{ Tablet: tablet, Target: &querypb.Target{}, Serving: false, MasterTermStartTime: 0, } result := <-resultChan mustMatch(t, want, result, "Wrong TabletHealth data") input <- &querypb.StreamHealthResponse{ Target: &querypb.Target{Keyspace: "k", Shard: "s", TabletType: topodatapb.TabletType_MASTER}, TabletAlias: &topodatapb.TabletAlias{Uid: 20, Cell: "cellb"}, Serving: true, TabletExternallyReparentedTimestamp: 10, RealtimeStats: &querypb.RealtimeStats{SecondsBehindMaster: 1, CpuUsage: 0.2}, } ticker := time.NewTicker(1 * time.Second) select { case err := <-fc.cbErrCh: assert.Contains(t, err.Error(), "health stats mismatch", "wrong error") case <-resultChan: require.Fail(t, "StreamHealth should have returned a health stats mismatch error") case <-ticker.C: require.Fail(t, "Timed out waiting for StreamHealth to return a health stats mismatch error") } }
explode_data.jsonl/47686
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 626 }
[ 2830, 3393, 14542, 3973, 10141, 9606, 2556, 83, 22720, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 4938, 3481, 78, 7121, 5475, 445, 5873, 1138, 9598, 66, 1669, 1855, 2271, 39, 66, 35864, 340, 16867, 50394, 10421, 2822, 26481, 83, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReplaceUint8(t *testing.T) { v := &Value{data: []uint8{uint8(1), uint8(1), uint8(1), uint8(1), uint8(1), uint8(1)}} rawArr := v.MustUint8Slice() replaced := v.ReplaceUint8(func(index int, val uint8) uint8 { if index < len(rawArr)-1 { return rawArr[index+1] } return rawArr[0] }) replacedArr := replaced.MustUint8Slice() if assert.Equal(t, 6, len(replacedArr)) { assert.Equal(t, replacedArr[0], rawArr[1]) assert.Equal(t, replacedArr[1], rawArr[2]) assert.Equal(t, replacedArr[2], rawArr[3]) assert.Equal(t, replacedArr[3], rawArr[4]) assert.Equal(t, replacedArr[4], rawArr[5]) assert.Equal(t, replacedArr[5], rawArr[0]) } }
explode_data.jsonl/23470
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 23107, 21570, 23, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 3056, 2496, 23, 90, 2496, 23, 7, 16, 701, 2622, 23, 7, 16, 701, 2622, 23, 7, 16, 701, 2622, 23, 7, 16, 701, 2622, 23, 7, 16, 70...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestQueue_Pop(t *testing.T) { queue := New(0) read, write := int32(0), int32(0) var readCh sync.WaitGroup readCh.Add(1) go func() { for { if event, closed := queue.Pop(); closed { if event != nil { t.FailNow() } readCh.Done() break } else { if event == nil { t.FailNow() } read++ } } }() now := time.Now() var writeCh sync.WaitGroup for i := 0; i < 100; i++ { writeCh.Add(1) go func() { for i := 0; i < 1000000; i++ { if err := queue.Push(i); err != nil { t.FailNow() } else { atomic.AddInt32(&write, 1) } } writeCh.Done() }() } writeCh.Wait() log.Println(time.Since(now)) queue.Close() log.Println(time.Since(now)) readCh.Wait() log.Println(time.Since(now)) log.Printf("read: %d, write: %d", read, write) }
explode_data.jsonl/81008
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 7554, 1088, 453, 1155, 353, 8840, 836, 8, 1476, 46993, 1669, 1532, 7, 15, 692, 37043, 11, 3270, 1669, 526, 18, 17, 7, 15, 701, 526, 18, 17, 7, 15, 692, 2405, 1349, 1143, 12811, 28384, 2808, 271, 37043, 1143, 1904, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestClients_Get_Error(t *testing.T) { s, _, _, _, iss, err := setupHydraTest(false) if err != nil { t.Fatalf("setupHydraTest() failed: %v", err) } tests := []struct { name string clientName string realm string status int }{ { name: "client not exists", clientName: "invalid", realm: "master", status: http.StatusNotFound, }, { name: "client id and client name not match", clientName: "test_client2", realm: "master", status: http.StatusNotFound, }, { name: "not master realm", clientName: "test_client", realm: "test", status: http.StatusForbidden, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { pname := "non-admin" resp := damSendTestRequest(t, http.MethodGet, clientPath, tc.clientName, tc.realm, pname, test.TestClientID, test.TestClientSecret, nil, s, iss) if resp.StatusCode != tc.status { t.Errorf("resp.StatusCode = %d, wants %d", resp.StatusCode, tc.status) } }) } }
explode_data.jsonl/18510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 47174, 13614, 28651, 1155, 353, 8840, 836, 8, 341, 1903, 11, 8358, 8358, 8358, 2369, 11, 1848, 1669, 6505, 30816, 22248, 2271, 3576, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 15188, 30816, 22248, 2271, 368, 4641...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCACommon_GenerateRSACertificateRequestFP_Fail(t *testing.T) { _, errCA = CAGenerateRSACertificateRequestFP(&CertRequestFP{ PrivateKeyFilePath: "", CertificateRequestFilePath: filepath.Join(pathcarsapksc1512, caCertificateRequestFileName), SignatureAlgorithm: x509.SHA256WithRSAPSS, Subject: CAMockSubject, }, "PRIVATE KEY", RSAPKSC1()) t.Log(errCA) }
explode_data.jsonl/24080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 5049, 10839, 2646, 13220, 11451, 1706, 20962, 1900, 11698, 1400, 604, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 5049, 284, 356, 1890, 13220, 11451, 1706, 20962, 1900, 11698, 2099, 36934, 1900, 11698, 515, 197, 197, 75981,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetchPCRValues(t *testing.T) { rwc := openTPMOrSkip(t) defer rwc.Close() var mask pcrMask if err := mask.setPCR(17); err != nil { t.Fatal("Couldn't set PCR 17:", err) } pcrs, err := FetchPCRValues(rwc, []int{17}) if err != nil { t.Fatal("Couldn't get PCRs 17:", err) } comp, err := createPCRComposite(mask, pcrs) if err != nil { t.Fatal("Couldn't create PCR composite") } if len(comp) != int(digestSize) { t.Fatal("Invalid PCR composite") } var locality byte _, err = createPCRInfoLong(locality, mask, pcrs) if err != nil { t.Fatal("Couldn't create a pcrInfoLong structure for these PCRs") } }
explode_data.jsonl/75343
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 20714, 73156, 6227, 1155, 353, 8840, 836, 8, 341, 7000, 24028, 1669, 1787, 4239, 44, 2195, 35134, 1155, 340, 16867, 435, 24028, 10421, 2822, 2405, 6911, 281, 5082, 12686, 198, 743, 1848, 1669, 6911, 980, 73156, 7, 16, 22, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestReadVars(t *testing.T) { td := testutil.TempDir(t) defer os.RemoveAll(td) if err := testutil.WriteFiles(td, map[string]string{ "empty.yaml": "", "test.yaml": "a: foo\nb: bar", "invalid.yaml": "123", }); err != nil { t.Fatal(err) } for _, tc := range []struct { name string want map[string]string wantError bool }{ { name: "empty.yaml", want: map[string]string{}, }, { name: "test.yaml", want: map[string]string{"a": "foo", "b": "bar"}, }, { name: "invalid.yaml", wantError: true, }, { name: "missing.yaml", wantError: true, }, } { t.Run(tc.name, func(t *testing.T) { vars, err := readVarsFile(filepath.Join(td, tc.name)) if err != nil { if !tc.wantError { t.Fatal("readVarsFile failed: ", err) } return } if tc.wantError { t.Fatal("readVarsFile succeeded unexpectedly") } if diff := cmp.Diff(vars, tc.want); diff != "" { t.Fatalf("readVarsFile returned unexpected vars (-got +want):\n%v", diff) } }) } }
explode_data.jsonl/61586
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 517 }
[ 2830, 3393, 4418, 28305, 1155, 353, 8840, 836, 8, 341, 76373, 1669, 1273, 1314, 65009, 6184, 1155, 340, 16867, 2643, 84427, 61241, 692, 743, 1848, 1669, 1273, 1314, 4073, 10809, 61241, 11, 2415, 14032, 30953, 515, 197, 197, 1, 3194, 334...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestWriteIndex_Progress(t *testing.T) { idx, err := random.Index(100000, 3, 10) if err != nil { t.Fatal(err) } c := make(chan v1.Update, 200) // Set up a fake registry. s := httptest.NewServer(registry.New()) defer s.Close() u, err := url.Parse(s.URL) if err != nil { t.Fatal(err) } dst := fmt.Sprintf("%s/test/progress/upload", u.Host) ref, err := name.ParseReference(dst) if err != nil { t.Fatal(err) } if err := WriteIndex(ref, idx, WithProgress(c)); err != nil { t.Fatalf("WriteIndex: %v", err) } if err := checkUpdates(c); err != nil { t.Fatal(err) } }
explode_data.jsonl/76476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 7985, 1552, 16670, 2483, 1155, 353, 8840, 836, 8, 341, 62077, 11, 1848, 1669, 4194, 18338, 7, 16, 15, 15, 15, 15, 15, 11, 220, 18, 11, 220, 16, 15, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 53...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetMgmtSecret(t *testing.T) { rs, err := util.GenerateRandomHexString(8) require.NoError(t, err) key := fmt.Sprintf("WE_WILL_NEVER_USE_THIS_MGMT_SECRET_%s", rs) expectedVal := fmt.Sprintf("SUPER_SECRET_%s", rs) err = os.Setenv(key, expectedVal) require.NoError(t, err) for i := 0; i < 5; i++ { val := util.GetMgmtSecret(key) assert.Equal(t, expectedVal, val) } }
explode_data.jsonl/6090
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 1949, 44, 46063, 19773, 1155, 353, 8840, 836, 8, 341, 41231, 11, 1848, 1669, 4094, 57582, 13999, 49137, 7, 23, 340, 17957, 35699, 1155, 11, 1848, 692, 23634, 1669, 8879, 17305, 445, 12457, 2763, 9228, 14039, 3763, 22295, 660...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSequence(t *testing.T) { controller := newController() wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("") wf := unmarshalWF(sequence) wf, err := wfcset.Create(wf) assert.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate() updatedWf, err := wfcset.Get(wf.Name, metav1.GetOptions{}) assert.NoError(t, err) found100 := false found101 := false for _, node := range updatedWf.Status.Nodes { if node.DisplayName == "step1(0:100)" { assert.Equal(t, "100", *node.Inputs.Parameters[0].Value) found100 = true } else if node.DisplayName == "step1(1:101)" { assert.Equal(t, "101", *node.Inputs.Parameters[0].Value) found101 = true } } assert.Equal(t, true, found100) assert.Equal(t, true, found101) }
explode_data.jsonl/54366
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 14076, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 501, 2051, 741, 6692, 8316, 746, 1669, 6461, 1418, 69, 2972, 746, 18979, 45926, 73, 53, 16, 7141, 16, 1005, 6776, 38140, 445, 5130, 6692, 69, 1669, 650, 27121, 32131, 56912...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestCompileWithoutShared builds the c-archive test library with
// -gcflags=-shared=false, links a C program against it (probing which
// PIE-disabling flag the local compiler accepts), and checks that the
// resulting binary still forwards signals (exits via SIGPIPE).
func TestCompileWithoutShared(t *testing.T) {
	// For simplicity, reuse the signal forwarding test.
	checkSignalForwardingTest(t)
	defer func() {
		os.Remove("libgo2.a")
		os.Remove("libgo2.h")
	}()
	// Build the archive with shared support explicitly disabled.
	cmd := exec.Command("go", "build", "-buildmode=c-archive", "-gcflags=-shared=false", "-o", "libgo2.a", "libgo2")
	cmd.Env = gopathEnv
	t.Log(cmd.Args)
	out, err := cmd.CombinedOutput()
	t.Logf("%s", out)
	if err != nil {
		t.Fatal(err)
	}
	exe := "./testnoshared" + exeSuffix
	// In some cases, -no-pie is needed here, but not accepted everywhere. First try
	// if -no-pie is accepted. See #22126.
	ccArgs := append(cc, "-o", exe, "-no-pie", "main5.c", "libgo2.a")
	t.Log(ccArgs)
	out, err = exec.Command(ccArgs[0], ccArgs[1:]...).CombinedOutput()
	// If -no-pie unrecognized, try -nopie if this is possibly clang
	if err != nil && bytes.Contains(out, []byte("unknown")) && !strings.Contains(cc[0], "gcc") {
		ccArgs = append(cc, "-o", exe, "-nopie", "main5.c", "libgo2.a")
		t.Log(ccArgs)
		out, err = exec.Command(ccArgs[0], ccArgs[1:]...).CombinedOutput()
	}
	// Don't use either -no-pie or -nopie
	if err != nil && bytes.Contains(out, []byte("unrecognized")) {
		// NOTE: this ccArgs deliberately shadows the outer one; the
		// earlier flagged command lines are not needed again.
		ccArgs := append(cc, "-o", exe, "main5.c", "libgo2.a")
		t.Log(ccArgs)
		out, err = exec.Command(ccArgs[0], ccArgs[1:]...).CombinedOutput()
	}
	t.Logf("%s", out)
	if err != nil {
		t.Fatal(err)
	}
	defer os.Remove(exe)
	// Run the binary; "3" presumably selects the SIGPIPE scenario in
	// main5.c — confirm against that fixture.
	binArgs := append(cmdToRun(exe), "3")
	t.Log(binArgs)
	out, err = exec.Command(binArgs[0], binArgs[1:]...).CombinedOutput()
	t.Logf("%s", out)
	expectSignal(t, err, syscall.SIGPIPE)
}
explode_data.jsonl/50866
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 673 }
[ 2830, 3393, 46126, 26040, 16997, 1155, 353, 8840, 836, 8, 341, 197, 322, 1752, 38975, 11, 25978, 279, 8286, 62104, 1273, 624, 25157, 26810, 25925, 287, 2271, 1155, 692, 16867, 2915, 368, 341, 197, 25078, 13270, 445, 2740, 3346, 17, 5849...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBackupCreatedStatsFromIncrementalBackup creates a chain of one full
// and two incremental backups (with fresh table statistics before each),
// then restores to a timestamp taken at the second backup and checks that
// the restored statistics match that point in time (3 rows), not the full
// backup (1 row) or the latest incremental (5 rows).
func TestBackupCreatedStatsFromIncrementalBackup(t *testing.T) {
	defer leaktest.AfterTest(t)()
	const incremental1Foo = "nodelocal://0/incremental1foo"
	const incremental2Foo = "nodelocal://0/incremental2foo"
	const numAccounts = 1
	_, _, sqlDB, _, cleanupFn := BackupRestoreTestSetup(t, singleNode, numAccounts, InitNone)
	defer cleanupFn()
	var beforeTs string

	// Disable automatic stats so only the explicit CREATE STATISTICS runs
	// below determine what is stored in each backup.
	sqlDB.Exec(t, `SET CLUSTER SETTING sql.stats.automatic_collection.enabled=false`)

	// Create the 1st backup, where data.bank has 1 account.
	sqlDB.Exec(t, `CREATE STATISTICS bank_stats FROM data.bank`)
	sqlDB.Exec(t, `BACKUP data.bank TO $1 WITH revision_history`, LocalFoo)

	// Create the 2nd backup, where data.bank has 3 accounts.
	sqlDB.Exec(t, `INSERT INTO data.bank VALUES (2, 2), (4, 4)`)
	sqlDB.Exec(t, `CREATE STATISTICS bank_stats FROM data.bank`)
	sqlDB.QueryRow(t, `SELECT cluster_logical_timestamp()`).Scan(&beforeTs) // Save time to restore to this point.
	sqlDB.Exec(t, `BACKUP data.bank TO $1 INCREMENTAL FROM $2 WITH revision_history`, incremental1Foo, LocalFoo)

	// Create the 3rd backup, where data.bank has 5 accounts.
	sqlDB.Exec(t, `INSERT INTO data.bank VALUES (3, 3), (5, 2)`)
	sqlDB.Exec(t, `CREATE STATISTICS bank_stats FROM data.bank`)
	sqlDB.Exec(t, `BACKUP data.bank TO $1 INCREMENTAL FROM $2, $3 WITH revision_history`, incremental2Foo, LocalFoo, incremental1Foo)

	// Restore the 2nd backup.
	sqlDB.Exec(t, `CREATE DATABASE "data 2"`)
	sqlDB.Exec(t, fmt.Sprintf(`RESTORE data.bank FROM "%s", "%s", "%s" AS OF SYSTEM TIME %s WITH skip_missing_foreign_keys, into_db = "%s"`, LocalFoo, incremental1Foo, incremental2Foo, beforeTs, "data 2"))

	// Expect the values in row_count and distinct_count to be 3. The values
	// would be 1 if the stats from the full backup were restored and 5 if
	// the stats from the latest incremental backup were restored.
	sqlDB.CheckQueryResults(t, `SELECT statistics_name, column_names, row_count, distinct_count, null_count FROM [SHOW STATISTICS FOR TABLE "data 2".bank] WHERE statistics_name='bank_stats'`, [][]string{
		{"bank_stats", "{id}", "3", "3", "0"},
		{"bank_stats", "{balance}", "3", "3", "0"},
		{"bank_stats", "{payload}", "3", "2", "2"},
	})
}
explode_data.jsonl/57621
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 794 }
[ 2830, 3393, 56245, 11694, 16635, 3830, 38311, 278, 56245, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 4777, 52299, 16, 40923, 284, 330, 77, 720, 3683, 1110, 15, 17996, 13477, 278, 16, 7975, 698, 4777, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatAdd(t *testing.T) { mat1 := NewMatWithSize(101, 102, MatTypeCV8U) mat2 := NewMatWithSize(101, 102, MatTypeCV8U) mat3 := NewMat() Add(mat1, mat2, &mat3) if mat3.Empty() { t.Error("TestMatAdd dest mat3 should not be empty.") } }
explode_data.jsonl/81705
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 11575, 2212, 1155, 353, 8840, 836, 8, 341, 59874, 16, 1669, 1532, 11575, 2354, 1695, 7, 16, 15, 16, 11, 220, 16, 15, 17, 11, 6867, 929, 19589, 23, 52, 340, 59874, 17, 1669, 1532, 11575, 2354, 1695, 7, 16, 15, 16, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPublishAndGetConfig(t *testing.T) { file, err := initFileData(t) assert.NoError(t, err) err = file.PublishConfig(key, "", "A") assert.NoError(t, err) prop, err := file.GetProperties(key) assert.NoError(t, err) assert.Equal(t, "A", prop) defer destroy(file.rootPath, file) }
explode_data.jsonl/16357
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 50145, 97726, 2648, 1155, 353, 8840, 836, 8, 341, 17661, 11, 1848, 1669, 2930, 1703, 1043, 1155, 340, 6948, 35699, 1155, 11, 1848, 340, 9859, 284, 1034, 83935, 2648, 4857, 11, 7342, 330, 32, 1138, 6948, 35699, 1155, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_hcsTask_GetExec_UnknownExecID_Error(t *testing.T) { lt, _, _ := setupTestHcsTask(t) e, err := lt.GetExec("shouldnotmatch") verifyExpectedError(t, e, err, errdefs.ErrNotFound) }
explode_data.jsonl/56370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 1523, 4837, 6262, 13614, 10216, 62, 13790, 10216, 915, 28651, 1155, 353, 8840, 836, 8, 341, 197, 4832, 11, 8358, 716, 1669, 6505, 2271, 39, 4837, 6262, 1155, 692, 7727, 11, 1848, 1669, 25175, 2234, 10216, 445, 5445, 1921, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestGitCommit_GenerateFullyQualifiedImageName table-tests git-based image
// tagging: each case builds a scratch git repo in a temp dir, runs the
// GitCommit tagger, and compares the produced tag against a golden value.
// The golden tags encode the commit short-SHA plus, for dirty worktrees, a
// "-dirty-<hash>" suffix derived from the uncommitted changes — do not edit
// the expected strings without recomputing them.
func TestGitCommit_GenerateFullyQualifiedImageName(t *testing.T) {
	tests := []struct {
		description   string
		expectedName  string
		createGitRepo func(string)
		opts          *TagOptions
		shouldErr     bool
	}{
		{
			// Clean repo: tag is just image name + commit short-SHA.
			description: "success",
			opts: &TagOptions{
				ImageName: "test",
				Digest:    "sha256:12345abcde",
			},
			expectedName: "test:eefe1b9",
			createGitRepo: func(dir string) {
				gitInit(t, dir).
					write("source.go", []byte("code")).
					add("source.go").
					commit("initial")
			},
		},
		{
			// Tracked file modified after commit -> dirty suffix.
			description: "dirty",
			opts: &TagOptions{
				ImageName: "test",
			},
			expectedName: "test:eefe1b9-dirty-af8de1fde8be4367",
			createGitRepo: func(dir string) {
				gitInit(t, dir).
					write("source.go", []byte("code")).
					add("source.go").
					commit("initial").
					write("source.go", []byte("updated code"))
			},
		},
		{
			// New untracked file also counts as dirty, with its own hash.
			description: "untracked",
			opts: &TagOptions{
				ImageName: "test",
			},
			expectedName: "test:eefe1b9-dirty-bfe9b4566c9d3fec",
			createGitRepo: func(dir string) {
				gitInit(t, dir).
					write("source.go", []byte("code")).
					add("source.go").
					commit("initial").
					write("new.go", []byte("new code"))
			},
		},
		{
			description: "one file deleted",
			opts: &TagOptions{
				ImageName: "test",
			},
			expectedName: "test:279d53f-dirty-6a3ce511c689eda7",
			createGitRepo: func(dir string) {
				gitInit(t, dir).
					write("source1.go", []byte("code1")).
					write("source2.go", []byte("code2")).
					add("source1.go", "source2.go").
					commit("initial").
					delete("source1.go")
			},
		},
		{
			description: "two files deleted",
			opts: &TagOptions{
				ImageName: "test",
			},
			expectedName: "test:279d53f-dirty-d48c11ed65c37a09", // Must be <> than when only one file is deleted
			createGitRepo: func(dir string) {
				gitInit(t, dir).
					write("source1.go", []byte("code1")).
					write("source2.go", []byte("code2")).
					add("source1.go", "source2.go").
					commit("initial").
					delete("source1.go", "source2.go")
			},
		},
		{
			description: "rename",
			opts: &TagOptions{
				ImageName: "test",
			},
			expectedName: "test:eefe1b9-dirty-9c858d88cc0bf792",
			createGitRepo: func(dir string) {
				gitInit(t, dir).
					write("source.go", []byte("code")).
					add("source.go").
					commit("initial").
					rename("source.go", "source2.go")
			},
		},
		{
			description: "rename to different name",
			opts: &TagOptions{
				ImageName: "test",
			},
			expectedName: "test:eefe1b9-dirty-6534adc17ccd1cf4", // Must be <> each time a new name is used
			createGitRepo: func(dir string) {
				gitInit(t, dir).
					write("source.go", []byte("code")).
					add("source.go").
					commit("initial").
					rename("source.go", "source3.go")
			},
		},
		{
			// No git repo at all -> the tagger must return an error.
			description:   "failure",
			createGitRepo: func(dir string) {},
			shouldErr:     true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.description, func(t *testing.T) {
			tmpDir, cleanup := testutil.TempDir(t)
			defer cleanup()

			tt.createGitRepo(tmpDir)

			c := &GitCommit{}
			name, err := c.GenerateFullyQualifiedImageName(tmpDir, tt.opts)

			testutil.CheckErrorAndDeepEqual(t, tt.shouldErr, err, tt.expectedName, name)
		})
	}
}
explode_data.jsonl/3536
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1522 }
[ 2830, 3393, 46562, 33441, 2646, 13220, 67386, 65993, 1906, 675, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 256, 914, 198, 197, 42400, 675, 220, 914, 198, 197, 39263, 46562, 25243, 2915, 3609, 340, 197, 64734...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPersistentClaimReadOnlyFlag(t *testing.T) { tmpDir, plug := getPlugin(t) defer os.RemoveAll(tmpDir) // Read only == true pod := &v1.Pod{ObjectMeta: metav1.ObjectMeta{UID: types.UID("poduid")}} mounter, err := plug.NewMounter(getTestVolume(true, tmpDir, false, nil), pod, volume.VolumeOptions{}) if err != nil { t.Errorf("Failed to make a new Mounter: %v", err) } if mounter == nil { t.Fatalf("Got a nil Mounter") } if !mounter.GetAttributes().ReadOnly { t.Errorf("Expected true for mounter.IsReadOnly") } // Read only == false mounter, err = plug.NewMounter(getTestVolume(false, tmpDir, false, nil), pod, volume.VolumeOptions{}) if err != nil { t.Errorf("Failed to make a new Mounter: %v", err) } if mounter == nil { t.Fatalf("Got a nil Mounter") } if mounter.GetAttributes().ReadOnly { t.Errorf("Expected false for mounter.IsReadOnly") } }
explode_data.jsonl/14262
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 53194, 45544, 20914, 12135, 1155, 353, 8840, 836, 8, 341, 20082, 6184, 11, 19633, 1669, 633, 11546, 1155, 340, 16867, 2643, 84427, 10368, 6184, 692, 197, 322, 4457, 1172, 621, 830, 198, 3223, 347, 1669, 609, 85, 16, 88823, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestGetInstalledMarketplacePlugins covers the merge between marketplace
// listings and locally installed plugins: plugins the marketplace does not
// know about are appended with a "Local" label, and plugins it does know
// about get their InstalledVersion filled in (and cleared again on removal).
func TestGetInstalledMarketplacePlugins(t *testing.T) {
	// Canonical marketplace response used by both sub-tests.
	samplePlugins := []*model.MarketplacePlugin{
		{
			BaseMarketplacePlugin: &model.BaseMarketplacePlugin{
				HomepageURL: "https://example.com/mattermost/mattermost-plugin-nps",
				IconData:    "https://example.com/icon.svg",
				DownloadURL: "https://example.com/mattermost/mattermost-plugin-nps/releases/download/v1.0.3/com.mattermost.nps-1.0.3.tar.gz",
				Labels: []model.MarketplaceLabel{
					{
						Name:        "someName",
						Description: "some Description",
					},
				},
				Manifest: &model.Manifest{
					Id:               "com.mattermost.nps",
					Name:             "User Satisfaction Surveys",
					Description:      "This plugin sends quarterly user satisfaction surveys to gather feedback and help improve Mattermost.",
					Version:          "1.0.3",
					MinServerVersion: "5.14.0",
				},
			},
			InstalledVersion: "",
		},
	}

	// Fixture plugin bundle uploaded during the sub-tests.
	path, _ := fileutils.FindDir("tests")
	tarData, err := ioutil.ReadFile(filepath.Join(path, "testplugin.tar.gz"))
	require.NoError(t, err)

	t.Run("marketplace client returns not-installed plugin", func(t *testing.T) {
		th := Setup().InitBasic()
		defer th.TearDown()

		// Fake marketplace server always answering with samplePlugins.
		testServer := httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
			res.WriteHeader(http.StatusOK)
			json, err := json.Marshal(samplePlugins)
			require.NoError(t, err)
			res.Write(json)
		}))
		defer func() { testServer.Close() }()

		th.App.UpdateConfig(func(cfg *model.Config) {
			*cfg.PluginSettings.Enable = true
			*cfg.PluginSettings.EnableUploads = true
			*cfg.PluginSettings.EnableMarketplace = true
			*cfg.PluginSettings.MarketplaceUrl = testServer.URL
		})

		// Baseline: only the marketplace plugin is listed.
		plugins, resp := th.SystemAdminClient.GetMarketplacePlugins(&model.MarketplacePluginFilter{})
		CheckNoError(t, resp)
		require.Equal(t, samplePlugins, plugins)

		manifest, resp := th.SystemAdminClient.UploadPlugin(bytes.NewReader(tarData))
		CheckNoError(t, resp)

		testIcon, err := ioutil.ReadFile(filepath.Join(path, "test.svg"))
		require.NoError(t, err)
		require.True(t, svg.Is(testIcon))
		testIconData := fmt.Sprintf("data:image/svg+xml;base64,%s", base64.StdEncoding.EncodeToString(testIcon))

		// The uploaded plugin is unknown to the marketplace, so it must be
		// surfaced as a "Local" entry with its icon inlined as a data URI.
		expectedPlugins := append(samplePlugins, &model.MarketplacePlugin{
			BaseMarketplacePlugin: &model.BaseMarketplacePlugin{
				HomepageURL:     "https://example.com/homepage",
				IconData:        testIconData,
				DownloadURL:     "",
				ReleaseNotesURL: "https://example.com/releases/v0.0.1",
				Labels: []model.MarketplaceLabel{{
					Name:        "Local",
					Description: "This plugin is not listed in the marketplace",
				}},
				Manifest: manifest,
			},
			InstalledVersion: manifest.Version,
		})
		// Listings are expected name-sorted (case-insensitive).
		sort.SliceStable(expectedPlugins, func(i, j int) bool {
			return strings.ToLower(expectedPlugins[i].Manifest.Name) < strings.ToLower(expectedPlugins[j].Manifest.Name)
		})

		plugins, resp = th.SystemAdminClient.GetMarketplacePlugins(&model.MarketplacePluginFilter{})
		CheckNoError(t, resp)
		require.Equal(t, expectedPlugins, plugins)

		// After removal, the listing reverts to the marketplace-only view.
		ok, resp := th.SystemAdminClient.RemovePlugin(manifest.Id)
		CheckNoError(t, resp)
		assert.True(t, ok)

		plugins, resp = th.SystemAdminClient.GetMarketplacePlugins(&model.MarketplacePluginFilter{})
		CheckNoError(t, resp)
		require.Equal(t, samplePlugins, plugins)
	})

	t.Run("marketplace client returns installed plugin", func(t *testing.T) {
		th := Setup().InitBasic()
		defer th.TearDown()

		th.App.UpdateConfig(func(cfg *model.Config) {
			*cfg.PluginSettings.Enable = true
			*cfg.PluginSettings.EnableUploads = true
			*cfg.PluginSettings.EnableMarketplace = true
		})

		manifest, resp := th.SystemAdminClient.UploadPlugin(bytes.NewReader(tarData))
		CheckNoError(t, resp)

		// Marketplace entry matching the uploaded plugin's manifest; the
		// listing should mark it installed at the uploaded version.
		newPlugin := &model.MarketplacePlugin{
			BaseMarketplacePlugin: &model.BaseMarketplacePlugin{
				HomepageURL: "HomepageURL",
				IconData:    "IconData",
				DownloadURL: "DownloadURL",
				Manifest:    manifest,
			},
			InstalledVersion: manifest.Version,
		}
		expectedPlugins := append(samplePlugins, newPlugin)
		sort.SliceStable(expectedPlugins, func(i, j int) bool {
			return strings.ToLower(expectedPlugins[i].Manifest.Name) < strings.ToLower(expectedPlugins[j].Manifest.Name)
		})

		testServer := httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
			res.WriteHeader(http.StatusOK)
			json, err := json.Marshal([]*model.MarketplacePlugin{samplePlugins[0], newPlugin})
			require.NoError(t, err)
			res.Write(json)
		}))
		defer func() { testServer.Close() }()

		th.App.UpdateConfig(func(cfg *model.Config) {
			*cfg.PluginSettings.MarketplaceUrl = testServer.URL
		})

		plugins, resp := th.SystemAdminClient.GetMarketplacePlugins(&model.MarketplacePluginFilter{})
		CheckNoError(t, resp)
		require.Equal(t, expectedPlugins, plugins)

		ok, resp := th.SystemAdminClient.RemovePlugin(manifest.Id)
		CheckNoError(t, resp)
		assert.True(t, ok)

		plugins, resp = th.SystemAdminClient.GetMarketplacePlugins(&model.MarketplacePluginFilter{})
		CheckNoError(t, resp)
		// Once removed, the plugin is no longer installed; the shared
		// newPlugin fixture is updated in place before comparing.
		newPlugin.InstalledVersion = ""
		require.Equal(t, expectedPlugins, plugins)
	})
}
explode_data.jsonl/27538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1965 }
[ 2830, 3393, 1949, 60800, 38822, 2007, 45378, 1155, 353, 8840, 836, 8, 341, 1903, 1516, 45378, 1669, 29838, 2528, 1321, 54560, 2007, 11546, 515, 197, 197, 515, 298, 66732, 38822, 2007, 11546, 25, 609, 2528, 13018, 38822, 2007, 11546, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_New(t *testing.T) { rng := test.Prng(t) backend := &ctest.MockBackend{} c, err := client.New(wtest.NewRandomAddress(rng), &DummyBus{t}, backend, backend, wtest.RandomWallet()) assert.NoError(t, err) require.NotNil(t, c) }
explode_data.jsonl/2343
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 2959, 39582, 1155, 353, 8840, 836, 8, 341, 7000, 968, 1669, 1273, 17947, 968, 1155, 340, 197, 20942, 1669, 609, 67880, 24664, 29699, 16094, 1444, 11, 1848, 1669, 2943, 7121, 3622, 1944, 7121, 13999, 4286, 87597, 701, 609, 43...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestScanStreamGroupsConsumers is an integration test (requires
// TEST_REDIS_URI) that creates consumer groups on two streams, has
// consumers read from them, and then verifies scanStreamGroupConsumers
// reports each consumer's name and pending-entry count.
func TestScanStreamGroupsConsumers(t *testing.T) {
	if os.Getenv("TEST_REDIS_URI") == "" {
		t.Skipf("TEST_REDIS_URI not set - skipping")
	}
	addr := os.Getenv("TEST_REDIS_URI")
	db := dbNumStr

	c, err := redis.DialURL(addr)
	if err != nil {
		t.Fatalf("Couldn't connect to %#v: %#v", addr, err)
	}
	_, err = c.Do("SELECT", db)
	if err != nil {
		t.Errorf("Couldn't select database %#v", db)
	}

	fixtures := []keyFixture{
		{"XADD", "single_consumer_stream", []interface{}{"*", "field_1", "str_1"}},
		{"XADD", "multiple_consumer_stream", []interface{}{"*", "field_pattern_1", "str_pattern_1"}},
	}

	// Create test streams
	// NOTE(review): the errors from these XGROUP/XREADGROUP calls are
	// assigned but never checked before being overwritten — confirm
	// whether that is intentional best-effort setup.
	_, err = c.Do("XGROUP", "CREATE", "single_consumer_stream", "test_group_1", "$", "MKSTREAM")
	_, err = c.Do("XGROUP", "CREATE", "multiple_consumer_stream", "test_group_1", "$", "MKSTREAM")
	// Add simple test items to streams
	createKeyFixtures(t, c, fixtures)

	defer func() {
		deleteKeyFixtures(t, c, fixtures)
		c.Close()
	}()

	// Process messages to assign Consumers to their groups
	_, err = c.Do("XREADGROUP", "GROUP", "test_group_1", "test_consumer_1", "COUNT", "1", "STREAMS", "single_consumer_stream", ">")
	_, err = c.Do("XREADGROUP", "GROUP", "test_group_1", "test_consumer_1", "COUNT", "1", "STREAMS", "multiple_consumer_stream", ">")
	_, err = c.Do("XREADGROUP", "GROUP", "test_group_1", "test_consumer_2", "COUNT", "1", "STREAMS", "multiple_consumer_stream", "0")

	tsts := []scanStreamFixture{
		{
			name:   "Single group test",
			stream: "single_consumer_stream",
			groups: []streamGroupsInfo{{Name: "test_group_1"}},
			consumers: []streamGroupConsumersInfo{
				{
					Name:    "test_consumer_1",
					Pending: 1,
				},
			},
		},
		{
			name:   "Multiple consumers test",
			stream: "multiple_consumer_stream",
			groups: []streamGroupsInfo{{Name: "test_group_1"}},
			consumers: []streamGroupConsumersInfo{
				{
					Name:    "test_consumer_1",
					Pending: 1,
				},
				{
					// test_consumer_2 read from position "0" and so has
					// nothing pending.
					Name:    "test_consumer_2",
					Pending: 0,
				},
			},
		},
	}

	for _, tst := range tsts {
		t.Run(tst.name, func(t *testing.T) {
			// For each group
			for _, g := range tst.groups {
				g.StreamGroupConsumersInfo, err = scanStreamGroupConsumers(c, tst.stream, g.Name)
				if err != nil {
					t.Errorf("Err: %s", err)
				}
				if len(g.StreamGroupConsumersInfo) == len(tst.consumers) {
					// Compare each scanned consumer against the fixture,
					// relying on matching order.
					for i := range g.StreamGroupConsumersInfo {
						if g.StreamGroupConsumersInfo[i].Name != tst.consumers[i].Name {
							t.Errorf("Consumer name mismatch.\nExpected: %#v;\nActual: %#v\n", tst.consumers[i].Name, g.StreamGroupConsumersInfo[i].Name)
						}
						if g.StreamGroupConsumersInfo[i].Pending != tst.consumers[i].Pending {
							t.Errorf("Pending items mismatch for %s.\nExpected: %#v;\nActual: %#v\n", g.StreamGroupConsumersInfo[i].Name, tst.consumers[i].Pending, g.StreamGroupConsumersInfo[i].Pending)
						}
					}
				} else {
					t.Errorf("Consumers entries mismatch.\nExpected: %d;\nActual: %d\n", len(tst.consumers), len(g.StreamGroupConsumersInfo))
				}
			}
		})
	}
}
explode_data.jsonl/47012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1316 }
[ 2830, 3393, 26570, 3027, 22173, 41966, 388, 1155, 353, 8840, 836, 8, 341, 743, 2643, 64883, 445, 10033, 2192, 21202, 23116, 899, 621, 1591, 341, 197, 3244, 57776, 69, 445, 10033, 2192, 21202, 23116, 537, 738, 481, 42659, 1138, 197, 532,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJson2(t *testing.T) { msg := "'stringstring'" bss := []byte{} err := json.Unmarshal([]byte(msg), &bss) fmt.Println(msg, bss, err) assert.NotNil(t, nil) }
explode_data.jsonl/57739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 5014, 17, 1155, 353, 8840, 836, 8, 341, 21169, 1669, 7178, 917, 917, 41165, 2233, 778, 1669, 3056, 3782, 16094, 9859, 1669, 2951, 38097, 10556, 3782, 8119, 701, 609, 65, 778, 340, 11009, 12419, 8119, 11, 293, 778, 11, 1848...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestEverything(t *testing.T) { if !Everything().Matches(Set{"x": "y"}) { t.Errorf("Nil selector didn't match") } if !Everything().Empty() { t.Errorf("Everything was not empty") } }
explode_data.jsonl/29765
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 34964, 1155, 353, 8840, 836, 8, 341, 743, 753, 34964, 1005, 42470, 52474, 4913, 87, 788, 330, 88, 80154, 341, 197, 3244, 13080, 445, 19064, 9367, 3207, 944, 2432, 1138, 197, 532, 743, 753, 34964, 1005, 3522, 368, 341, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestDeepEqualUnexportedMap(t *testing.T) { // Check that DeepEqual can look at unexported fields. x1 := UnexpT{map[int]int{1: 2}} x2 := UnexpT{map[int]int{1: 2}} if !DeepEqual(&x1, &x2) { t.Error("DeepEqual(x1, x2) = false, want true") } y1 := UnexpT{map[int]int{2: 3}} if DeepEqual(&x1, &y1) { t.Error("DeepEqual(x1, y1) = true, want false") } }
explode_data.jsonl/29542
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 33464, 2993, 1806, 1533, 291, 2227, 1155, 353, 8840, 836, 8, 341, 197, 322, 4248, 429, 18183, 2993, 646, 1401, 518, 650, 1533, 291, 5043, 624, 10225, 16, 1669, 1230, 4580, 51, 90, 2186, 18640, 63025, 90, 16, 25, 220, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_NewCommand(t *testing.T) { tests := []struct { name string commands []string wantedAction action wantedKey string wantedValue string wantedErr error }{ { name: "GET Uppercase", commands: []string{"GET", "foo"}, wantedAction: GET, wantedKey: "foo", wantedValue: "", wantedErr: nil, }, { name: "GET Lowercase", commands: []string{"get", "foo"}, wantedAction: GET, wantedKey: "foo", wantedValue: "", wantedErr: nil, }, { name: "SET Uppercase", commands: []string{"SET", "foo", "bar", "baz"}, wantedAction: SET, wantedKey: "foo", wantedValue: "bar baz", wantedErr: nil, }, { name: "SET Lowercase", commands: []string{"set", "foo", "bar", "baz"}, wantedAction: SET, wantedKey: "foo", wantedValue: "bar baz", wantedErr: nil, }, { name: "DEL Uppercase", commands: []string{"DEL", "foo"}, wantedAction: DEL, wantedKey: "foo", wantedValue: "", wantedErr: nil, }, { name: "DEL Lowercase", commands: []string{"del", "foo"}, wantedAction: DEL, wantedKey: "foo", wantedValue: "", wantedErr: nil, }, { name: "LIST Uppercase", commands: []string{"LIST"}, wantedAction: LIST, wantedKey: "", wantedValue: "", wantedErr: nil, }, { name: "LIST Lowercase", commands: []string{"list"}, wantedAction: LIST, wantedKey: "", wantedValue: "", wantedErr: nil, }, { name: "EXIT Uppercase", commands: []string{"EXIT"}, wantedAction: EXIT, wantedKey: "", wantedValue: "", wantedErr: nil, }, { name: "EXIT Lowercase", commands: []string{"exit"}, wantedAction: EXIT, wantedKey: "", wantedValue: "", wantedErr: nil, }, { name: "An unknown action", commands: []string{"foobar"}, wantedAction: 0, wantedKey: "", wantedValue: "", wantedErr: errors.New("commands parseAction: Unknown action foobar"), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { cmd, err := NewCommand(tt.commands) if err != nil && err.Error() != tt.wantedErr.Error() { t.Errorf("%q = %q, want %q", err, tt.wantedErr, tt.wantedErr) } if cmd.action != tt.wantedAction { t.Error("unwanted command action") } if cmd.key != tt.wantedKey { 
t.Errorf("%q = %q, want %q", cmd.key, tt.wantedKey, tt.wantedKey) } if cmd.value != tt.wantedValue { t.Errorf("%q = %q, want %q", cmd.value, tt.wantedValue, tt.wantedValue) } }) } }
explode_data.jsonl/79581
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1435 }
[ 2830, 3393, 39582, 4062, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 197, 24270, 257, 3056, 917, 198, 197, 6692, 7566, 2512, 1917, 198, 197, 6692, 7566, 1592, 262, 914, 198, 197, 6692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestUDP6AddrValidation(t *testing.T) { tests := []struct { param string expected bool }{ {"", false}, {":80", false}, {"127.0.0.1:80", false}, {"[::1]:80", true}, {"256.0.0.0:1", false}, {"[::1]", false}, } validate := New() for i, test := range tests { errs := validate.Var(test.param, "udp6_addr") if test.expected { if !IsEqual(errs, nil) { t.Fatalf("Index: %d udp6_addr failed Error: %s", i, errs) } } else { if IsEqual(errs, nil) { t.Fatalf("Index: %d udp6_addr failed Error: %s", i, errs) } else { val := getError(errs, "", "") if val.Tag() != "udp6_addr" { t.Fatalf("Index: %d udp6_addr failed Error: %s", i, errs) } } } } }
explode_data.jsonl/77246
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 41648, 21, 13986, 13799, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 36037, 262, 914, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 4913, 497, 895, 1583, 197, 197, 90, 788, 23, 15, 497, 895, 1583...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestUnmarshalNS(t *testing.T) { for i, tt := range tables { var dst Tables var err error if tt.ns != "" { d := NewDecoder(strings.NewReader(tt.xml)) d.DefaultSpace = tt.ns err = d.Decode(&dst) } else { err = Unmarshal([]byte(tt.xml), &dst) } if err != nil { t.Errorf("#%d: Unmarshal: %v", i, err) continue } want := tt.tab if dst != want { t.Errorf("#%d: dst=%+v, want %+v", i, dst, want) } } }
explode_data.jsonl/25294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 1806, 27121, 2448, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17853, 1669, 2088, 12632, 341, 197, 2405, 10648, 42152, 198, 197, 2405, 1848, 1465, 198, 197, 743, 17853, 52328, 961, 1591, 341, 298, 2698, 1669, 1532, 20732, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGitServiceKindFromSecretsWithMissingKind(t *testing.T) { t.Parallel() secret := createSecret("jx-pipeline-git", map[string]string{ "jenkins.io/kind": "git", "jenkins.io/service-kind": "", }, map[string]string{ "jenkins.io/url": serviceURL, }) ns := "jx" secret.Namespace = ns kubeClient := fake.NewSimpleClientset(secret) jxClient := v1fake.NewSimpleClientset() foundServiceKind, err := kube.GetGitServiceKind(jxClient, kubeClient, ns, serviceURL) t.Logf("found service kind %s for URL %s\n\n", foundServiceKind, serviceURL) assert.NoError(t, err, "should find a service kind without any error") assert.Equal(t, serviceKind, foundServiceKind, "should find a service kind equal with '%s'", serviceKind) }
explode_data.jsonl/6627
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 46562, 1860, 10629, 3830, 19773, 16056, 25080, 10629, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 20474, 1669, 1855, 19773, 445, 73, 87, 2268, 8790, 81749, 756, 197, 19567, 14032, 30953, 515, 298, 197, 1, 2316...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStorageEntries(t *testing.T) { ents := []pb.Entry{{Index: 3, Term: 3}, {Index: 4, Term: 4}, {Index: 5, Term: 5}, {Index: 6, Term: 6}} tests := []struct { lo, hi, maxsize uint64 werr error wentries []pb.Entry }{ {2, 6, math.MaxUint64, ErrCompacted, nil}, {3, 4, math.MaxUint64, ErrCompacted, nil}, {4, 5, math.MaxUint64, nil, []pb.Entry{{Index: 4, Term: 4}}}, {4, 6, math.MaxUint64, nil, []pb.Entry{{Index: 4, Term: 4}, {Index: 5, Term: 5}}}, {4, 7, math.MaxUint64, nil, []pb.Entry{{Index: 4, Term: 4}, {Index: 5, Term: 5}, {Index: 6, Term: 6}}}, // even if maxsize is zero, the first entry should be returned {4, 7, 0, nil, []pb.Entry{{Index: 4, Term: 4}}}, // limit to 2 {4, 7, uint64(ents[1].Size() + ents[2].Size()), nil, []pb.Entry{{Index: 4, Term: 4}, {Index: 5, Term: 5}}}, // limit to 2 {4, 7, uint64(ents[1].Size() + ents[2].Size() + ents[3].Size()/2), nil, []pb.Entry{{Index: 4, Term: 4}, {Index: 5, Term: 5}}}, {4, 7, uint64(ents[1].Size() + ents[2].Size() + ents[3].Size() - 1), nil, []pb.Entry{{Index: 4, Term: 4}, {Index: 5, Term: 5}}}, // all {4, 7, uint64(ents[1].Size() + ents[2].Size() + ents[3].Size()), nil, []pb.Entry{{Index: 4, Term: 4}, {Index: 5, Term: 5}, {Index: 6, Term: 6}}}, } for i, tt := range tests { s := &MemoryStorage{ents: ents} entries, err := s.Entries(tt.lo, tt.hi, tt.maxsize) if err != tt.werr { t.Errorf("#%d: err = %v, want %v", i, err, tt.werr) } if !reflect.DeepEqual(entries, tt.wentries) { t.Errorf("#%d: entries = %v, want %v", i, entries, tt.wentries) } } }
explode_data.jsonl/66752
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 728 }
[ 2830, 3393, 5793, 24533, 1155, 353, 8840, 836, 8, 341, 197, 805, 1669, 3056, 16650, 22330, 2979, 1552, 25, 220, 18, 11, 17519, 25, 220, 18, 2137, 314, 1552, 25, 220, 19, 11, 17519, 25, 220, 19, 2137, 314, 1552, 25, 220, 20, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestExportFormsWithMinifyIdentifiersAndNoBundle(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/a.js": ` export default 123 export var varName = 234 export let letName = 234 export const constName = 234 function Func2() {} class Class2 {} export {Class as Cls, Func2 as Fn2, Class2 as Cls2} export function Func() {} export class Class {} export * from './a' export * as fromB from './b' `, "/b.js": "export default function() {}", "/c.js": "export default function foo() {}", "/d.js": "export default class {}", "/e.js": "export default class Foo {}", }, entryPaths: []string{ "/a.js", "/b.js", "/c.js", "/d.js", "/e.js", }, options: config.Options{ MinifyIdentifiers: true, AbsOutputDir: "/out", }, }) }
explode_data.jsonl/38446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 16894, 1838, 16056, 6217, 1437, 28301, 11836, 3036, 2753, 8409, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 64, 2857, 788, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSortPresubmitJobOrder(t *testing.T) { tests := []struct { name string presubmits map[string][]prowConfig.Presubmit expectedPresubmits map[string][]prowConfig.Presubmit }{ { name: "empty list of presubmits", presubmits: map[string][]prowConfig.Presubmit{}, expectedPresubmits: map[string][]prowConfig.Presubmit{}, }, { name: "unordered list of presubmits", presubmits: map[string][]prowConfig.Presubmit{ "istio/proxy": { prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "lint_release-1.5", }, }, prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "gen_check_master", }, }, prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "lint_master", }, }, }, "kubernetes/test-infra": { prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "pull-test-go", }, }, prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "pull-test-bazel", }, }, }, }, expectedPresubmits: map[string][]prowConfig.Presubmit{ "istio/proxy": { prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "gen_check_master", }, }, prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "lint_master", }, }, prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "lint_release-1.5", }, }, }, "kubernetes/test-infra": { prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "pull-test-bazel", }, }, prowConfig.Presubmit{ JobBase: prowConfig.JobBase{ Name: "pull-test-go", }, }, }, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { sortPresubmits(test.presubmits) for orgrepo := range test.expectedPresubmits { if !reflect.DeepEqual(test.expectedPresubmits[orgrepo], test.presubmits[orgrepo]) { t.Fatalf("Presubmit jobs do not match for repo: %s; actual: %v\n expected %v\n", orgrepo, test.expectedPresubmits[orgrepo], test.presubmits[orgrepo]) } } }) } }
explode_data.jsonl/22115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1144 }
[ 2830, 3393, 10231, 14367, 392, 1763, 12245, 4431, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 2290, 914, 198, 197, 3223, 416, 392, 44703, 260, 2415, 14032, 45725, 79, 651, 2648, 1069, 416, 392, 1763, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestExitStatusZero(t *testing.T) { conn := dial(exitStatusZeroHandler, t) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("Unable to request new session: %v", err) } defer session.Close() if err := session.Shell(); err != nil { t.Fatalf("Unable to execute command: %v", err) } err = session.Wait() if err != nil { t.Fatalf("expected nil but got %v", err) } }
explode_data.jsonl/34797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 15339, 2522, 17999, 1155, 353, 8840, 836, 8, 341, 32917, 1669, 27860, 88622, 2522, 17999, 3050, 11, 259, 340, 16867, 4534, 10421, 741, 25054, 11, 1848, 1669, 4534, 7121, 5283, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTimer_Reset(t *testing.T) { testTimer(2000, func(timer *s_timer.Timer, index int32, finish chan int64) { i := uint64(0) id := timer.AfterFunc(time.Second, func() { if atomic.LoadUint64(&i) == 0 { t.Error(index, "execute error") } }) time.AfterFunc(time.Second-time.Millisecond*precision, func() { atomic.AddUint64(&i, 1) timer.Reset(id, time.Second) }) time.AfterFunc(time.Second*3, func() { finish <- 0 }) }) }
explode_data.jsonl/7631
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 10105, 67771, 1155, 353, 8840, 836, 8, 341, 18185, 10105, 7, 17, 15, 15, 15, 11, 2915, 42892, 353, 82, 16255, 41143, 11, 1922, 526, 18, 17, 11, 6248, 26023, 526, 21, 19, 8, 341, 197, 8230, 1669, 2622, 21, 19, 7, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTransformFilterByRegexp(t *testing.T) { schema := []byte(` syntax = "proto3"; package my.app; message User { string first_name = 1; string last_name = 2; } message GetUserRequest { string correlation_id = 1; string id = 2; } message GetUserResponse { User data = 1; } `) input := new(bytes.Buffer) input.Write(schema) output := new(bytes.Buffer) transformer := proto2gql.NewTransformer(output) r := regexp.MustCompile("(Request)|(Response)") transformer.SetFilter(func(typeName string) bool { return r.Match([]byte(typeName)) }) if err := transformer.Transform(input); err != nil { t.Fatal(err) } expected := ` type MyAppGetUserRequest { correlation_id: String id: String } type MyAppGetUserResponse { } ` expected = strings.TrimSpace(expected) actual := strings.TrimSpace(output.String()) if expected != actual { t.Fatalf("Expected %s to equal to %s", expected, actual) } }
explode_data.jsonl/2082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 8963, 5632, 1359, 3477, 4580, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 3056, 3782, 61528, 56193, 284, 330, 15110, 18, 876, 1722, 847, 1601, 401, 1994, 2657, 341, 11357, 1156, 1269, 284, 220, 16, 280, 11357, 1537, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateAudienceAsArray(t *testing.T) { b, storage := getTestBackend(t) role := "tester" claims := map[string]interface{}{ "aud": []interface{}{"foo", "bar"}, } if err := writeRole(b, storage, role, role+".example.com", claims, map[string]interface{}{}); err != nil { t.Fatalf("%v\n", err) } resp, err := readRole(b, storage, role) if err != nil { t.Fatalf("%v\n", err) } claims, ok := resp.Data[keyClaims].(map[string]interface{}) if !ok { t.Error("failed to read response claims") } audience, ok := claims["aud"] if !ok { t.Error("no audience claim found") } if diff := deep.Equal(claims["aud"], audience); diff != nil { t.Error("failed to update audience:", diff) } }
explode_data.jsonl/1317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 278 }
[ 2830, 3393, 4021, 52949, 1835, 2121, 1857, 1155, 353, 8840, 836, 8, 341, 2233, 11, 5819, 1669, 633, 2271, 29699, 1155, 692, 197, 5778, 1669, 330, 73358, 1837, 197, 48561, 1669, 2415, 14032, 31344, 67066, 197, 197, 1, 7880, 788, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestLeaves(t *testing.T) { graphSpec := types.GraphSpec{ Edges: []types.Edge{ {Source: "n1", Target: "n2"}, {Source: "n1", Target: "n3"}, {Source: "n2", Target: "n4"}, {Source: "n3", Target: "n4"}, }, } leaves := Leaves(&graphSpec) if !compare(leaves, []string{"n4"}) { t.Error() } graphSpec = types.GraphSpec{ Edges: []types.Edge{ {Source: "n1", Target: "n2"}, {Source: "n1", Target: "n3"}, {Source: "n2", Target: "n4"}, {Source: "n3", Target: "n4"}, {Source: "n3", Target: "n5"}, }, } leaves = Leaves(&graphSpec) if !compare(leaves, []string{"n4", "n5"}) { t.Error() } }
explode_data.jsonl/62520
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 2304, 4693, 1155, 353, 8840, 836, 8, 341, 66616, 8327, 1669, 4494, 40237, 8327, 515, 197, 197, 41122, 25, 3056, 9242, 13, 11656, 515, 298, 197, 90, 3608, 25, 330, 77, 16, 497, 13483, 25, 330, 77, 17, 7115, 298, 197, 90...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRenderLocalWithValidConfig(t *testing.T) { tmpFilePath, err := createTestConfigFile(validConfig) if err != nil { t.Fatalf(fmt.Sprintf("error whilst creating temporary config file : %s", err.Error())) } defer os.Remove(tmpFilePath) ctx, _, globalFlags := NewTestContext() ctx.Command.Name = "local" globalFlags.String("config-path", tmpFilePath, "") defer os.Remove(fmt.Sprint(wd, "/tfcw.auth.tfvars")) defer os.Remove(fmt.Sprint(wd, "/tfcw.env")) exitCode, err := Render(ctx) assert.Equal(t, err, nil) assert.Equal(t, exitCode, 0) }
explode_data.jsonl/13609
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 6750, 7319, 2354, 4088, 2648, 1155, 353, 8840, 836, 8, 341, 20082, 19090, 11, 1848, 1669, 1855, 2271, 2648, 1703, 41529, 2648, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 28197, 17305, 445, 841, 23856, 6825, 13340, 219...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRequestCtxString(t *testing.T) { var ctx RequestCtx s := ctx.String() expectedS := "#0000000000000000 - 0.0.0.0:0<->0.0.0.0:0 - GET http:///" if s != expectedS { t.Fatalf("unexpected ctx.String: %q. Expecting %q", s, expectedS) } ctx.Request.SetRequestURI("https://foobar.com/aaa?bb=c") s = ctx.String() expectedS = "#0000000000000000 - 0.0.0.0:0<->0.0.0.0:0 - GET https://foobar.com/aaa?bb=c" if s != expectedS { t.Fatalf("unexpected ctx.String: %q. Expecting %q", s, expectedS) } }
explode_data.jsonl/73264
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 1900, 23684, 703, 1155, 353, 8840, 836, 8, 341, 2405, 5635, 6145, 23684, 271, 1903, 1669, 5635, 6431, 741, 42400, 50, 1669, 5869, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 481, 220, 15, 13, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnmarshalMap(t *testing.T) { testToml := []byte(` a = 1 b = 2 c = 3 `) var result map[string]int err := Unmarshal(testToml, &result) if err != nil { t.Errorf("Received unexpected error: %s", err) return } expected := map[string]int{ "a": 1, "b": 2, "c": 3, } if !reflect.DeepEqual(result, expected) { t.Errorf("Bad unmarshal: expected %v, got %v", expected, result) } }
explode_data.jsonl/46338
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 1806, 27121, 2227, 1155, 353, 8840, 836, 8, 341, 18185, 24732, 75, 1669, 3056, 3782, 61528, 197, 11323, 284, 220, 16, 198, 197, 2233, 284, 220, 17, 198, 197, 1444, 284, 220, 18, 198, 197, 197, 24183, 2405, 1102, 2415, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCodec0002InteropVersion_Decode(t *testing.T) { s := "abc" ut := Tag0002InteropVersion{s} encoded := []byte(s) rawValueOffset := encoded valueContext := exifcommon.NewValueContext( "", 0, uint32(len(encoded)), 0, rawValueOffset, nil, exifcommon.TypeUndefined, exifcommon.TestDefaultByteOrder) codec := Codec0002InteropVersion{} value, err := codec.Decode(valueContext) log.PanicIf(err) if reflect.DeepEqual(value, ut) != true { t.Fatalf("Decoded value not correct: %s\n", value) } }
explode_data.jsonl/81338
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 36913, 15, 15, 15, 17, 94000, 5637, 78668, 534, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 330, 13683, 698, 197, 332, 1669, 12353, 15, 15, 15, 17, 94000, 5637, 84386, 630, 197, 19329, 1669, 3056, 3782, 1141, 692, 76559, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateBasic(t *testing.T) { validators := createValidators(t) hi := HistoricalInfo{ Header: header, } err := ValidateBasic(hi) require.Error(t, err, "ValidateBasic passed on nil ValSet") // Ensure validators are not sorted for sort.IsSorted(Validators(validators)) { rand.Shuffle(len(validators), func(i, j int) { it := validators[i] validators[i] = validators[j] validators[j] = it }) } hi = HistoricalInfo{ Header: header, Valset: validators, } err = ValidateBasic(hi) require.Error(t, err, "ValidateBasic passed on unsorted ValSet") hi = NewHistoricalInfo(header, validators) err = ValidateBasic(hi) require.NoError(t, err, "ValidateBasic failed on valid HistoricalInfo") }
explode_data.jsonl/35592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 17926, 15944, 1155, 353, 8840, 836, 8, 341, 56322, 2973, 1669, 1855, 31748, 1155, 340, 197, 6023, 1669, 40043, 1731, 515, 197, 197, 4047, 25, 4247, 345, 197, 532, 9859, 1669, 23282, 15944, 3203, 72, 340, 17957, 6141, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMergeEvalResults(t *testing.T) { availableRules := generateDummyRuleMatrices() cases := []struct { name string set1 map[shared.ClientID]shared.EvaluationReturn set2 map[shared.ClientID]shared.EvaluationReturn expect map[shared.ClientID]shared.EvaluationReturn }{ { name: "Simple merge test", set1: map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team1"]: { Rules: []rules.RuleMatrix{ availableRules[0], availableRules[1], }, Evaluations: []bool{true, false}, }, }, set2: map[shared.ClientID]shared.EvaluationReturn{}, expect: map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team1"]: { Rules: []rules.RuleMatrix{ availableRules[0], availableRules[1], }, Evaluations: []bool{true, false}, }, }, }, { name: "Complex Merge Test", set1: map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team1"]: { Rules: []rules.RuleMatrix{ availableRules[0], availableRules[1], }, Evaluations: []bool{true, false}, }, }, set2: map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team2"]: { Rules: []rules.RuleMatrix{ availableRules[2], availableRules[3], }, Evaluations: []bool{true, false}, }, }, expect: map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team1"]: { Rules: []rules.RuleMatrix{ availableRules[0], availableRules[1], }, Evaluations: []bool{true, false}, }, shared.Teams["Team2"]: { Rules: []rules.RuleMatrix{ availableRules[2], availableRules[3], }, Evaluations: []bool{true, false}, }, }, }, { name: "Patchwork merge test", set1: map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team1"]: { Rules: []rules.RuleMatrix{ availableRules[0], availableRules[1], }, Evaluations: []bool{true, false}, }, }, set2: map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team1"]: { Rules: []rules.RuleMatrix{ availableRules[0], availableRules[1], }, Evaluations: []bool{true, false}, }, shared.Teams["Team2"]: { Rules: []rules.RuleMatrix{ availableRules[2], availableRules[3], }, Evaluations: []bool{true, false}, }, }, expect: 
map[shared.ClientID]shared.EvaluationReturn{ shared.Teams["Team1"]: { Rules: []rules.RuleMatrix{ availableRules[0], availableRules[1], availableRules[0], availableRules[1], }, Evaluations: []bool{true, false, true, false}, }, shared.Teams["Team2"]: { Rules: []rules.RuleMatrix{ availableRules[2], availableRules[3], }, Evaluations: []bool{true, false}, }, }, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { res := mergeEvaluationReturn(tc.set1, tc.set2) if !reflect.DeepEqual(res, tc.expect) { t.Errorf("Expected %v got %v", tc.expect, res) } }) } }
explode_data.jsonl/74049
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1446 }
[ 2830, 3393, 52096, 54469, 9801, 1155, 353, 8840, 836, 8, 341, 197, 10334, 26008, 1669, 6923, 43344, 11337, 11575, 24419, 741, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 8196, 16, 256, 2415, 58, 6100, 11716, 915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBackground(t *testing.T) { if BackgroundBlue.String() == Blue.Background().String() { //fmt.Println("BLUE IS BLUE") } else { fmt.Println("BLUE BG IS NOT BLUE BG") fmt.Println(BackgroundBlue.String() + "FIRST" + Stop()) fmt.Println(Blue.Background().String() + "SECOND" + Stop()) t.Fail() } }
explode_data.jsonl/39708
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 8706, 1155, 353, 8840, 836, 8, 341, 743, 24800, 10331, 6431, 368, 621, 8697, 19047, 1005, 703, 368, 341, 197, 197, 322, 12501, 12419, 445, 64330, 3424, 55892, 1138, 197, 92, 770, 341, 197, 11009, 12419, 445, 64330, 43011, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestProcessCOMM(t *testing.T) { p := process.Process{ Status: proc.Status{ Name: "foo-bar", }, CmdLine: []string{""}, } ctx := new(psContext) comm, err := processCOMM(&p, ctx) assert.Nil(t, err) assert.Equal(t, "[foo-bar]", comm) p = process.Process{ CmdLine: []string{"/usr/bin/foo-bar"}, } comm, err = processCOMM(&p, ctx) assert.Nil(t, err) assert.Equal(t, "foo-bar", comm) }
explode_data.jsonl/53395
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 7423, 55554, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1882, 29012, 515, 197, 58321, 25, 13674, 10538, 515, 298, 21297, 25, 330, 7975, 15773, 756, 197, 197, 1583, 197, 6258, 2277, 2460, 25, 3056, 917, 90, 3014, 1583, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetOnlyIfCachedHit(t *testing.T) { resetTest() { req, err := http.NewRequest("GET", s.server.URL, nil) if err != nil { t.Fatal(err) } resp, err := s.client.Do(req) if err != nil { t.Fatal(err) } defer resp.Body.Close() if resp.Header.Get(XFromCache) != "" { t.Fatal("XFromCache header isn't blank") } _, err = ioutil.ReadAll(resp.Body) if err != nil { t.Fatal(err) } } { req, err := http.NewRequest("GET", s.server.URL, nil) if err != nil { t.Fatal(err) } req.Header.Add("cache-control", "only-if-cached") resp, err := s.client.Do(req) if err != nil { t.Fatal(err) } defer resp.Body.Close() if resp.Header.Get(XFromCache) != "1" { t.Fatalf(`XFromCache header isn't "1": %v`, resp.Header.Get(XFromCache)) } if resp.StatusCode != http.StatusOK { t.Fatalf("response status code isn't 200 OK: %v", resp.StatusCode) } } }
explode_data.jsonl/77615
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 421 }
[ 2830, 3393, 1949, 7308, 2679, 70293, 19498, 1155, 353, 8840, 836, 8, 341, 70343, 2271, 741, 197, 515, 197, 24395, 11, 1848, 1669, 1758, 75274, 445, 3806, 497, 274, 12638, 20893, 11, 2092, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestNewFlowToInstance(t *testing.T) { json := `{ "name": "测试to flow", "start": { "params":{ "name":{"type":"string","value":"menghui"}, "age":{"type":"number","value":41} }, "variables": { "var_a": { "type": "string", "value": "test var" }, "var_b": { "type": "number", "value": 12 } }, "flow": [ { "gate": "to", "target": [{ "expressions" : ["var_b=var_b+10"], "style" : "stdout", "flow" : [ { "gate" : "to", "target": [{ "style" : "stdout", "expressions" : ["var_a='next activity'"] }] } ] }] } ]}}` fl, err := NewFlowInstanceFromJSON(json) if err != nil { fmt.Println(err) return } r := map[string]interface{}{ "name": "menghui", } err = fl.Execute(r) if err != nil { fmt.Println(err) t.Fail() } }
explode_data.jsonl/5939
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 453 }
[ 2830, 3393, 3564, 18878, 1249, 2523, 1155, 353, 8840, 836, 8, 341, 30847, 1669, 1565, 515, 197, 31486, 788, 330, 81705, 983, 6396, 756, 197, 1, 2468, 788, 341, 197, 1, 3519, 12602, 197, 197, 31486, 22317, 1313, 3252, 917, 2198, 957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIncreasePullTPSGetPullTPS(t *testing.T) { ShutDownStatis() tests := []struct { RT int ExpectSum int64 }{ {1, 0}, {1, 1}, {1, 2}, {1, 3}, {1, 4}, {1, 5}, {1, 6}, {1, 6}, } for _, tt := range tests { increasePullTPS("rocketmq", "default", tt.RT) topicAndGroupPullTPS.samplingInSeconds() snapshot := getPullTPS("rocketmq", "default") if snapshot.sum != tt.ExpectSum { t.Errorf("wrong Pull TPS sum. want=%d, got=%d", tt.ExpectSum, snapshot.sum) } } }
explode_data.jsonl/1964
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 69556, 36068, 4239, 50, 1949, 36068, 4239, 50, 1155, 353, 8840, 836, 8, 341, 197, 2016, 332, 4454, 623, 3605, 741, 78216, 1669, 3056, 1235, 341, 197, 11826, 286, 526, 198, 197, 35911, 9190, 526, 21, 19, 198, 197, 59403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFillTagWithENIIndex(t *testing.T) { type args struct { tags map[string]string index int } tests := []struct { name string args args want map[string]string }{ { name: "index 1", args: args{ tags: map[string]string{"key": "val"}, index: 1, }, want: map[string]string{"key": "val", eniIndexTagKey: "1"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := FillTagWithENIIndex(tt.args.tags, tt.args.index); !reflect.DeepEqual(got, tt.want) { t.Errorf("FillTagWithENIIndex() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/59556
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 14449, 5668, 2354, 953, 40, 1552, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 3244, 2032, 220, 2415, 14032, 30953, 198, 197, 26327, 526, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdateContext(t *testing.T) { visitor := createVisitor("test", nil) context := model.Context{} context["test_string"] = "123" context["test_number"] = 36.5 context["test_bool"] = true context["test_wrong"] = errors.New("wrong type") err := visitor.UpdateContext(context) if err == nil { t.Error("Visitor with wrong context variable should raise an error") } delete(context, "test_wrong") err = visitor.UpdateContext(context) if err != nil { t.Errorf("Visitor update context raised an error : %v", err) return } if visitor.Context["test_string"] != "123" { t.Errorf("Visitor update context string failed. Expected %s, got %s", "123", visitor.Context["test_string"]) } if visitor.Context["test_number"] != 36.5 { t.Errorf("Visitor update context string failed. Expected %f, got %v", 36.5, visitor.Context["test_number"]) } if visitor.Context["test_bool"] != true { t.Errorf("Visitor update context string failed. Expected %v, got %v", true, visitor.Context["test_bool"]) } }
explode_data.jsonl/12285
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 345 }
[ 2830, 3393, 4289, 1972, 1155, 353, 8840, 836, 8, 341, 197, 39985, 1669, 1855, 16796, 445, 1944, 497, 2092, 692, 28413, 1669, 1614, 9328, 16094, 28413, 1183, 1944, 3904, 1341, 284, 330, 16, 17, 18, 698, 28413, 1183, 1944, 5500, 1341, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetProfileImage(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() defer th.TearDown() Client := th.Client user := th.BasicUser data, resp := Client.GetProfileImage(user.Id, "") CheckNoError(t, resp) if len(data) == 0 { t.Fatal("Should not be empty") } _, resp = Client.GetProfileImage(user.Id, resp.Etag) if resp.StatusCode == http.StatusNotModified { t.Fatal("Shouldn't have hit etag") } _, resp = Client.GetProfileImage("junk", "") CheckBadRequestStatus(t, resp) _, resp = Client.GetProfileImage(model.NewId(), "") CheckNotFoundStatus(t, resp) Client.Logout() _, resp = Client.GetProfileImage(user.Id, "") CheckUnauthorizedStatus(t, resp) _, resp = th.SystemAdminClient.GetProfileImage(user.Id, "") CheckNoError(t, resp) info := &model.FileInfo{Path: "/users/" + user.Id + "/profile.png"} if err := th.cleanupTestFile(info); err != nil { t.Fatal(err) } }
explode_data.jsonl/21525
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 1949, 8526, 1906, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 19060, 1669, 270, 48868, 1474, 271, 8924, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNew(t *testing.T) { // Test Setup db := NewTestDatabase() cache := NewTestCache() // Should be able to create a new State with a Database and No Cache state, err := manager.New(db, nil) assert.Nil(t, err) assert.NotNil(t, state) // Should be able to create a new state with Cache and no Database state, err = manager.New(nil, cache) assert.Nil(t, err) assert.NotNil(t, state) // Should be able to create a new State with a Database and Cache state, err = manager.New(db, cache) assert.Nil(t, err) assert.NotNil(t, state) // Should NOT be able to create a new state without a Database or a Cache state, err = manager.New(nil, nil) assert.NotNil(t, err) assert.Nil(t, state) }
explode_data.jsonl/58865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 18626, 198, 20939, 1669, 1532, 2271, 5988, 741, 52680, 1669, 1532, 2271, 8233, 2822, 197, 322, 12260, 387, 2952, 311, 1855, 264, 501, 3234, 448, 264, 9994, 323, 2308, 194...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClock_AddMixJob(t *testing.T) { var ( myClock = Default().Reset() counter1 int counter2 int ) f1 := func() { counter1++ } f2 := func() { counter2++ } _, inserted1 := myClock.AddJobWithInterval(time.Millisecond*500, f1) _, inserted2 := myClock.AddJobRepeat(time.Millisecond*300, 0, f2) if !inserted1 && !inserted2 { t.Error("add repeat job failure") } time.Sleep(time.Second * 2) if counter1 != 1 || counter2 < 5 { t.Errorf("执行次数异常!,一次性任务执行了:%v,重复性任务执行了%v\n", counter1, counter2) } }
explode_data.jsonl/2022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 26104, 21346, 58083, 12245, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 13624, 26104, 220, 284, 7899, 1005, 14828, 741, 197, 58261, 16, 526, 198, 197, 58261, 17, 526, 198, 197, 340, 1166, 16, 1669, 2915, 368, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImports(t *testing.T) { t.Skip("Import test skipped for forked codebase") // testenv.MustHaveGoRun(t) // Replace testenv.GoToolPath(t) with "go" for use outside of Go repo. if err := exec.Command("go", "run", "x509_test_import.go").Run(); err != nil { t.Errorf("failed to run x509_test_import.go: %s", err) } }
explode_data.jsonl/68000
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 31250, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 11511, 1273, 35157, 369, 22435, 291, 2038, 3152, 1138, 197, 322, 18185, 3160, 50463, 12116, 10850, 6727, 1155, 692, 197, 322, 29558, 1273, 3160, 67131, 7740, 1820, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDependencySort_DeadEnd(t *testing.T) { ks := []HelmRelease{ { ObjectMeta: v1.ObjectMeta{Name: "backend"}, Spec: HelmReleaseSpec{DependsOn: []string{"common"}}, }, { ObjectMeta: v1.ObjectMeta{Name: "frontend"}, Spec: HelmReleaseSpec{DependsOn: []string{"infra"}}, }, { ObjectMeta: v1.ObjectMeta{Name: "common"}, }, } got, err := DependencySort(ks) if err != nil { t.Errorf("DependencySort() error = %v", err) return } if len(got) != len(ks) { t.Errorf("DependencySort() len = %v, want %v", len(got), len(ks)) } }
explode_data.jsonl/43018
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 36387, 10231, 1557, 3149, 3727, 1155, 353, 8840, 836, 8, 341, 197, 2787, 1669, 3056, 39, 23162, 16077, 515, 197, 197, 515, 298, 23816, 12175, 25, 348, 16, 80222, 63121, 25, 330, 20942, 7115, 298, 7568, 992, 25, 981, 62042,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBadDriver(t *testing.T) { Register("bad", badDriver{}) db, err := Open("bad", "ignored") if err != nil { t.Fatal(err) } defer func() { if r := recover(); r == nil { t.Error("expected panic") } else { if want := "badConn.Exec"; r.(string) != want { t.Errorf("panic was %v, expected %v", r, want) } } }() defer db.Close() db.Exec("ignored") }
explode_data.jsonl/16037
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 17082, 11349, 1155, 353, 8840, 836, 8, 341, 79096, 445, 13855, 497, 3873, 11349, 37790, 20939, 11, 1848, 1669, 5264, 445, 13855, 497, 330, 58471, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 16867,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInvalidGetInboundSMSParams(t *testing.T) { tests := []struct { name string instance GetInboundSMSParams }{ { name: "low limit", instance: GetInboundSMSParams{Limit: -1}, }, { name: "high limit", instance: GetInboundSMSParams{Limit: 2000}, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { err := test.instance.Validate() require.Error(t, err) }) } }
explode_data.jsonl/13711
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 7928, 1949, 641, 10891, 65565, 4870, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 56256, 2126, 641, 10891, 65565, 4870, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 257, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildOrder(t *testing.T) { testSuite := []struct { title string variables map[string]*v1.DashboardVariable current map[string]string previous map[string]string result []Group }{ { title: "no variable", }, { title: "constant variable, no dep", variables: map[string]*v1.DashboardVariable{ "myVariable": { Kind: v1.KindConstantVariable, Parameter: &v1.ConstantVariableParameter{ Values: []string{"myConstant"}, }, }, }, result: []Group{{Variables: []string{"myVariable"}}}, }, { title: "multiple usage of same variable", variables: map[string]*v1.DashboardVariable{ "myVariable": { Kind: v1.KindPromQLQueryVariable, Parameter: &v1.PromQLQueryVariableParameter{ Expr: "sum by($doe, $bar) (rate($foo{label='$bar'}))", }, }, "foo": { Kind: v1.KindPromQLQueryVariable, Parameter: &v1.PromQLQueryVariableParameter{ Expr: "test", }, }, "bar": { Kind: v1.KindPromQLQueryVariable, Parameter: &v1.PromQLQueryVariableParameter{ Expr: "vector($foo)", }, }, "doe": { Kind: v1.KindConstantVariable, Parameter: &v1.ConstantVariableParameter{ Values: []string{"myConstant"}, }, }, }, result: []Group{ {Variables: []string{"doe", "foo"}}, {Variables: []string{"bar"}}, {Variables: []string{"myVariable"}}, }, }, { title: "multiple usage of same variable with foo variable known", variables: map[string]*v1.DashboardVariable{ "myVariable": { Kind: v1.KindPromQLQueryVariable, Parameter: &v1.PromQLQueryVariableParameter{ Expr: "sum by($doe, $bar) (rate($foo{label='$bar'}))", }, }, "foo": { Kind: v1.KindPromQLQueryVariable, Parameter: &v1.PromQLQueryVariableParameter{ Expr: "test", }, }, "bar": { Kind: v1.KindPromQLQueryVariable, Parameter: &v1.PromQLQueryVariableParameter{ Expr: "vector($foo)", }, }, "doe": { Kind: v1.KindConstantVariable, Parameter: &v1.ConstantVariableParameter{ Values: []string{"myConstant"}, }, }, }, current: map[string]string{ "foo": "value", }, result: []Group{ {Variables: []string{"bar", "doe"}}, {Variables: []string{"myVariable"}}, }, }, { title: "all variable with an 
already known value", variables: map[string]*v1.DashboardVariable{ "labelName": { Kind: v1.KindLabelNamesQueryVariable, Hide: false, Parameter: &v1.LabelNamesQueryVariableParameter{ Matchers: []string{ "up", }, CapturingRegexp: (*v1.CapturingRegexp)(regexp.MustCompile(`(.*)`)), }, }, "labelValue": { Kind: v1.KindLabelValuesQueryVariable, Hide: false, Parameter: &v1.LabelValuesQueryVariableParameter{ LabelName: "$labelName", Matchers: []string{ "up", }, CapturingRegexp: (*v1.CapturingRegexp)(regexp.MustCompile(`(.*)`)), }, }, }, current: map[string]string{ "labelName": "job", "labelValue": "value1", }, previous: map[string]string{ "labelName": "job", "labelValue": "value2", }, result: []Group{}, }, } for _, test := range testSuite { t.Run(test.title, func(t *testing.T) { groups, err := BuildOrder(test.variables, test.current, test.previous) assert.NoError(t, err) assert.Equal(t, len(test.result), len(groups)) for i := 0; i < len(groups); i++ { assert.ElementsMatch(t, test.result[i].Variables, groups[i].Variables) } }) } }
explode_data.jsonl/34534
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1668 }
[ 2830, 3393, 11066, 4431, 1155, 353, 8840, 836, 8, 341, 18185, 28000, 1669, 3056, 1235, 341, 197, 24751, 257, 914, 198, 197, 2405, 2156, 82, 2415, 14032, 8465, 85, 16, 909, 7349, 7827, 198, 197, 20121, 256, 2415, 14032, 30953, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSigningSession(t *testing.T) { id := irma.NewAttributeTypeIdentifier("irma-demo.RU.studentCard.studentID") request := getSigningRequest(id) sessionHelper(t, request, "signature", nil) }
explode_data.jsonl/69986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 93358, 5283, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 6216, 1728, 7121, 3907, 929, 8714, 445, 44011, 58893, 2013, 52, 40113, 5770, 40113, 915, 1138, 23555, 1669, 633, 93358, 1900, 3724, 340, 25054, 5511, 1155, 11, 1681, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestAndroidCamputFile(t *testing.T) { w := test.GetWorld(t) // UploadThread.java sets: // CAMLI_AUTH (set by w.CmdWithEnv) // CAMLI_TRUSTED_CERT (not needed) // CAMLI_CACHE_DIR // CAMPUT_ANDROID_OUTPUT=1 cacheDir, clean := mustTempDir(t) defer clean() env := append(os.Environ(), "CAMPUT_ANDROID_OUTPUT=1", "CAMLI_CACHE_DIR="+cacheDir, ) cmd := w.CmdWithEnv("pk-put", env, "--server="+w.ServerBaseURL(), "file", "-stdinargs", "-vivify") cmd.Stderr = os.Stderr in, err := cmd.StdinPipe() if err != nil { t.Fatal(err) } out, err := cmd.StdoutPipe() if err != nil { t.Fatal(err) } if err := w.Ping(); err != nil { t.Fatal(err) } if err := cmd.Start(); err != nil { t.Fatal(err) } defer cmd.Process.Kill() srcDir, clean := mustTempDir(t) defer clean() file1 := filepath.Join(srcDir, "file1.txt") mustWriteFile(t, file1, "contents 1") file2 := filepath.Join(srcDir, "file2.txt") mustWriteFile(t, file2, "contents 2 longer length") go func() { fmt.Fprintf(in, "%s\n", file1) fmt.Fprintf(in, "%s\n", file2) }() waitc := make(chan error) go func() { sc := bufio.NewScanner(out) fileUploaded := 0 for sc.Scan() { t.Logf("Got: %q", sc.Text()) f := strings.Fields(sc.Text()) if len(f) == 0 { t.Logf("empty text?") continue } if f[0] == "FILE_UPLOADED" { fileUploaded++ if fileUploaded == 2 { break } } } in.Close() if err := sc.Err(); err != nil { t.Error(err) } }() defer cmd.Process.Kill() go func() { waitc <- cmd.Wait() }() select { case <-time.After(5 * time.Second): t.Fatal("timeout waiting for pk-put to end") case err := <-waitc: if err != nil { t.Errorf("pk-put exited uncleanly: %v", err) } } }
explode_data.jsonl/6980
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 844 }
[ 2830, 3393, 21831, 25406, 628, 1703, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 1273, 2234, 10134, 1155, 340, 197, 322, 24996, 6855, 10848, 7289, 510, 197, 322, 256, 28297, 18537, 22675, 320, 746, 553, 289, 64512, 2354, 14359, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestXPubSub(t *testing.T) { var ( topics = []string{"", "MSG", "msg"} wantNumMsgs = []int{3, 1, 1} msg0 = zmq4.NewMsgString("anything") msg1 = zmq4.NewMsgString("MSG 1") msg2 = zmq4.NewMsgString("msg 2") msgs = [][]zmq4.Msg{ 0: {msg0, msg1, msg2}, 1: {msg1}, 2: {msg2}, } ) for i := range xpubsubs { tc := xpubsubs[i] t.Run(tc.name, func(t *testing.T) { defer tc.xpub.Close() defer tc.sub0.Close() defer tc.sub1.Close() defer tc.sub2.Close() ep := tc.endpoint cleanUp(ep) if tc.skip { t.Skipf(tc.name) } // t.Parallel() ctx, timeout := context.WithTimeout(context.Background(), 20*time.Second) defer timeout() nmsgs := []int{0, 0, 0} subs := []zmq4.Socket{tc.sub0, tc.sub1, tc.sub2} var wg1 sync.WaitGroup var wg2 sync.WaitGroup wg1.Add(len(subs)) wg2.Add(len(subs)) grp, ctx := errgroup.WithContext(ctx) grp.Go(func() error { err := tc.xpub.Listen(ep) if err != nil { return fmt.Errorf("could not listen: %w", err) } if addr := tc.xpub.Addr(); addr == nil { return fmt.Errorf("listener with nil Addr") } wg1.Wait() wg2.Wait() time.Sleep(1 * time.Second) if sck, ok := tc.xpub.(zmq4.Topics); ok { got := sck.Topics() if !reflect.DeepEqual(got, topics) { t.Fatalf("invalid topics.\ngot= %q\nwant=%q", got, topics) } } for _, msg := range msgs[0] { err = tc.xpub.Send(msg) if err != nil { return fmt.Errorf("could not send message %v: %w", msg, err) } } return err }) for isub := range subs { func(isub int, sub zmq4.Socket) { grp.Go(func() error { var err error err = sub.Dial(ep) if err != nil { return fmt.Errorf("could not dial: %w", err) } if addr := sub.Addr(); addr != nil { return fmt.Errorf("dialer with non-nil Addr") } wg1.Done() wg1.Wait() err = sub.SetOption(zmq4.OptionSubscribe, topics[isub]) if err != nil { return fmt.Errorf("could not subscribe to topic %q: %w", topics[isub], err) } wg2.Done() wg2.Wait() msgs := msgs[isub] for imsg, want := range msgs { msg, err := sub.Recv() if err != nil { return fmt.Errorf("could not recv message %v: %w", want, err) } if 
!reflect.DeepEqual(msg, want) { return fmt.Errorf("sub[%d][msg=%d]: got = %v, want= %v", isub, imsg, msg, want) } nmsgs[isub]++ } return err }) }(isub, subs[isub]) } if err := grp.Wait(); err != nil { t.Fatalf("error: %+v", err) } if err := ctx.Err(); err != nil && err != context.Canceled { t.Fatalf("error: %+v", err) } for i, want := range wantNumMsgs { if want != nmsgs[i] { t.Errorf("xsub[%d]: got %d messages, want %d msgs=%v", i, nmsgs[i], want, nmsgs) } } }) } }
explode_data.jsonl/77146
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1561 }
[ 2830, 3393, 27375, 392, 3136, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 42118, 1211, 414, 284, 3056, 917, 4913, 497, 330, 19575, 497, 330, 3236, 16707, 197, 50780, 4651, 6611, 82, 284, 3056, 396, 90, 18, 11, 220, 16, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestDuplicateRequest(t *testing.T) { events := []*cdcpb.ChangeDataEvent{ {Events: []*cdcpb.Event{ { RegionId: 3, RequestId: currentRequestID(), Event: &cdcpb.Event_Error{ Error: &cdcpb.Error{ DuplicateRequest: &cdcpb.DuplicateRequest{RegionId: 3}, }, }, }, }}, } testEventCommitTsFallback(t, events) }
explode_data.jsonl/32878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 53979, 1900, 1155, 353, 8840, 836, 8, 341, 90873, 1669, 29838, 4385, 4672, 65, 39348, 1043, 1556, 515, 197, 197, 90, 7900, 25, 29838, 4385, 4672, 65, 6904, 515, 298, 197, 515, 571, 197, 14091, 764, 25, 220, 220, 18, 345,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetCallbacks(t *testing.T) { if testing.Short() { t.Skip("skip test in short mode") } sess := newSessionForTesting(t) defer sess.Close() builder, err := sess.NewExperimentBuilder("example") if err != nil { t.Fatal(err) } if err := builder.SetOptionInt("SleepTime", 0); err != nil { t.Fatal(err) } register := &registerCallbacksCalled{} builder.SetCallbacks(register) if _, err := builder.NewExperiment().Measure(""); err != nil { t.Fatal(err) } if register.onProgressCalled == false { t.Fatal("OnProgress not called") } }
explode_data.jsonl/26315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 1649, 44461, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 20599, 1273, 304, 2805, 3856, 1138, 197, 532, 1903, 433, 1669, 501, 5283, 2461, 16451, 1155, 340, 16867, 21875, 10421, 741, 44546,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestHandlesInvalidSTHPollination(t *testing.T) { s := createAndOpenStorage() defer closeAndDeleteStorage(s) v := mustCreateSignatureVerifiers(t) h := newHandlerWithClock(s, v, testStuckClock(stuckClockTimeMillis)) rr := httptest.NewRecorder() req, err := http.NewRequest("POST", "/.well-known/ct/v1/sth-pollination", strings.NewReader("blahblah,,}{")) if err != nil { t.Fatalf("Failed to create request: %v", err) } h.HandleSTHPollination(rr, req) assert.Equal(t, http.StatusBadRequest, rr.Code) }
explode_data.jsonl/80085
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 65928, 7928, 784, 6610, 965, 2554, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1855, 3036, 5002, 5793, 741, 16867, 3265, 3036, 6435, 5793, 1141, 340, 5195, 1669, 1969, 4021, 25088, 10141, 11836, 1155, 340, 9598, 1669, 501, 305...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_RepoResolveMetadataIDs(t *testing.T) { http := &httpmock.Registry{} client := NewClient(ReplaceTripper(http)) repo, _ := ghrepo.FromFullName("OWNER/REPO") input := RepoResolveInput{ Assignees: []string{"monalisa", "hubot"}, Reviewers: []string{"monalisa", "octocat", "OWNER/core", "/robots"}, Labels: []string{"bug", "help wanted"}, } expectedQuery := `{ u000: user(login:"monalisa"){id,login} u001: user(login:"hubot"){id,login} u002: user(login:"octocat"){id,login} repository(owner:"OWNER",name:"REPO"){ l000: label(name:"bug"){id,name} l001: label(name:"help wanted"){id,name} } organization(login:"OWNER"){ t000: team(slug:"core"){id,slug} t001: team(slug:"robots"){id,slug} } } ` responseJSON := ` { "data": { "u000": { "login": "MonaLisa", "id": "MONAID" }, "u001": { "login": "hubot", "id": "HUBOTID" }, "u002": { "login": "octocat", "id": "OCTOID" }, "repository": { "l000": { "name": "bug", "id": "BUGID" }, "l001": { "name": "Help Wanted", "id": "HELPID" } }, "organization": { "t000": { "slug": "core", "id": "COREID" }, "t001": { "slug": "Robots", "id": "ROBOTID" } } } } ` http.Register( httpmock.MatchAny, httpmock.GraphQLQuery(responseJSON, func(q string, _ map[string]interface{}) { if q != expectedQuery { t.Errorf("expected query %q, got %q", expectedQuery, q) } })) result, err := RepoResolveMetadataIDs(client, repo, input) if err != nil { t.Fatalf("unexpected error: %v", err) } expectedMemberIDs := []string{"MONAID", "HUBOTID", "OCTOID"} memberIDs, err := result.MembersToIDs([]string{"monalisa", "hubot", "octocat"}) if err != nil { t.Errorf("error resolving members: %v", err) } if !sliceEqual(memberIDs, expectedMemberIDs) { t.Errorf("expected members %v, got %v", expectedMemberIDs, memberIDs) } expectedTeamIDs := []string{"COREID", "ROBOTID"} teamIDs, err := result.TeamsToIDs([]string{"/core", "/robots"}) if err != nil { t.Errorf("error resolving teams: %v", err) } if !sliceEqual(teamIDs, expectedTeamIDs) { t.Errorf("expected members %v, got %v", 
expectedTeamIDs, teamIDs) } expectedLabelIDs := []string{"BUGID", "HELPID"} labelIDs, err := result.LabelsToIDs([]string{"bug", "help wanted"}) if err != nil { t.Errorf("error resolving labels: %v", err) } if !sliceEqual(labelIDs, expectedLabelIDs) { t.Errorf("expected members %v, got %v", expectedLabelIDs, labelIDs) } }
explode_data.jsonl/74334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1010 }
[ 2830, 3393, 62, 25243, 56808, 14610, 30466, 1155, 353, 8840, 836, 8, 341, 28080, 1669, 609, 1254, 16712, 89142, 16094, 25291, 1669, 1532, 2959, 7, 23107, 21884, 6922, 19886, 4390, 17200, 5368, 11, 716, 1669, 36124, 23476, 11439, 36217, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAndNot_DifferentBitmapSizes(t *testing.T) { var a, b, c, d Bitmap for i := uint32(0); i < 100; i += 2 { a.Set(i) c.Set(i) } for i := uint32(0); i < 200; i += 2 { b.Set(i) d.Set(i) } a.AndNot(b) d.AndNot(c) for i := uint32(0); i < 100; i++ { assert.Equal(t, false, a.Contains(i), "for "+strconv.Itoa(int(i))) assert.Equal(t, false, d.Contains(i), "for "+strconv.Itoa(int(i))) } for i := uint32(100); i < 200; i++ { assert.Equal(t, b.Contains(i), d.Contains(i), "for "+strconv.Itoa(int(i))) } assert.Equal(t, 0, a.Count()) assert.Equal(t, 50, d.Count()) }
explode_data.jsonl/39886
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 299 }
[ 2830, 3393, 3036, 2623, 1557, 18277, 16773, 34930, 1155, 353, 8840, 836, 8, 341, 2405, 264, 11, 293, 11, 272, 11, 294, 17533, 198, 2023, 600, 1669, 2622, 18, 17, 7, 15, 1215, 600, 366, 220, 16, 15, 15, 26, 600, 1421, 220, 17, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMmark(t *testing.T) { testData := readTestFile(t, "mmark.test") ext := parser.CommonExtensions | parser.Attributes | parser.OrderedListStart | parser.SuperSubscript | parser.Mmark for _, td := range testData { p := parser.NewWithExtensions(ext) got := ToHTML(td.md, p, nil) want := td.html if bytes.Compare(got, want) != 0 { t.Errorf("want (%d bytes) %s, got (%d bytes) %s, for input %q", len(want), want, len(got), got, td.md) } } }
explode_data.jsonl/74747
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 44, 3987, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 1349, 2271, 1703, 1155, 11, 330, 3821, 838, 5958, 1138, 95450, 1669, 6729, 16010, 31282, 760, 6729, 31384, 760, 6729, 19664, 38301, 3479, 760, 6729, 808, 3466, 3136,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3