text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestBalances(t *testing.T) { client := v1.New(&v1.Config{ Key: os.Getenv("BFKEY"), Secret: os.Getenv("BFSECRET"), }) res, err := client.Balances(list.NewForBalances( "JPY", 500, 0, 0, )) assert.NoError(t, err) for i, v := range *res { fmt.Printf("%d %+v\n", i, v) } fmt.Printf("%+v %+v\n", client.Limit.Remain(true), client.Limit.Remain(false)) }
explode_data.jsonl/41221
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 37889, 3020, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 348, 16, 7121, 2099, 85, 16, 10753, 515, 197, 55242, 25, 262, 2643, 64883, 445, 19883, 4784, 4461, 197, 7568, 50856, 25, 2643, 64883, 445, 19883, 65310, 4461, 197, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPqNotconcurrent(t *testing.T) { ctx, end := trace.WithTaskFromStack(context.Background()) defer end() var ctr uint32 q := newStepQueue() var wg sync.WaitGroup wg.Add(4) go func() { ctx, end := trace.WithTaskFromStack(ctx) defer end() defer wg.Done() defer q.WaitReady(ctx, "1", time.Unix(9999, 0))() ret := atomic.AddUint32(&ctr, 1) assert.Equal(t, uint32(1), ret) time.Sleep(1 * time.Second) }() // give goroutine "1" 500ms to enter queue, get the active slot and enter time.Sleep defer q.Start(1)() time.Sleep(500 * time.Millisecond) // while "1" is still running, queue in "2", "3" and "4" go func() { ctx, end := trace.WithTaskFromStack(ctx) defer end() defer wg.Done() defer q.WaitReady(ctx, "2", time.Unix(2, 0))() ret := atomic.AddUint32(&ctr, 1) assert.Equal(t, uint32(2), ret) }() go func() { ctx, end := trace.WithTaskFromStack(ctx) defer end() defer wg.Done() defer q.WaitReady(ctx, "3", time.Unix(3, 0))() ret := atomic.AddUint32(&ctr, 1) assert.Equal(t, uint32(3), ret) }() go func() { ctx, end := trace.WithTaskFromStack(ctx) defer end() defer wg.Done() defer q.WaitReady(ctx, "4", time.Unix(4, 0))() ret := atomic.AddUint32(&ctr, 1) assert.Equal(t, uint32(4), ret) }() wg.Wait() }
explode_data.jsonl/22489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 565 }
[ 2830, 3393, 47, 80, 2623, 443, 3231, 1155, 353, 8840, 836, 8, 341, 20985, 11, 835, 1669, 11655, 26124, 6262, 3830, 4336, 5378, 19047, 2398, 16867, 835, 741, 2405, 50638, 2622, 18, 17, 198, 18534, 1669, 501, 8304, 7554, 741, 2405, 6358...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStamperStamping(t *testing.T) { privKey, err := crypto.GenerateSecp256k1Key() if err != nil { t.Fatal(err) } owner, err := crypto.NewEthereumAddress(privKey.PublicKey) if err != nil { t.Fatal(err) } signer := crypto.NewDefaultSigner(privKey) createStamp := func(t *testing.T, stamper postage.Stamper) (swarm.Address, *postage.Stamp) { t.Helper() h := make([]byte, 32) _, err = io.ReadFull(crand.Reader, h) if err != nil { t.Fatal(err) } chunkAddr := swarm.NewAddress(h) stamp, err := stamper.Stamp(chunkAddr) if err != nil { t.Fatal(err) } return chunkAddr, stamp } // tests a valid stamp t.Run("valid stamp", func(t *testing.T) { st := newTestStampIssuer(t, 1000) stamper := postage.NewStamper(st, signer) chunkAddr, stamp := createStamp(t, stamper) if err := stamp.Valid(chunkAddr, owner, 12, 8, true); err != nil { t.Fatalf("expected no error, got %v", err) } }) // tests that Stamps returns with postage.ErrBucketMismatch t.Run("bucket mismatch", func(t *testing.T) { st := newTestStampIssuer(t, 1000) stamper := postage.NewStamper(st, signer) chunkAddr, stamp := createStamp(t, stamper) a := chunkAddr.Bytes() a[0] ^= 0xff if err := stamp.Valid(swarm.NewAddress(a), owner, 12, 8, true); !errors.Is(err, postage.ErrBucketMismatch) { t.Fatalf("expected ErrBucketMismatch, got %v", err) } }) // tests that Stamps returns with postage.ErrInvalidIndex t.Run("invalid index", func(t *testing.T) { st := newTestStampIssuer(t, 1000) stamper := postage.NewStamper(st, signer) // issue 1 stamp chunkAddr, _ := createStamp(t, stamper) // issue another 15 // collision depth is 8, committed batch depth is 12, bucket volume 2^4 for i := 0; i < 14; i++ { _, err = stamper.Stamp(chunkAddr) if err != nil { t.Fatalf("error adding stamp at step %d: %v", i, err) } } stamp, err := stamper.Stamp(chunkAddr) if err != nil { t.Fatalf("error adding last stamp: %v", err) } if err := stamp.Valid(chunkAddr, owner, 11, 8, true); !errors.Is(err, postage.ErrInvalidIndex) { t.Fatalf("expected ErrInvalidIndex, 
got %v", err) } }) // tests that Stamps returns with postage.ErrBucketFull iff // issuer has the corresponding collision bucket filled] t.Run("bucket full", func(t *testing.T) { st := postage.NewStampIssuer("", "", newTestStampIssuer(t, 1000).ID(), big.NewInt(3), 12, 8, 1000, true) stamper := postage.NewStamper(st, signer) // issue 1 stamp chunkAddr, _ := createStamp(t, stamper) // issue another 15 // collision depth is 8, committed batch depth is 12, bucket volume 2^4 for i := 0; i < 15; i++ { _, err = stamper.Stamp(chunkAddr) if err != nil { t.Fatalf("error adding stamp at step %d: %v", i, err) } } // the bucket should now be full, not allowing a stamp for the pivot chunk if _, err = stamper.Stamp(chunkAddr); !errors.Is(err, postage.ErrBucketFull) { t.Fatalf("expected ErrBucketFull, got %v", err) } }) // tests return with ErrOwnerMismatch t.Run("owner mismatch", func(t *testing.T) { owner[0] ^= 0xff // bitflip the owner first byte, this case must come last! st := newTestStampIssuer(t, 1000) stamper := postage.NewStamper(st, signer) chunkAddr, stamp := createStamp(t, stamper) if err := stamp.Valid(chunkAddr, owner, 12, 8, true); !errors.Is(err, postage.ErrOwnerMismatch) { t.Fatalf("expected ErrOwnerMismatch, got %v", err) } }) }
explode_data.jsonl/19320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1422 }
[ 2830, 3393, 623, 309, 712, 20906, 287, 1155, 353, 8840, 836, 8, 341, 71170, 1592, 11, 1848, 1669, 19028, 57582, 8430, 79, 17, 20, 21, 74, 16, 1592, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 197, 8118, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSwitchThread(t *testing.T) { protest.AllowRecording(t) withTestProcess("testnextprog", t, func(p *proc.Target, fixture protest.Fixture) { // With invalid thread id err := p.SwitchThread(-1) if err == nil { t.Fatal("Expected error for invalid thread id") } setFunctionBreakpoint(p, t, "main.main") err = p.Continue() if err != nil { t.Fatal(err) } var nt int ct := p.CurrentThread().ThreadID() for _, thread := range p.ThreadList() { if thread.ThreadID() != ct { nt = thread.ThreadID() break } } if nt == 0 { t.Fatal("could not find thread to switch to") } // With valid thread id err = p.SwitchThread(nt) if err != nil { t.Fatal(err) } if p.CurrentThread().ThreadID() != nt { t.Fatal("Did not switch threads") } }) }
explode_data.jsonl/56214
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 16837, 6855, 1155, 353, 8840, 836, 8, 341, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 1944, 3600, 32992, 497, 259, 11, 2915, 1295, 353, 15782, 35016, 11, 12507, 8665, 991, 12735, 8, 341, 197, 197, 322...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestSpecC3(t *testing.T) { var f, g *[]string init := func(c *Cmd) { f = c.StringsArg("SRC", nil, "") g = c.StringsArg("DST", nil, "") } spec := "(SRC... DST) | SRC" okCmd(t, spec, init, []string{"A"}) require.Equal(t, []string{"A"}, *f) require.Equal(t, 0, len(*g)) okCmd(t, spec, init, []string{"A", "B"}) require.Equal(t, []string{"A"}, *f) require.Equal(t, []string{"B"}, *g) okCmd(t, spec, init, []string{"A", "B", "C"}) require.Equal(t, []string{"A", "B"}, *f) require.Equal(t, []string{"C"}, *g) okCmd(t, spec, init, []string{"A", "B", "C", "D"}) require.Equal(t, []string{"A", "B", "C"}, *f) require.Equal(t, []string{"D"}, *g) }
explode_data.jsonl/23934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 318 }
[ 2830, 3393, 8327, 34, 18, 1155, 353, 8840, 836, 8, 341, 2405, 282, 11, 342, 353, 1294, 917, 198, 28248, 1669, 2915, 1337, 353, 15613, 8, 341, 197, 1166, 284, 272, 89154, 2735, 445, 56017, 497, 2092, 11, 14676, 197, 3174, 284, 272, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParsersJavaElasticsearchLogs(t *testing.T) { env := newInputTestingEnvironment(t) testlogName := "test.log" inp := env.mustCreateInput(map[string]interface{}{ "paths": []string{env.abspath(testlogName)}, "prospector.scanner.check_interval": "1ms", "parsers": []map[string]interface{}{ map[string]interface{}{ "multiline": map[string]interface{}{ "type": "pattern", "pattern": "^\\[", "negate": true, "match": "after", "timeout": "100ms", // set to lower value to speed up test }, }, }, }) testlines := []byte(elasticsearchMultilineLogs) env.mustWriteLinesToFile(testlogName, testlines) ctx, cancelInput := context.WithCancel(context.Background()) env.startInput(ctx, inp) env.waitUntilEventCount(20) env.requireOffsetInRegistry(testlogName, len(testlines)) cancelInput() env.waitUntilInputStops() }
explode_data.jsonl/14916
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 380 }
[ 2830, 3393, 47, 40488, 15041, 36, 51179, 1836, 51053, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 501, 2505, 16451, 12723, 1155, 692, 18185, 839, 675, 1669, 330, 1944, 1665, 698, 17430, 79, 1669, 6105, 69419, 4021, 2505, 9147, 14032, 313...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAttributeValue(t *testing.T) { t.Parallel() ctx, cancel := testAllocate(t, "image.html") defer cancel() tests := []struct { sel string by QueryOption attr string exp string }{ {`//*[@id="icon-brankas"]`, BySearch, "alt", "Brankas - Easy Money Management"}, {`body > img:first-child`, ByQuery, "alt", "Brankas - Easy Money Management"}, {`body > img:nth-child(2)`, ByQueryAll, "alt", "How people build software"}, {`#icon-github`, ByID, "alt", "How people build software"}, {`document.querySelector('#icon-github')`, ByJSPath, "alt", "How people build software"}, } for i, test := range tests { var value string var ok bool if err := Run(ctx, AttributeValue(test.sel, test.attr, &value, &ok, test.by)); err != nil { t.Fatalf("test %d got error: %v", i, err) } if !ok { t.Fatalf("test %d failed to get attribute %s on %s", i, test.attr, test.sel) } if value != test.exp { t.Errorf("test %d expected %s to be %s, got: %s", i, test.attr, test.exp, value) } } }
explode_data.jsonl/59478
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 78554, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 11, 9121, 1669, 1273, 75380, 1155, 11, 330, 1805, 2564, 1138, 16867, 9121, 2822, 78216, 1669, 3056, 1235, 341, 197, 1903, 301, 220, 914, 198, 197, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestUint32to24(t *testing.T) { if v := uint32to24(0xdabeef); !bytes.Equal(v, []byte{218, 190, 239}) { t.Fatalf("Unexpected result. Want [218 190 239], have %v", v) } }
explode_data.jsonl/45523
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 21570, 18, 17, 983, 17, 19, 1155, 353, 8840, 836, 8, 341, 743, 348, 1669, 2622, 18, 17, 983, 17, 19, 7, 15, 9703, 8229, 823, 1215, 753, 9651, 12808, 3747, 11, 3056, 3782, 90, 17, 16, 23, 11, 220, 16, 24, 15, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRuleText(t *testing.T) { input := ` package test r[x] = y { x = input.a x = "foo" } { x = input.b x = "bar" } { x = input.c x = "baz" } r[x] = y { x = input.d x = "qux" } ` mod := MustParseModule(input) rules := mod.Rules if len(rules) != 4 { t.Fatalf("Expected 4 rules, got %d", len(rules)) } expectedRuleText := []string{ ` r[x] = y { x = input.a x = "foo" } `, ` { x = input.b x = "bar" } `, ` { x = input.c x = "baz" } `, ` r[x] = y { x = input.d x = "qux" } `, } assertLocationText(t, strings.TrimSpace(expectedRuleText[0]), rules[0].Location) assertLocationText(t, "r[x] = y", rules[0].Head.Location) assertLocationText(t, "y", rules[0].Head.Value.Location) // Chained rules recursively set text on heads to be the full rule for i := 1; i < len(expectedRuleText)-1; i++ { text := strings.TrimSpace(expectedRuleText[i]) assertLocationText(t, text, rules[i].Location) assertLocationText(t, text, rules[i].Head.Location) assertLocationText(t, text, rules[i].Head.Value.Location) } assertLocationText(t, strings.TrimSpace(expectedRuleText[3]), rules[3].Location) assertLocationText(t, "r[x] = y", rules[3].Head.Location) assertLocationText(t, "y", rules[3].Head.Value.Location) }
explode_data.jsonl/50502
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 548 }
[ 2830, 3393, 11337, 1178, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 1565, 6328, 1273, 271, 81, 8323, 60, 284, 379, 341, 10225, 284, 1946, 5849, 198, 10225, 284, 330, 7975, 698, 92, 341, 10225, 284, 1946, 948, 198, 10225, 284, 330, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInodeIdsIncrementing(t *testing.T) { runTestNoQfs(t, func(test *testHelper) { ids := newInodeIds(100*time.Millisecond, time.Hour) c := test.newCtx() test.Assert(newInodeId(c, ids) == 4, "Wrong 1st inodeId given") test.Assert(newInodeId(c, ids) == 5, "Wrong 2nd inodeId given") test.Assert(newInodeId(c, ids) == 6, "Wrong 3rd inodeId given") ids.releaseInodeId(c, 4) time.Sleep(50 * time.Millisecond) test.Assert(newInodeId(c, ids) == 7, "Wrong next id during delay") time.Sleep(60 * time.Millisecond) test.Assert(newInodeId(c, ids) == 4, "Didn't get to reuse 1st id") test.Assert(newInodeId(c, ids) == 8, "Wrong next id") }) }
explode_data.jsonl/1826
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 299 }
[ 2830, 3393, 641, 534, 12701, 38311, 287, 1155, 353, 8840, 836, 8, 341, 56742, 2271, 2753, 48, 3848, 1155, 11, 2915, 8623, 353, 1944, 5511, 8, 341, 197, 197, 3365, 1669, 501, 641, 534, 12701, 7, 16, 15, 15, 77053, 71482, 11, 882, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogHandler_GetLogEntries(t *testing.T) { type fields struct { logManager *fake.ILogManagerMock } tests := []struct { name string fields fields request *http.Request wantStatus int wantLogs *models.GetLogsResponse wantGetLogsParams *models.GetLogParams }{ { name: "get logs - no filter", fields: fields{ &fake.ILogManagerMock{ GetLogEntriesFunc: func(filter models.GetLogParams) (*models.GetLogsResponse, error) { return &models.GetLogsResponse{ NextPageKey: 0, PageSize: 1, TotalCount: 1, Logs: []models.LogEntry{ { IntegrationID: "my-id", Message: "my message", }, }, }, nil }, }, }, request: httptest.NewRequest(http.MethodGet, "/log", nil), wantStatus: http.StatusOK, wantLogs: &models.GetLogsResponse{ NextPageKey: 0, PageSize: 1, TotalCount: 1, Logs: []models.LogEntry{ { IntegrationID: "my-id", Message: "my message", }, }, }, wantGetLogsParams: &models.GetLogParams{}, }, { name: "get logs - with filter", fields: fields{ &fake.ILogManagerMock{ GetLogEntriesFunc: func(filter models.GetLogParams) (*models.GetLogsResponse, error) { return &models.GetLogsResponse{ NextPageKey: 0, PageSize: 1, TotalCount: 1, Logs: []models.LogEntry{ { IntegrationID: "my-id", Message: "my message", }, }, }, nil }, }, }, request: httptest.NewRequest(http.MethodGet, "/log?nextPageKey=1&pageSize=2&integrationId=my-id&fromTime=from&beforeTime=to", nil), wantStatus: http.StatusOK, wantLogs: &models.GetLogsResponse{ NextPageKey: 0, PageSize: 1, TotalCount: 1, Logs: []models.LogEntry{ { IntegrationID: "my-id", Message: "my message", }, }, }, wantGetLogsParams: &models.GetLogParams{ NextPageKey: 1, PageSize: 2, LogFilter: models.LogFilter{ IntegrationID: "my-id", FromTime: "from", BeforeTime: "to", }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { lh := handler.NewLogHandler(tt.fields.logManager) router := gin.Default() router.GET("/log", func(c *gin.Context) { lh.GetLogEntries(c) }) w := performRequest(router, tt.request) require.Equal(t, tt.wantStatus, w.Code) if 
tt.wantGetLogsParams != nil { require.Len(t, tt.fields.logManager.GetLogEntriesCalls(), 1) require.Equal(t, *tt.wantGetLogsParams, tt.fields.logManager.GetLogEntriesCalls()[0].Filter) } if tt.wantLogs != nil { logs := &models.GetLogsResponse{} err := json.Unmarshal(w.Body.Bytes(), logs) require.Nil(t, err) require.Equal(t, tt.wantLogs, logs) } }) } }
explode_data.jsonl/71970
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1481 }
[ 2830, 3393, 2201, 3050, 13614, 2201, 24533, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 6725, 2043, 353, 30570, 13, 64909, 2043, 11571, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 1060, 914, 198, 197, 55276, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDefineTypeOfExample(t *testing.T) { var example interface{} var err error example, err = defineTypeOfExample("string", "", "example") assert.NoError(t, err) assert.Equal(t, example.(string), "example") example, err = defineTypeOfExample("number", "", "12.34") assert.NoError(t, err) assert.Equal(t, example.(float64), 12.34) example, err = defineTypeOfExample("boolean", "", "true") assert.NoError(t, err) assert.Equal(t, example.(bool), true) example, err = defineTypeOfExample("array", "", "one,two,three") assert.Error(t, err) assert.Nil(t, example) example, err = defineTypeOfExample("array", "string", "one,two,three") assert.NoError(t, err) arr := []string{} for _, v := range example.([]interface{}) { arr = append(arr, v.(string)) } assert.Equal(t, arr, []string{"one", "two", "three"}) example, err = defineTypeOfExample("object", "", "key_one:one,key_two:two,key_three:three") assert.Error(t, err) assert.Nil(t, example) example, err = defineTypeOfExample("object", "string", "key_one,key_two,key_three") assert.Error(t, err) assert.Nil(t, example) example, err = defineTypeOfExample("object", "oops", "key_one:one,key_two:two,key_three:three") assert.Error(t, err) assert.Nil(t, example) example, err = defineTypeOfExample("object", "string", "key_one:one,key_two:two,key_three:three") assert.NoError(t, err) obj := map[string]string{} for k, v := range example.(map[string]interface{}) { obj[k] = v.(string) } assert.Equal(t, obj, map[string]string{"key_one": "one", "key_two": "two", "key_three": "three"}) example, err = defineTypeOfExample("oops", "", "") assert.Error(t, err) assert.Nil(t, example) }
explode_data.jsonl/63591
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 637 }
[ 2830, 3393, 35338, 929, 2124, 13314, 1155, 353, 8840, 836, 8, 341, 2405, 3110, 3749, 16094, 2405, 1848, 1465, 271, 8122, 1516, 11, 1848, 284, 6979, 929, 2124, 13314, 445, 917, 497, 7342, 330, 8687, 1138, 6948, 35699, 1155, 11, 1848, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestErrorCodeStringer(t *testing.T) { tests := []struct { in database.ErrorCode want string }{ {database.ErrDbTypeRegistered, "ErrDbTypeRegistered"}, {database.ErrDbUnknownType, "ErrDbUnknownType"}, {database.ErrDbDoesNotExist, "ErrDbDoesNotExist"}, {database.ErrDbExists, "ErrDbExists"}, {database.ErrDbNotOpen, "ErrDbNotOpen"}, {database.ErrDbAlreadyOpen, "ErrDbAlreadyOpen"}, {database.ErrInvalid, "ErrInvalid"}, {database.ErrCorruption, "ErrCorruption"}, {database.ErrTxClosed, "ErrTxClosed"}, {database.ErrTxNotWritable, "ErrTxNotWritable"}, {database.ErrBucketNotFound, "ErrBucketNotFound"}, {database.ErrBucketExists, "ErrBucketExists"}, {database.ErrBucketNameRequired, "ErrBucketNameRequired"}, {database.ErrKeyRequired, "ErrKeyRequired"}, {database.ErrKeyTooLarge, "ErrKeyTooLarge"}, {database.ErrValueTooLarge, "ErrValueTooLarge"}, {database.ErrIncompatibleValue, "ErrIncompatibleValue"}, {database.ErrBlockNotFound, "ErrBlockNotFound"}, {database.ErrBlockExists, "ErrBlockExists"}, {database.ErrBlockRegionInvalid, "ErrBlockRegionInvalid"}, {database.ErrDriverSpecific, "ErrDriverSpecific"}, {0xffff, "Unknown ErrorCode (65535)"}, } // Detect additional error codes that don't have the stringer added. if len(tests)-1 != int(database.TstNumErrorCodes) { t.Errorf("It appears an error code was added without adding " + "an associated stringer test") } t.Logf("Running %d tests", len(tests)) for i, test := range tests { result := test.in.String() if result != test.want { t.Errorf("String #%d\ngot: %s\nwant: %s", i, result, test.want) continue } } }
explode_data.jsonl/7152
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 630 }
[ 2830, 3393, 30748, 703, 261, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 256, 4625, 98433, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 90, 12216, 27862, 20886, 41430, 11, 330, 7747, 20886, 41430, 7115, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeleteService(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) { testutils.AssertEqual(t, r.Method, "DELETE") testutils.AssertEqual(t, r.URL.Path, "/service/id") headers := map[string]string{ "Location": "/job/jobid", "X-JobID": "jobid", } MarshalAndWriteHeader(t, w, "", headers, 202) } client, server := newClientAndServer(handler) defer server.Close() jobID, err := client.DeleteService("id") if err != nil { t.Fatal(err) } testutils.AssertEqual(t, jobID, "jobid") }
explode_data.jsonl/24179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 6435, 1860, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 18185, 6031, 11711, 2993, 1155, 11, 435, 20798, 11, 330, 14424, 1138, 197, 18185, 6031, 11711, 2993, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileByPhotoUID(t *testing.T) { t.Run("files found", func(t *testing.T) { file, err := FileByPhotoUID("pt9jtdre2lvl0y11") if err != nil { t.Fatal(err) } assert.Equal(t, "bridge.jpg", file.FileName) }) t.Run("no files found", func(t *testing.T) { file, err := FileByPhotoUID("111") assert.Error(t, err, "record not found") t.Log(file) }) }
explode_data.jsonl/71299
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 164 }
[ 2830, 3393, 1703, 1359, 10463, 6463, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 7198, 1730, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 17661, 11, 1848, 1669, 2887, 1359, 10463, 6463, 445, 417, 24, 73, 1296, 265, 17, 58160, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLookup(t *testing.T) { zone, err := Parse(strings.NewReader(dbMiekNL), testzone, "stdin", 0) if err != nil { t.Fatalf("Expected no error when reading zone, got %q", err) } fm := File{Next: test.ErrorHandler(), Zones: Zones{Z: map[string]*Zone{testzone: zone}, Names: []string{testzone}}} ctx := context.TODO() for _, tc := range dnsTestCases { m := tc.Msg() rec := dnstest.NewRecorder(&test.ResponseWriter{}) _, err := fm.ServeDNS(ctx, rec, m) if err != nil { t.Errorf("Expected no error, got %v", err) return } resp := rec.Msg if err := test.SortAndCheck(resp, tc); err != nil { t.Error(err) } } }
explode_data.jsonl/68724
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 272 }
[ 2830, 3393, 34247, 1155, 353, 8840, 836, 8, 341, 197, 8684, 11, 1848, 1669, 14775, 51442, 68587, 9791, 44, 35007, 30042, 701, 1273, 8684, 11, 330, 51602, 497, 220, 15, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 18896, 902,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestExternalModuleExclusionScopedPackage(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/index.js": ` import '@a1' import '@a1/a2' import '@a1-a2' import '@b1' import '@b1/b2' import '@b1/b2/b3' import '@b1/b2-b3' import '@c1' import '@c1/c2' import '@c1/c2/c3' import '@c1/c2/c3/c4' import '@c1/c2/c3-c4' `, }, entryPaths: []string{"/index.js"}, options: config.Options{ Mode: config.ModeBundle, AbsOutputFile: "/out.js", ExternalModules: config.ExternalModules{ NodeModules: map[string]bool{ "@a1": true, "@b1/b2": true, "@c1/c2/c3": true, }, }, }, expectedScanLog: `index.js: error: Could not resolve "@a1-a2" (mark it as external to exclude it from the bundle) index.js: error: Could not resolve "@b1" (mark it as external to exclude it from the bundle) index.js: error: Could not resolve "@b1/b2-b3" (mark it as external to exclude it from the bundle) index.js: error: Could not resolve "@c1" (mark it as external to exclude it from the bundle) index.js: error: Could not resolve "@c1/c2" (mark it as external to exclude it from the bundle) index.js: error: Could not resolve "@c1/c2/c3-c4" (mark it as external to exclude it from the bundle) `, }) }
explode_data.jsonl/38523
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 595 }
[ 2830, 3393, 25913, 3332, 840, 8957, 39437, 13100, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 1252, 2857, 788, 22074, 571, 21918, 3458,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestABCIValidatorFromPubKeyAndPower(t *testing.T) { pubkey := bls12381.GenPrivKey().PubKey() abciVal := TM2PB.NewValidatorUpdate(pubkey, 10) assert.Equal(t, int64(10), abciVal.Power) assert.Panics(t, func() { TM2PB.NewValidatorUpdate(nil, 10) }) assert.Panics(t, func() { TM2PB.NewValidatorUpdate(pubKeyBLS{}, 10) }) }
explode_data.jsonl/65077
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 1867, 11237, 14256, 3830, 29162, 1592, 3036, 14986, 1155, 353, 8840, 836, 8, 341, 62529, 792, 1669, 1501, 82, 16, 17, 18, 23, 16, 65384, 32124, 1592, 1005, 29162, 1592, 2822, 197, 370, 5855, 2208, 1669, 23975, 17, 40637, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInvalidNilValues(t *testing.T) { var date1 time.Time var date2 int tests := []struct { name string input interface{} expectedError string }{ { name: "time.Time", input: &date1, expectedError: `sql: Scan error on column index 0, name "bdate": unsupported Scan, storing driver.Value type <nil> into type *time.Time`, }, { name: "int", input: &date2, expectedError: `sql: Scan error on column index 0, name "bdate": converting NULL to int is unsupported`, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { db := newTestDB(t, "people") defer closeDB(t, db) ctx, cancel := context.WithCancel(context.Background()) defer cancel() conn, err := db.Conn(ctx) if err != nil { t.Fatal(err) } conn.dc.ci.(*fakeConn).skipDirtySession = true defer conn.Close() err = conn.QueryRowContext(ctx, "SELECT|people|bdate|age=?", 1).Scan(tt.input) if err == nil { t.Fatal("expected error when querying nil column, but succeeded") } if err.Error() != tt.expectedError { t.Fatalf("Expected error: %s\nReceived: %s", tt.expectedError, err.Error()) } err = conn.PingContext(ctx) if err != nil { t.Fatal(err) } }) } }
explode_data.jsonl/15982
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 7928, 19064, 6227, 1155, 353, 8840, 836, 8, 341, 2405, 2400, 16, 882, 16299, 198, 2405, 2400, 17, 526, 271, 78216, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 22427, 260, 3749, 16094, 197, 42400, 1454, 914, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSortedArray_Clone(t *testing.T) { gtest.C(t, func(t *gtest.T) { a1 := []interface{}{"a", "d", "c", "b", "e", "f"} func1 := func(v1, v2 interface{}) int { return strings.Compare(gconv.String(v1), gconv.String(v2)) } array1 := garray.NewSortedArrayFrom(a1, func1) array2 := array1.Clone() t.Assert(array1, array2) array1.Remove(1) t.AssertNE(array1, array2) }) }
explode_data.jsonl/67019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 51051, 1857, 85110, 603, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 11323, 16, 1669, 3056, 4970, 6257, 4913, 64, 497, 330, 67, 497, 330, 66, 497, 330, 65, 497, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGoCSVNewWithInputsToOutput(t *testing.T) { // Change directory to project root so the test cases can form the correct pkg imports cleanupFunc := chDirWithCleanup(t, testNonStandardLayoutDataDir) defer cleanupFunc() // Temporary output dir for generating catalog bundle outputDir, rmDirFunc := mkTempDirWithCleanup(t, t.Name()+"-output-catalog") defer rmDirFunc() cfg := gen.Config{ OperatorName: testProjectName, Inputs: map[string]string{ DeployDirKey: "config", APIsDirKey: "api", CRDsDirKey: filepath.Join("config", "crds"), }, OutputDir: outputDir, } csvVersion := "0.0.1" g := NewBundle(cfg, csvVersion, "", false, false).(bundleGenerator) g.noUpdate = true if err := g.Generate(); err != nil { t.Fatalf("Failed to execute CSV generator: %v", err) } csvFileName := getCSVFileNameLegacy(testProjectName, csvVersion) // Read expected CSV expBundleDir := filepath.Join("expected-catalog", OLMCatalogChildDir, testProjectName, csvVersion) csvExp := string(readFile(t, filepath.Join(expBundleDir, csvFileName))) // Read generated CSV from outputDir path outputBundleDir := filepath.Join(outputDir, OLMCatalogChildDir, testProjectName, csvVersion) csvOutput := string(readFile(t, filepath.Join(outputBundleDir, csvFileName))) assert.Equal(t, csvExp, csvOutput) }
explode_data.jsonl/21320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 468 }
[ 2830, 3393, 10850, 44209, 3564, 2354, 31946, 1249, 5097, 1155, 353, 8840, 836, 8, 341, 197, 322, 10388, 6220, 311, 2390, 3704, 773, 279, 1273, 5048, 646, 1352, 279, 4396, 24793, 15202, 198, 1444, 60639, 9626, 1669, 521, 6184, 2354, 6733...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAllKeys(t *testing.T) { initConfigs() ks := sort.StringSlice{"title", "newkey", "owner.organization", "owner.dob", "owner.bio", "name", "beard", "ppu", "batters.batter", "hobbies", "clothing.jacket", "clothing.trousers", "clothing.pants.size", "age", "hacker", "id", "type", "eyes", "p_id", "p_ppu", "p_batters.batter.type", "p_type", "p_name", "foos"} dob, _ := time.Parse(time.RFC3339, "1979-05-27T07:32:00Z") all := map[string]interface{}{"owner": map[string]interface{}{"organization": "MongoDB", "bio": "MongoDB Chief Developer Advocate & Hacker at Large", "dob": dob}, "title": "TOML Example", "ppu": 0.55, "eyes": "brown", "clothing": map[string]interface{}{"trousers": "denim", "jacket": "leather", "pants": map[string]interface{}{"size": "large"}}, "id": "0001", "batters": map[string]interface{}{"batter": []interface{}{map[string]interface{}{"type": "Regular"}, map[string]interface{}{"type": "Chocolate"}, map[string]interface{}{"type": "Blueberry"}, map[string]interface{}{"type": "Devil's Food"}}}, "hacker": true, "beard": true, "hobbies": []interface{}{"skateboarding", "snowboarding", "go"}, "age": 35, "type": "donut", "newkey": "remote", "name": "Cake", "p_id": "0001", "p_ppu": "0.55", "p_name": "Cake", "p_batters": map[string]interface{}{"batter": map[string]interface{}{"type": "Regular"}}, "p_type": "donut", "foos": []map[string]interface{}{map[string]interface{}{"foo": []map[string]interface{}{map[string]interface{}{"key": 1}, map[string]interface{}{"key": 2}, map[string]interface{}{"key": 3}, map[string]interface{}{"key": 4}}}}} var allkeys sort.StringSlice allkeys = AllKeys() allkeys.Sort() ks.Sort() assert.Equal(t, ks, allkeys) assert.Equal(t, all, AllSettings()) }
explode_data.jsonl/5564
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 652 }
[ 2830, 3393, 2403, 8850, 1155, 353, 8840, 836, 8, 341, 28248, 84905, 2822, 197, 2787, 1669, 3378, 6431, 33236, 4913, 2102, 497, 330, 931, 792, 497, 330, 8118, 69224, 497, 330, 8118, 950, 674, 497, 330, 8118, 78818, 497, 330, 606, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnitExtractTarballTmp(t *testing.T) { sut := NewSPDX() // Non existent files shoud error _, err := sut.ExtractTarballTmp("lsdjkflskdjfl") require.Error(t, err) // Lets test a zipped and unzipped tarball for _, tf := range []bool{true, false} { tarFile := writeTestTarball(t, tf) require.NotNil(t, tarFile) defer os.Remove(tarFile.Name()) dir, err := sut.ExtractTarballTmp(tarFile.Name()) require.Nil(t, err, "extracting file") defer os.RemoveAll(dir) require.True(t, util.Exists(filepath.Join(dir, "/text.txt")), "checking directory") require.True(t, util.Exists(filepath.Join(dir, "/subdir/text.txt")), "checking subdirectory") require.True(t, util.Exists(dir), "checking directory") } }
explode_data.jsonl/7735
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 4562, 28959, 62733, 3959, 35986, 1155, 353, 8840, 836, 8, 341, 1903, 332, 1669, 1532, 4592, 16591, 741, 197, 322, 11581, 3000, 306, 3542, 557, 2950, 1465, 198, 197, 6878, 1848, 1669, 34212, 5121, 2144, 62733, 3959, 35986, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIDDriverScanByteFromDatabase(t *testing.T) { id := ID{} bs := []byte("9m4e2mr0ui3e8a215n4g") err := id.Scan(bs) assert.NoError(t, err) assert.Equal(t, ID{0x4d, 0x88, 0xe1, 0x5b, 0x60, 0xf4, 0x86, 0xe4, 0x28, 0x41, 0x2d, 0xc9}, id) }
explode_data.jsonl/58929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 915, 11349, 26570, 7153, 3830, 5988, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 3034, 16094, 93801, 1669, 3056, 3782, 445, 24, 76, 19, 68, 17, 20946, 15, 1963, 18, 68, 23, 64, 17, 16, 20, 77, 19, 70, 1138, 9859, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestV1beta1TLSConfig(t *testing.T) { tests := []struct { test string clientCert, clientKey, clientCA []byte serverCert, serverKey, serverCA []byte wantErr bool }{ { test: "TLS setup between client and server", clientCert: clientCert, clientKey: clientKey, clientCA: caCert, serverCert: serverCert, serverKey: serverKey, serverCA: caCert, }, { test: "Server does not require client auth", clientCA: caCert, serverCert: serverCert, serverKey: serverKey, }, { test: "Server does not require client auth, client provides it", clientCert: clientCert, clientKey: clientKey, clientCA: caCert, serverCert: serverCert, serverKey: serverKey, }, { test: "Client does not trust server", clientCert: clientCert, clientKey: clientKey, serverCert: serverCert, serverKey: serverKey, wantErr: true, }, { test: "Server does not trust client", clientCert: clientCert, clientKey: clientKey, clientCA: caCert, serverCert: serverCert, serverKey: serverKey, serverCA: badCACert, wantErr: true, }, { // Plugin does not support insecure configurations. test: "Server is using insecure connection", wantErr: true, }, } for _, tt := range tests { // Use a closure so defer statements trigger between loop iterations. func() { service := new(mockV1beta1Service) service.statusCode = 200 server, err := NewV1beta1TestServer(service, tt.serverCert, tt.serverKey, tt.serverCA) if err != nil { t.Errorf("%s: failed to create server: %v", tt.test, err) return } defer server.Close() wh, err := newV1beta1TokenAuthenticator(server.URL, tt.clientCert, tt.clientKey, tt.clientCA, 0, nil) if err != nil { t.Errorf("%s: failed to create client: %v", tt.test, err) return } // Allow all and see if we get an error. 
service.Allow() _, authenticated, err := wh.AuthenticateToken(context.Background(), "t0k3n") if tt.wantErr { if err == nil { t.Errorf("expected error making authorization request: %v", err) } return } if !authenticated { t.Errorf("%s: failed to authenticate token", tt.test) return } service.Deny() _, authenticated, err = wh.AuthenticateToken(context.Background(), "t0k3n") if err != nil { t.Errorf("%s: unexpectedly failed AuthenticateToken", tt.test) } if authenticated { t.Errorf("%s: incorrectly authenticated token", tt.test) } }() } }
explode_data.jsonl/35037
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1032 }
[ 2830, 3393, 53, 16, 19127, 16, 45439, 2648, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 18185, 999, 914, 198, 197, 25291, 36934, 11, 2943, 1592, 11, 2943, 5049, 3056, 3782, 198, 197, 41057, 36934, 11, 3538, 1592, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func Test_costructRegionFromZone(t *testing.T) { type args struct { zone string } tests := []struct { name string args args want string }{ { name: "DC Zone", args: args{ zone: "dal12", }, want: "dal", }, { name: "AZ Zone", args: args{ zone: "eu-de-1", }, want: "eu-de", }, { name: "Region Zone", args: args{ zone: "us-south", }, want: "us-south", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := costructRegionFromZone(tt.args.zone); got != tt.want { t.Errorf("costructRegionFromZone() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/37962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 321 }
[ 2830, 3393, 11393, 1235, 14091, 3830, 15363, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 197, 8684, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 914, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLoggerDebugExpected(t *testing.T) { checkMessages(t, zapcore.DebugLevel, []Option{WithDebug()}, zapcore.DebugLevel, []string{ "hello", "world", "foo", }, func(logger *Logger) { logger.Print("hello") logger.Printf("world") logger.Println("foo") }) }
explode_data.jsonl/58460
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 7395, 7939, 18896, 1155, 353, 8840, 836, 8, 341, 25157, 15820, 1155, 11, 32978, 2153, 20345, 4449, 11, 3056, 5341, 90, 2354, 7939, 76777, 32978, 2153, 20345, 4449, 11, 3056, 917, 515, 197, 197, 1, 14990, 756, 197, 197, 1, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateTeamMemberSchemeRoles(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() SystemAdminClient := th.SystemAdminClient th.LoginBasic() s1 := &model.SchemeRoles{ SchemeAdmin: false, SchemeUser: false, SchemeGuest: false, } _, r1 := SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.BasicUser.Id, s1) CheckNoError(t, r1) tm1, rtm1 := SystemAdminClient.GetTeamMember(th.BasicTeam.Id, th.BasicUser.Id, "") CheckNoError(t, rtm1) assert.Equal(t, false, tm1.SchemeGuest) assert.Equal(t, false, tm1.SchemeUser) assert.Equal(t, false, tm1.SchemeAdmin) s2 := &model.SchemeRoles{ SchemeAdmin: false, SchemeUser: true, SchemeGuest: false, } _, r2 := SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.BasicUser.Id, s2) CheckNoError(t, r2) tm2, rtm2 := SystemAdminClient.GetTeamMember(th.BasicTeam.Id, th.BasicUser.Id, "") CheckNoError(t, rtm2) assert.Equal(t, false, tm2.SchemeGuest) assert.Equal(t, true, tm2.SchemeUser) assert.Equal(t, false, tm2.SchemeAdmin) s3 := &model.SchemeRoles{ SchemeAdmin: true, SchemeUser: false, SchemeGuest: false, } _, r3 := SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.BasicUser.Id, s3) CheckNoError(t, r3) tm3, rtm3 := SystemAdminClient.GetTeamMember(th.BasicTeam.Id, th.BasicUser.Id, "") CheckNoError(t, rtm3) assert.Equal(t, false, tm3.SchemeGuest) assert.Equal(t, false, tm3.SchemeUser) assert.Equal(t, true, tm3.SchemeAdmin) s4 := &model.SchemeRoles{ SchemeAdmin: true, SchemeUser: true, SchemeGuest: false, } _, r4 := SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.BasicUser.Id, s4) CheckNoError(t, r4) tm4, rtm4 := SystemAdminClient.GetTeamMember(th.BasicTeam.Id, th.BasicUser.Id, "") CheckNoError(t, rtm4) assert.Equal(t, false, tm4.SchemeGuest) assert.Equal(t, true, tm4.SchemeUser) assert.Equal(t, true, tm4.SchemeAdmin) s5 := &model.SchemeRoles{ SchemeAdmin: false, SchemeUser: false, SchemeGuest: true, } _, r5 := 
SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.BasicUser.Id, s5) CheckNoError(t, r5) tm5, rtm5 := SystemAdminClient.GetTeamMember(th.BasicTeam.Id, th.BasicUser.Id, "") CheckNoError(t, rtm5) assert.Equal(t, true, tm5.SchemeGuest) assert.Equal(t, false, tm5.SchemeUser) assert.Equal(t, false, tm5.SchemeAdmin) s6 := &model.SchemeRoles{ SchemeAdmin: false, SchemeUser: true, SchemeGuest: true, } _, resp := SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.BasicUser.Id, s6) CheckBadRequestStatus(t, resp) _, resp = SystemAdminClient.UpdateTeamMemberSchemeRoles(model.NewId(), th.BasicUser.Id, s4) CheckNotFoundStatus(t, resp) _, resp = SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, model.NewId(), s4) CheckNotFoundStatus(t, resp) _, resp = SystemAdminClient.UpdateTeamMemberSchemeRoles("ASDF", th.BasicUser.Id, s4) CheckBadRequestStatus(t, resp) _, resp = SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, "ASDF", s4) CheckBadRequestStatus(t, resp) th.LoginBasic2() _, resp = th.Client.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.BasicUser.Id, s4) CheckForbiddenStatus(t, resp) SystemAdminClient.Logout() _, resp = SystemAdminClient.UpdateTeamMemberSchemeRoles(th.BasicTeam.Id, th.SystemAdminUser.Id, s4) CheckUnauthorizedStatus(t, resp) }
explode_data.jsonl/70733
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1398 }
[ 2830, 3393, 4289, 14597, 9366, 28906, 25116, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 5816, 7210, 2959, 1669, 270, 16620, 7210, 2959, 198, 70479, 32499, 15944, 2822, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetEventsInRangeWithEventsInsertedLikeBackfill(t *testing.T) { t.Parallel() db := MustCreateDatabase(t) events, _ := SimpleRoom(t, testRoomID, testUserIDA, testUserIDB) // "federation" join userC := fmt.Sprintf("@radiance:%s", testOrigin) joinEvent := MustCreateEvent(t, testRoomID, []gomatrixserverlib.HeaderedEvent{events[len(events)-1]}, &gomatrixserverlib.EventBuilder{ Content: []byte(fmt.Sprintf(`{"membership":"join"}`)), Type: "m.room.member", StateKey: &userC, Sender: userC, Depth: int64(len(events) + 1), }) MustWriteEvents(t, db, []gomatrixserverlib.HeaderedEvent{joinEvent}) // Sync will return this for the prev_batch from := topologyTokenBefore(t, db, joinEvent.EventID()) // inject events in batches as if they were from backfill // e.g [1,2,3,4,5,6] => [4,5,6] , [1,2,3] chunkSize := 5 for i := len(events); i >= 0; i -= chunkSize { start := i - chunkSize if start < 0 { start = 0 } backfill := events[start:i] MustWriteEvents(t, db, backfill) } // head towards the beginning of time to := types.NewTopologyToken(0, 0) // starting at `from`, backpaginate to the beginning of time, asserting as we go. chunkSize = 3 events = reversed(events) for i := 0; i < len(events); i += chunkSize { paginatedEvents, err := db.GetEventsInTopologicalRange(ctx, from, &to, testRoomID, chunkSize, true) if err != nil { t.Fatalf("GetEventsInRange returned an error: %s", err) } gots := gomatrixserverlib.HeaderedToClientEvents(db.StreamEventsToEvents(&testUserDeviceA, paginatedEvents), gomatrixserverlib.FormatAll) endi := i + chunkSize if endi > len(events) { endi = len(events) } assertEventsEqual(t, from.String(), true, gots, events[i:endi]) from = topologyTokenBefore(t, db, paginatedEvents[len(paginatedEvents)-1].EventID()) } }
explode_data.jsonl/36041
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 695 }
[ 2830, 3393, 1949, 7900, 76059, 2354, 7900, 91269, 12949, 3707, 7559, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20939, 1669, 15465, 4021, 5988, 1155, 340, 90873, 11, 716, 1669, 8993, 14003, 1155, 11, 1273, 14003, 915, 11, 127...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGenerateHelmWithValues(t *testing.T) { service := newService("../..") res, err := service.GenerateManifest(context.Background(), &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, AppName: "test", ApplicationSource: &argoappv1.ApplicationSource{ Path: "./util/helm/testdata/redis", Helm: &argoappv1.ApplicationSourceHelm{ ValueFiles: []string{"values-production.yaml"}, Values: `cluster: {slaveCount: 2}`, }, }, }) assert.NoError(t, err) replicasVerified := false for _, src := range res.Manifests { obj := unstructured.Unstructured{} err = json.Unmarshal([]byte(src), &obj) assert.NoError(t, err) if obj.GetKind() == "Deployment" && obj.GetName() == "test-redis-slave" { var dep v1.Deployment err := runtime.DefaultUnstructuredConverter.FromUnstructured(obj.Object, &dep) assert.NoError(t, err) assert.Equal(t, int32(2), *dep.Spec.Replicas) replicasVerified = true } } assert.True(t, replicasVerified) }
explode_data.jsonl/5674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 402 }
[ 2830, 3393, 31115, 39, 23162, 2354, 6227, 1155, 353, 8840, 836, 8, 341, 52934, 1669, 501, 1860, 17409, 496, 5130, 10202, 11, 1848, 1669, 2473, 57582, 38495, 5378, 19047, 1507, 609, 391, 292, 1451, 72272, 1900, 515, 197, 197, 25243, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIssue16029(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test;") tk.MustExec("drop table if exists t0,t1;") tk.MustExec("CREATE TABLE t0(c0 INT);") tk.MustExec("CREATE TABLE t1(c0 INT);") tk.MustExec("INSERT INTO t0 VALUES (NULL), (1);") tk.MustExec("INSERT INTO t1 VALUES (0);") tk.MustQuery("SELECT t0.c0 FROM t0 JOIN t1 ON (t0.c0 REGEXP 1) | t1.c0 WHERE BINARY STRCMP(t1.c0, t0.c0);").Check(testkit.Rows("1")) tk.MustExec("drop table t0;") tk.MustExec("drop table t1;") }
explode_data.jsonl/65528
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 42006, 16, 21, 15, 17, 24, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBeginningOfMonth(t *testing.T) { assert := assert.New(t) testCases := [...]struct { Input time.Time Expected time.Time }{ {Input: time.Date(2019, 9, 9, 17, 59, 44, 0, time.UTC), Expected: time.Date(2019, 9, 1, 0, 0, 0, 0, time.UTC)}, {Input: time.Date(2019, 9, 1, 0, 0, 0, 0, time.UTC), Expected: time.Date(2019, 9, 1, 0, 0, 0, 0, time.UTC)}, {Input: time.Date(2019, 9, 30, 23, 59, 59, 0, time.UTC), Expected: time.Date(2019, 9, 1, 0, 0, 0, 0, time.UTC)}, } for _, tc := range testCases { assert.InTimeDelta( tc.Expected, BeginningOfMonth(tc.Input), time.Second, fmt.Sprintf("input: %v expected: %v", tc.Input, tc.Expected), ) } }
explode_data.jsonl/81201
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 305 }
[ 2830, 3393, 75290, 59646, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 18185, 37302, 1669, 48179, 1235, 341, 197, 66588, 262, 882, 16299, 198, 197, 197, 18896, 882, 16299, 198, 197, 59403, 197, 197, 90, 2505, 25, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGet_archive(t *testing.T) { dst := tempDir(t) u := filepath.Join("./test-fixtures", "archive.tar.gz") u, _ = filepath.Abs(u) if err := Get(dst, u); err != nil { t.Fatalf("err: %s", err) } mainPath := filepath.Join(dst, "main.tf") if _, err := os.Stat(mainPath); err != nil { t.Fatalf("err: %s", err) } }
explode_data.jsonl/813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 1949, 42873, 1155, 353, 8840, 836, 8, 341, 52051, 1669, 2730, 6184, 1155, 340, 10676, 1669, 26054, 22363, 13988, 1944, 70913, 18513, 497, 330, 16019, 28048, 20963, 1138, 10676, 11, 716, 284, 26054, 33255, 8154, 692, 743, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUpdateWithNoRepo(t *testing.T) { // Set up a fake repo srv, err := repotest.NewTempServerWithCleanup(t, "testdata/*.tgz*") if err != nil { t.Fatal(err) } defer srv.Stop() if err := srv.LinkIndices(); err != nil { t.Fatal(err) } dir := func(p ...string) string { return filepath.Join(append([]string{srv.Root()}, p...)...) } // Setup the dependent chart d := &chart.Chart{ Metadata: &chart.Metadata{ Name: "dep-chart", Version: "0.1.0", APIVersion: "v1", }, } // Save a chart with the dependency c := &chart.Chart{ Metadata: &chart.Metadata{ Name: "with-dependency", Version: "0.1.0", APIVersion: "v2", Dependencies: []*chart.Dependency{{ Name: d.Metadata.Name, Version: "0.1.0", }}, }, } if err := chartutil.SaveDir(c, dir()); err != nil { t.Fatal(err) } // Save dependent chart into the parents charts directory. If the chart is // not in the charts directory Helm will return an error that it is not // found. if err := chartutil.SaveDir(d, dir(c.Metadata.Name, "charts")); err != nil { t.Fatal(err) } // Set-up a manager b := bytes.NewBuffer(nil) g := getter.Providers{getter.Provider{ Schemes: []string{"http", "https"}, New: getter.NewHTTPGetter, }} m := &Manager{ ChartPath: dir(c.Metadata.Name), Out: b, Getters: g, RepositoryConfig: dir("repositories.yaml"), RepositoryCache: dir(), } // Test the update err = m.Update() if err != nil { t.Fatal(err) } }
explode_data.jsonl/24400
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 677 }
[ 2830, 3393, 4289, 2354, 2753, 25243, 1155, 353, 8840, 836, 8, 341, 197, 322, 2573, 705, 264, 12418, 15867, 198, 1903, 10553, 11, 1848, 1669, 2064, 354, 477, 7121, 12151, 5475, 2354, 67335, 1155, 11, 330, 92425, 23540, 41428, 89, 9, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnvDeduplication(t *testing.T) { for desc, test := range map[string]struct { existing []string kv [][2]string expected []string }{ "single env": { kv: [][2]string{ {"a", "b"}, }, expected: []string{"a=b"}, }, "multiple envs": { kv: [][2]string{ {"a", "b"}, {"c", "d"}, {"e", "f"}, }, expected: []string{ "a=b", "c=d", "e=f", }, }, "env override": { kv: [][2]string{ {"k1", "v1"}, {"k2", "v2"}, {"k3", "v3"}, {"k3", "v4"}, {"k1", "v5"}, {"k4", "v6"}, }, expected: []string{ "k1=v5", "k2=v2", "k3=v4", "k4=v6", }, }, "existing env": { existing: []string{ "k1=v1", "k2=v2", "k3=v3", }, kv: [][2]string{ {"k3", "v4"}, {"k2", "v5"}, {"k4", "v6"}, }, expected: []string{ "k1=v1", "k2=v5", "k3=v4", "k4=v6", }, }, } { t.Logf("TestCase %q", desc) var spec runtimespec.Spec if len(test.existing) > 0 { spec.Process = &runtimespec.Process{ Env: test.existing, } } for _, kv := range test.kv { oci.WithEnv([]string{kv[0] + "=" + kv[1]})(context.Background(), nil, nil, &spec) } assert.Equal(t, test.expected, spec.Process.Env) } }
explode_data.jsonl/8828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 737 }
[ 2830, 3393, 14359, 35, 291, 454, 1693, 1155, 353, 8840, 836, 8, 341, 2023, 6560, 11, 1273, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 8122, 11083, 3056, 917, 198, 197, 16463, 85, 981, 508, 1457, 17, 30953, 198, 197, 42400, 3056, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestVStreamCopySimpleFlow(t *testing.T) { if testing.Short() { t.Skip() } execStatements(t, []string{ "create table t1(id11 int, id12 int, primary key(id11))", "create table t2(id21 int, id22 int, primary key(id21))", }) log.Infof("Pos before bulk insert: %s", primaryPosition(t)) insertLotsOfData(t, 10) log.Infof("Pos after bulk insert: %s", primaryPosition(t)) defer execStatements(t, []string{ "drop table t1", "drop table t2", }) engine.se.Reload(context.Background()) ctx := context.Background() qr, err := env.Mysqld.FetchSuperQuery(ctx, "SELECT count(*) as cnt from t1, t2 where t1.id11 = t2.id21") if err != nil { t.Fatal("Query failed") } require.Equal(t, "[[INT64(10)]]", fmt.Sprintf("%v", qr.Rows)) filter := &binlogdatapb.Filter{ Rules: []*binlogdatapb.Rule{{ Match: "t1", Filter: "select * from t1", }, { Match: "t2", Filter: "select * from t2", }}, } var tablePKs []*binlogdatapb.TableLastPK tablePKs = append(tablePKs, getTablePK("t1", 1)) tablePKs = append(tablePKs, getTablePK("t2", 2)) t1FieldEvent := []string{"begin", "type:FIELD field_event:{table_name:\"t1\" fields:{name:\"id11\" type:INT32 table:\"t1\" org_table:\"t1\" database:\"vttest\" org_name:\"id11\" column_length:11 charset:63} fields:{name:\"id12\" type:INT32 table:\"t1\" org_table:\"t1\" database:\"vttest\" org_name:\"id12\" column_length:11 charset:63}}"} t2FieldEvent := []string{"begin", "type:FIELD field_event:{table_name:\"t2\" fields:{name:\"id21\" type:INT32 table:\"t2\" org_table:\"t2\" database:\"vttest\" org_name:\"id21\" column_length:11 charset:63} fields:{name:\"id22\" type:INT32 table:\"t2\" org_table:\"t2\" database:\"vttest\" org_name:\"id22\" column_length:11 charset:63}}"} t1Events := []string{} t2Events := []string{} for i := 1; i <= 10; i++ { t1Events = append(t1Events, fmt.Sprintf("type:ROW row_event:{table_name:\"t1\" row_changes:{after:{lengths:%d lengths:%d values:\"%d%d\"}}}", len(strconv.Itoa(i)), len(strconv.Itoa(i*10)), i, i*10)) t2Events = append(t2Events, 
fmt.Sprintf("type:ROW row_event:{table_name:\"t2\" row_changes:{after:{lengths:%d lengths:%d values:\"%d%d\"}}}", len(strconv.Itoa(i)), len(strconv.Itoa(i*20)), i, i*20)) } t1Events = append(t1Events, "lastpk", "commit") t2Events = append(t2Events, "lastpk", "commit") insertEvents1 := []string{ "begin", "type:FIELD field_event:{table_name:\"t1\" fields:{name:\"id11\" type:INT32 table:\"t1\" org_table:\"t1\" database:\"vttest\" org_name:\"id11\" column_length:11 charset:63 column_type:\"int(11)\"} fields:{name:\"id12\" type:INT32 table:\"t1\" org_table:\"t1\" database:\"vttest\" org_name:\"id12\" column_length:11 charset:63 column_type:\"int(11)\"}}", "type:ROW row_event:{table_name:\"t1\" row_changes:{after:{lengths:3 lengths:4 values:\"1011010\"}}}", "gtid", "commit"} insertEvents2 := []string{ "begin", "type:FIELD field_event:{table_name:\"t2\" fields:{name:\"id21\" type:INT32 table:\"t2\" org_table:\"t2\" database:\"vttest\" org_name:\"id21\" column_length:11 charset:63 column_type:\"int(11)\"} fields:{name:\"id22\" type:INT32 table:\"t2\" org_table:\"t2\" database:\"vttest\" org_name:\"id22\" column_length:11 charset:63 column_type:\"int(11)\"}}", "type:ROW row_event:{table_name:\"t2\" row_changes:{after:{lengths:3 lengths:4 values:\"2022020\"}}}", "gtid", "commit"} testcases := []testcase{ { input: []string{}, output: [][]string{t1FieldEvent, {"gtid"}, t1Events, {"begin", "lastpk", "commit"}, t2FieldEvent, t2Events, {"begin", "lastpk", "commit"}}, }, { input: []string{ "insert into t1 values (101, 1010)", }, output: [][]string{insertEvents1}, }, { input: []string{ "insert into t2 values (202, 2020)", }, output: [][]string{insertEvents2}, }, } runCases(t, filter, testcases, "vscopy", tablePKs) log.Infof("Pos at end of test: %s", primaryPosition(t)) }
explode_data.jsonl/10403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1636 }
[ 2830, 3393, 53, 3027, 12106, 16374, 18878, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 532, 67328, 93122, 1155, 11, 3056, 917, 515, 197, 197, 1, 3182, 1965, 259, 16, 3724, 16, 16, 526, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetMethod(t *testing.T) { var test = &KOS fun1 := KDbug.GetMethod(test, "GoMemory") fun2 := KDbug.GetMethod(test, "Hello") if fun1 == nil || fun2 != nil { t.Error("GetMethod fail") return } }
explode_data.jsonl/74245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 85073, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 284, 609, 42, 3126, 271, 90126, 16, 1669, 62990, 2313, 2234, 3523, 8623, 11, 330, 10850, 10642, 1138, 90126, 17, 1669, 62990, 2313, 2234, 3523, 8623, 11, 330, 9707, 5130, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetUserInformation(t *testing.T) { expected := NewMockUserRecord("SYSOP") userRecs = []bbs.UserRecord{ expected, } req, err := http.NewRequest("GET", "/v1/users/SYSOP/information", nil) if err != nil { t.Fatal(err) } token := newAccessTokenWithUsername(expected.UserID()) t.Logf("testing token: %v", token) req.Header.Add("Authorization", "bearer "+token) rr := httptest.NewRecorder() r := http.NewServeMux() r.HandleFunc("/v1/users/", routeUsers) r.ServeHTTP(rr, req) if status := rr.Code; status != http.StatusOK { t.Errorf("handler returned wrong status code: got %v want %v", status, http.StatusOK) } responsedMap := map[string]interface{}{} json.Unmarshal(rr.Body.Bytes(), &responsedMap) t.Logf("got response %v", rr.Body.String()) responsedData := responsedMap["data"].(map[string]interface{}) if responsedData["user_id"] != expected.UserID() { t.Errorf("handler returned unexpected body, user_id not match: got %v want userID %v", rr.Body.String(), expected.UserID()) } }
explode_data.jsonl/30219
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 390 }
[ 2830, 3393, 1949, 1474, 14873, 1155, 353, 8840, 836, 8, 1476, 42400, 1669, 1532, 11571, 1474, 6471, 445, 37931, 3067, 5130, 19060, 3820, 82, 284, 3056, 65, 1279, 7344, 6471, 515, 197, 42400, 345, 197, 630, 24395, 11, 1848, 1669, 1758, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestLongLogicalTypeFallback(t *testing.T) { schema := `{"type": "long", "logicalType": "this_logical_type_does_not_exist"}` testSchemaValid(t, schema) testBinaryCodecPass(t, schema, 12345, []byte("\xf2\xc0\x01")) }
explode_data.jsonl/12002
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 6583, 64312, 929, 87206, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 1565, 4913, 1313, 788, 330, 4825, 497, 330, 30256, 929, 788, 330, 574, 86484, 1819, 96374, 7913, 35906, 9207, 3989, 18185, 8632, 4088, 1155, 11, 10802,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunIntegrationArtifactGetMplStatus(t *testing.T) { t.Parallel() t.Run("Successfully Test of Get Integration Flow MPL Status", func(t *testing.T) { apiServiceKey := `{ "oauth": { "url": "https://demo", "clientid": "demouser", "clientsecret": "******", "tokenurl": "https://demo/oauth/token" } }` config := integrationArtifactGetMplStatusOptions{ APIServiceKey: apiServiceKey, IntegrationFlowID: "flow1", } httpClient := httpMockCpis{CPIFunction: "IntegrationArtifactGetMplStatus", ResponseBody: ``, TestType: "Positive"} seOut := integrationArtifactGetMplStatusCommonPipelineEnvironment{} err := runIntegrationArtifactGetMplStatus(&config, nil, &httpClient, &seOut) if assert.NoError(t, err) { assert.EqualValues(t, seOut.custom.iFlowMplStatus, "COMPLETED") t.Run("check url", func(t *testing.T) { assert.Equal(t, "https://demo/api/v1/MessageProcessingLogs?$filter=IntegrationArtifact/Id+eq+'flow1'+and+Status+ne+'DISCARDED'&$orderby=LogEnd+desc&$top=1", httpClient.URL) }) t.Run("check method", func(t *testing.T) { assert.Equal(t, "GET", httpClient.Method) }) } }) t.Run("Failed Test of Get Integration Flow MPL Status", func(t *testing.T) { apiServiceKey := `{ "oauth": { "url": "https://demo", "clientid": "demouser", "clientsecret": "******", "tokenurl": "https://demo/oauth/token" } }` config := integrationArtifactGetMplStatusOptions{ APIServiceKey: apiServiceKey, IntegrationFlowID: "flow1", } httpClient := httpMockCpis{CPIFunction: "IntegrationArtifactGetMplStatus", ResponseBody: ``, TestType: "Negative"} seOut := integrationArtifactGetMplStatusCommonPipelineEnvironment{} err := runIntegrationArtifactGetMplStatus(&config, nil, &httpClient, &seOut) assert.EqualValues(t, seOut.custom.iFlowMplStatus, "") assert.EqualError(t, err, "HTTP GET request to https://demo/api/v1/MessageProcessingLogs?$filter=IntegrationArtifact/"+ "Id+eq+'flow1'+and+Status+ne+'DISCARDED'&$orderby=LogEnd+desc&$top=1 failed with error: "+ "Unable to get integration flow MPL status, Response Status 
code:400") }) }
explode_data.jsonl/49454
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 860 }
[ 2830, 3393, 6727, 52464, 85578, 1949, 44, 500, 2522, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 16708, 445, 35959, 3393, 315, 2126, 40069, 22452, 47356, 8104, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 54299, 1860, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntegration_ReadCRC(t *testing.T) { // Test that the checksum is handled correctly when reading files. // For gzipped files, see https://github.com/GoogleCloudPlatform/google-cloud-dotnet/issues/1641. if testing.Short() { t.Skip("Integration tests skipped in short mode") } const ( // This is an uncompressed file. // See https://cloud.google.com/storage/docs/public-datasets/landsat uncompressedBucket = "gcp-public-data-landsat" uncompressedObject = "LC08/PRE/044/034/LC80440342016259LGN00/LC80440342016259LGN00_MTL.txt" gzippedBucket = "storage-library-test-bucket" gzippedObject = "gzipped-text.txt" gzippedContents = "hello world" // uncompressed contents of the file ) ctx := context.Background() client, err := NewClient(ctx, option.WithoutAuthentication()) if err != nil { t.Fatal(err) } defer client.Close() for _, test := range []struct { desc string obj *ObjectHandle offset, length int64 readCompressed bool // don't decompress a gzipped file wantErr bool wantCheck bool // Should Reader try to check the CRC? wantChecked bool // Did Reader actually check the CRC? }{ { desc: "uncompressed, entire file", obj: client.Bucket(uncompressedBucket).Object(uncompressedObject), offset: 0, length: -1, readCompressed: false, wantCheck: true, wantChecked: true, }, { desc: "uncompressed, entire file, don't decompress", obj: client.Bucket(uncompressedBucket).Object(uncompressedObject), offset: 0, length: -1, readCompressed: true, wantCheck: true, wantChecked: true, }, { desc: "uncompressed, suffix", obj: client.Bucket(uncompressedBucket).Object(uncompressedObject), offset: 1, length: -1, readCompressed: false, wantCheck: false, wantChecked: false, }, { desc: "uncompressed, prefix", obj: client.Bucket(uncompressedBucket).Object(uncompressedObject), offset: 0, length: 18, readCompressed: false, wantCheck: false, wantChecked: false, }, { // When a gzipped file is unzipped by GCS, we can't verify the checksum // because it was computed against the zipped contents. 
There is no // header that indicates that a gzipped file is being served unzipped. // But our CRC check only happens if there is a Content-Length header, // and that header is absent for this read. desc: "compressed, entire file, server unzips", obj: client.Bucket(gzippedBucket).Object(gzippedObject), offset: 0, length: -1, readCompressed: false, wantCheck: true, wantChecked: false, }, { // When we read a gzipped file uncompressed, it's like reading a regular file: // the served content and the CRC match. desc: "compressed, entire file, read compressed", obj: client.Bucket(gzippedBucket).Object(gzippedObject), offset: 0, length: -1, readCompressed: true, wantCheck: true, wantChecked: true, }, { desc: "compressed, partial, server unzips", obj: client.Bucket(gzippedBucket).Object(gzippedObject), offset: 1, length: 8, readCompressed: false, wantErr: true, // GCS can't serve part of a gzipped object wantCheck: false, wantChecked: false, }, { desc: "compressed, partial, read compressed", obj: client.Bucket(gzippedBucket).Object(gzippedObject), offset: 1, length: 8, readCompressed: true, wantCheck: false, wantChecked: false, }, } { obj := test.obj.ReadCompressed(test.readCompressed) r, err := obj.NewRangeReader(ctx, test.offset, test.length) if err != nil { if test.wantErr { continue } t.Fatalf("%s: %v", test.desc, err) } if got, want := r.checkCRC, test.wantCheck; got != want { t.Errorf("%s, checkCRC: got %t, want %t", test.desc, got, want) } _, err = ioutil.ReadAll(r) _ = r.Close() if err != nil { t.Fatalf("%s: %v", test.desc, err) } if got, want := r.checkedCRC, test.wantChecked; got != want { t.Errorf("%s, checkedCRC: got %t, want %t", test.desc, got, want) } } }
explode_data.jsonl/8915
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1965 }
[ 2830, 3393, 52464, 38381, 83339, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 429, 279, 32529, 374, 17608, 12440, 979, 5290, 3542, 624, 197, 322, 1752, 76963, 6450, 3542, 11, 1490, 3703, 1110, 5204, 905, 14, 14444, 16055, 17296, 40689,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestAddress_Parse(t *testing.T) { for i, test := range addrTests { addr := &Address{} if err := addr.Parse(test.fields); err != nil { t.Error("Error parsing address:", err) } else if !reflect.DeepEqual(addr, test.addr) { t.Errorf("Invalid address for #%v: got %v but expected %v", i, addr, test.addr) } } }
explode_data.jsonl/43049
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 4286, 77337, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 1273, 1669, 2088, 10789, 18200, 341, 197, 53183, 1669, 609, 4286, 31483, 197, 743, 1848, 1669, 10789, 8937, 8623, 12920, 1215, 1848, 961, 2092, 341, 298, 3244, 6141, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRPC_QueryTX(t *testing.T) { codec.UpgradeHeight = 7000 var tx *types.TxResponse _, _, cleanup := NewInMemoryTendermintNode(t, oneValTwoNodeGenesisState()) _, _, evtChan := subscribeTo(t, tmTypes.EventNewBlock) <-evtChan // Wait for block _, stopCli, evtChan := subscribeTo(t, tmTypes.EventTx) kb := getInMemoryKeybase() cb, err := kb.GetCoinbase() assert.Nil(t, err) tx, err = nodes.Send(memCodec(), memCLI, kb, cb.GetAddress(), cb.GetAddress(), "test", types.NewInt(100), true) assert.Nil(t, err) <-evtChan // Wait for tx var params = HashAndProveParams{ Hash: tx.TxHash, } q := newQueryRequest("tx", newBody(params)) rec := httptest.NewRecorder() Tx(rec, q, httprouter.Params{}) resp := getJSONResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) var resTX core_types.ResultTx err = json.Unmarshal([]byte(resp), &resTX) assert.Nil(t, err) assert.NotEmpty(t, resTX.Height) memCLI, _, evtChan = subscribeTo(t, tmTypes.EventNewBlock) <-evtChan // Wait for block q = newQueryRequest("tx", newBody(params)) rec = httptest.NewRecorder() Tx(rec, q, httprouter.Params{}) resp = getJSONResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) var resTX2 core_types.ResultTx err = json.Unmarshal([]byte(resp), &resTX2) assert.Nil(t, err) assert.NotEmpty(t, resTX2.Height) cleanup() stopCli() }
explode_data.jsonl/44707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 29528, 48042, 22867, 1155, 353, 8840, 836, 8, 341, 43343, 66, 13, 43861, 3640, 284, 220, 22, 15, 15, 15, 198, 2405, 9854, 353, 9242, 81362, 2582, 198, 197, 6878, 8358, 21290, 1669, 1532, 641, 10642, 51, 1659, 67791, 1955, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrivateKeyFromHex(t *testing.T) { //str1 := "3714c34e68f8481d" //str2 := "9e3647445d5ca65e" //str3 := "9d150ddb24d2182a" //str4 := "6ac12143f1293835" //pri := core.PrivateKeyFromBytes(hexToBytes(str1 + str2 + str3 + str4)) //if pri == nil { // t.Error("pri is nil") //} //pub := pri.PubKey() //pub.Compressed = true //address, err := AddressFromPublicKey(pub.ToBytes()) //if err != nil { // t.Error(err.Error()) //} }
explode_data.jsonl/60823
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 75981, 3830, 20335, 1155, 353, 8840, 836, 8, 1476, 197, 322, 495, 16, 1669, 330, 18, 22, 16, 19, 66, 18, 19, 68, 21, 23, 69, 23, 19, 23, 16, 67, 698, 197, 322, 495, 17, 1669, 330, 24, 68, 18, 21, 19, 22, 19, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainsTargetScriptHash(t *testing.T) { //https://neotracker.io/tx/8f691ec7b9e9979964de9ce3f994588f31d6b6fea2588081d20010c14f32138d p := smartcontract.NewParserWithScript("0600027264cd0414aaef53a5153128fcb268b0337e8e7eae5724c78f146063795d3b9b3cd55aef026eae992b91063db0db53c1087472616e7366657267cf9472821400ceb06ca780c2a937fec5bbec51b9f166f73405540036bc1d") dbcScriptHash := "b951ecbbc5fe37a9c280a76cb0ce0014827294cf" contains := p.ContainsScriptHash(dbcScriptHash) log.Printf("%v", contains) }
explode_data.jsonl/28991
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 23805, 6397, 5910, 6370, 1155, 353, 8840, 836, 8, 341, 197, 322, 2428, 1110, 811, 86379, 9683, 4245, 14, 3998, 14, 23, 69, 21, 24, 16, 757, 22, 65, 24, 68, 24, 24, 22, 24, 24, 21, 19, 450, 24, 346, 18, 69, 24, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateDisableValidation(t *testing.T) { Given(t). Path("baddir"). When(). CreateApp("--validate=false"). Then(). And(func(app *Application) { _, err := RunCli("app", "create", app.Name, "--upsert", "--validate=false", "--repo", RepoURL(RepoURLTypeFile), "--path", "baddir2", "--project", app.Spec.Project, "--dest-server", KubernetesInternalAPIServerAddr, "--dest-namespace", DeploymentNamespace()) assert.NoError(t, err) }). When(). AppSet("--path", "baddir3", "--validate=false") }
explode_data.jsonl/35663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 4021, 25479, 13799, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 69640, 445, 65, 44525, 38609, 197, 197, 4498, 25829, 197, 75569, 2164, 21549, 7067, 12219, 38609, 197, 197, 12209, 25829, 197, 197, 3036, 18552, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckPatch(t *testing.T) { mocks, checkService := newCheckSvcStack() ch := mocks.pipingCoordinator.taskUpdatedChan() deadman := &check.Deadman{} deadman.SetTaskID(4) mocks.checkSvc.PatchCheckFn = func(context.Context, platform.ID, influxdb.CheckUpdate) (influxdb.Check, error) { return deadman, nil } check, err := checkService.PatchCheck(context.Background(), 1, influxdb.CheckUpdate{}) if err != nil { t.Fatal(err) } select { case task := <-ch: if task.ID != check.GetTaskID() { t.Fatalf("task sent to coordinator doesn't match expected") } default: t.Fatal("didn't receive task") } }
explode_data.jsonl/72196
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 3973, 43622, 1155, 353, 8840, 836, 8, 341, 2109, 25183, 11, 1779, 1860, 1669, 501, 3973, 92766, 4336, 741, 23049, 1669, 68909, 556, 46095, 64304, 15034, 16196, 46019, 2822, 197, 33754, 1515, 1669, 609, 2028, 909, 3149, 1515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSeparateV4V6WithWildcardCIDRPrefix(t *testing.T) { cfg := constructTestConfig() iptConfigurator := NewIptablesConfigurator(cfg, &dep.StdoutStubDependencies{}) v4Range, v6Range, _ := iptConfigurator.separateV4V6("*") if !v4Range.IsWildcard || !v6Range.IsWildcard { t.Errorf("Expected v4Range and v6Range to be wildcards") } }
explode_data.jsonl/65390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 14492, 349, 53, 19, 53, 21, 2354, 92988, 54146, 49, 14335, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 9245, 2271, 2648, 2822, 8230, 417, 2648, 57383, 1669, 1532, 40, 417, 4788, 2648, 57383, 28272, 11, 609, 14891, 83225, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewNoDate(t *testing.T) { testTime := time.Date(2000, 12, 15, 17, 8, 0, 0, time.Local) timeNow = func() time.Time { return testTime } lib, err := New() if assert.NoError(t, err) { assert.True(t, testTime.Local().Equal(lib.ToTime())) assert.True(t, lib.ToTime().Location() == time.Local) } // Reset timeNow. timeNow = time.Now }
explode_data.jsonl/73965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 3564, 2753, 1916, 1155, 353, 8840, 836, 8, 341, 18185, 1462, 1669, 882, 8518, 7, 17, 15, 15, 15, 11, 220, 16, 17, 11, 220, 16, 20, 11, 220, 16, 22, 11, 220, 23, 11, 220, 15, 11, 220, 15, 11, 882, 20856, 340, 2195...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSelf(t *testing.T) { t.Parallel() cs := &cryptoService{} pkiID1 := common.PKIidType("1") jcm := &joinChanMsg{ members2AnchorPeers: map[string][]api.AnchorPeer{ string(orgInChannelA): {}, }, } adapter := new(gossipAdapterMock) configureAdapter(adapter) adapter.On("Gossip", mock.Anything) gc := NewGossipChannel(pkiID1, orgInChannelA, cs, channelA, adapter, jcm, disabledMetrics) gc.UpdateLedgerHeight(1) gMsg := gc.Self().GossipMessage env := gc.Self().Envelope sMsg, _ := env.ToGossipMessage() assert.True(t, gproto.Equal(gMsg, sMsg.GossipMessage)) assert.Equal(t, gMsg.GetStateInfo().Properties.LedgerHeight, uint64(1)) assert.Equal(t, gMsg.GetStateInfo().PkiId, []byte("1")) }
explode_data.jsonl/66311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 12092, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 71899, 1669, 609, 35772, 1860, 16094, 3223, 6642, 915, 16, 1669, 4185, 1069, 80971, 307, 929, 445, 16, 1138, 12428, 6226, 1669, 609, 5987, 46019, 6611, 515, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseFile(t *testing.T) { _, err := ParseFile("", nil, nil, 0) if err == nil { t.Error("missing io error") } var first error _, err = ParseFile("", func(err error) { if first == nil { first = err } }, nil, 0) if err == nil || first == nil { t.Error("missing io error") } if err != first { t.Errorf("got %v; want first error %v", err, first) } }
explode_data.jsonl/24651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 14463, 1703, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 14775, 1703, 19814, 2092, 11, 2092, 11, 220, 15, 340, 743, 1848, 621, 2092, 341, 197, 3244, 6141, 445, 30616, 6399, 1465, 1138, 197, 630, 2405, 1156, 1465, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEditorMoveWord(t *testing.T) { type Test struct { Text string Start int Skip int Want int } tests := []Test{ {"", 0, 0, 0}, {"", 0, -1, 0}, {"", 0, 1, 0}, {"hello", 0, -1, 0}, {"hello", 0, 1, 5}, {"hello world", 3, 1, 5}, {"hello world", 3, -1, 0}, {"hello world", 8, -1, 6}, {"hello world", 8, 1, 11}, {"hello world", 3, 1, 5}, {"hello world", 3, 2, 14}, {"hello world", 8, 1, 14}, {"hello world", 8, -1, 0}, {"hello brave new world", 0, 3, 15}, } setup := func(t string) *Editor { e := new(Editor) gtx := layout.Context{ Ops: new(op.Ops), Constraints: layout.Exact(image.Pt(100, 100)), } cache := text.NewCache(gofont.Collection()) fontSize := unit.Px(10) font := text.Font{} e.SetText(t) e.Layout(gtx, cache, font, fontSize, nil) return e } for ii, tt := range tests { e := setup(tt.Text) e.MoveCaret(tt.Start, tt.Start) e.moveWord(tt.Skip, selectionClear) if e.caret.start.ofs != tt.Want { t.Fatalf("[%d] moveWord: bad caret position: got %d, want %d", ii, e.caret.start.ofs, tt.Want) } } }
explode_data.jsonl/27264
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 530 }
[ 2830, 3393, 9410, 9860, 10879, 1155, 353, 8840, 836, 8, 341, 13158, 3393, 2036, 341, 197, 49635, 220, 914, 198, 197, 65999, 526, 198, 197, 7568, 13389, 220, 526, 198, 197, 17300, 517, 220, 526, 198, 197, 532, 78216, 1669, 3056, 2271, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAutoscalingGroup_spot(t *testing.T) { t.Parallel() if testing.Short() { t.Skip("skipping test in short mode") } tf := ` resource "aws_launch_template" "lt_spot" { image_id = "fake_ami" instance_type = "t3.medium" instance_market_options { market_type = "spot" } } resource "aws_autoscaling_group" "asg_lt_spot" { launch_template { id = aws_launch_template.lt_spot.id } desired_capacity = 2 max_size = 3 min_size = 1 } resource "aws_launch_template" "lt_mixed_instance_spot" { image_id = "fake_ami" instance_type = "t3.medium" } resource "aws_autoscaling_group" "asg_mixed_instance_spot" { desired_capacity = 6 max_size = 10 min_size = 1 mixed_instances_policy { launch_template { launch_template_specification { launch_template_id = aws_launch_template.lt_mixed_instance_spot.id } } instances_distribution { on_demand_base_capacity = 2 on_demand_percentage_above_base_capacity = 50 } } } ` resourceChecks := []testutil.ResourceCheck{ { Name: "aws_autoscaling_group.asg_lt_spot", SubResourceChecks: []testutil.ResourceCheck{ { Name: "aws_launch_template.lt_spot", CostComponentChecks: []testutil.CostComponentCheck{ { Name: "Instance usage (Linux/UNIX, spot, t3.medium)", PriceHash: "c8faba8210cd512ccab6b71ca400f4de-803d7f1cd2f621429b63f791730e7935", HourlyCostCheck: testutil.HourlyPriceMultiplierCheck(decimal.NewFromInt(2)), }, { Name: "CPU credits", PriceHash: "ccdf11d8e4c0267d78a19b6663a566c1-e8e892be2fbd1c8f42fd6761ad8977d8", MonthlyCostCheck: testutil.MonthlyPriceMultiplierCheck(decimal.Zero), }, }, }, }, }, { Name: "aws_autoscaling_group.asg_mixed_instance_spot", SubResourceChecks: []testutil.ResourceCheck{ { Name: "aws_launch_template.lt_mixed_instance_spot", CostComponentChecks: []testutil.CostComponentCheck{ { Name: "Instance usage (Linux/UNIX, on-demand, t3.medium)", PriceHash: "c8faba8210cd512ccab6b71ca400f4de-d2c98780d7b6e36641b521f1f8145c6f", HourlyCostCheck: testutil.HourlyPriceMultiplierCheck(decimal.NewFromInt(4)), }, { Name: "Instance usage (Linux/UNIX, spot, 
t3.medium)", PriceHash: "c8faba8210cd512ccab6b71ca400f4de-803d7f1cd2f621429b63f791730e7935", HourlyCostCheck: testutil.HourlyPriceMultiplierCheck(decimal.NewFromInt(2)), }, { Name: "CPU credits", PriceHash: "ccdf11d8e4c0267d78a19b6663a566c1-e8e892be2fbd1c8f42fd6761ad8977d8", MonthlyCostCheck: testutil.MonthlyPriceMultiplierCheck(decimal.Zero), }, }, }, }, }, } tftest.ResourceTests(t, tf, schema.NewEmptyUsageMap(), resourceChecks) }
explode_data.jsonl/78710
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1473 }
[ 2830, 3393, 19602, 436, 81552, 2808, 84766, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 304, 2805, 3856, 1138, 197, 630, 3244, 69, 1669, 22074, 50346, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCollection_Upsert(t *testing.T) { mgoClient := Ins() result, err := mgoClient.C("test").Upsert(bson.M{"name": "test"}, bson.M{"name": "test", "age": 18}) if err != nil { t.Errorf("Upsert error: %s", err) t.FailNow() } t.Log("Upsert ok: ", result) }
explode_data.jsonl/30109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 6482, 6665, 1690, 529, 1155, 353, 8840, 836, 8, 341, 2109, 3346, 2959, 1669, 9726, 2822, 9559, 11, 1848, 1669, 296, 3346, 2959, 727, 445, 1944, 1827, 98778, 529, 1883, 930, 1321, 4913, 606, 788, 330, 1944, 14345, 50980, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDistinctByStruct(t *testing.T) { type a struct { Id int32 } as := []*a{ {Id: 1}, {Id: 1}, {Id: 3}, } res := DistinctBy(as, func(v *a) int32 { return v.Id }).([]*a) t.Log(res[0].Id) }
explode_data.jsonl/66309
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 72767, 1359, 9422, 1155, 353, 8840, 836, 8, 341, 13158, 264, 2036, 314, 5223, 526, 18, 17, 456, 60451, 1669, 29838, 64, 515, 197, 197, 90, 764, 25, 220, 16, 1583, 197, 197, 90, 764, 25, 220, 16, 1583, 197, 197, 90, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListAvailableProjects(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() HandleListAvailableProjectsSuccessfully(t) count := 0 err := projects.ListAvailable(client.ServiceClient()).EachPage(func(page pagination.Page) (bool, error) { count++ actual, err := projects.ExtractProjects(page) th.AssertNoErr(t, err) th.CheckDeepEquals(t, ExpectedAvailableProjectsSlice, actual) return true, nil }) th.AssertNoErr(t, err) th.CheckEquals(t, count, 1) }
explode_data.jsonl/49966
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 852, 16485, 29958, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 741, 197, 6999, 852, 16485, 29958, 35959, 1155, 692, 18032, 1669, 220, 15, 198, 9859, 1669, 7079, 5814, 16485, 12805, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWFLevelAutomountServiceAccountToken(t *testing.T) { woc := newWoc() ctx := context.Background() _, err := util.CreateServiceAccountWithToken(ctx, woc.controller.kubeclientset, "", "foo", "foo-token") assert.NoError(t, err) falseValue := false woc.execWf.Spec.AutomountServiceAccountToken = &falseValue woc.execWf.Spec.Executor = &wfv1.ExecutorConfig{ServiceAccountName: "foo"} tmplCtx, err := woc.createTemplateContext(wfv1.ResourceScopeLocal, "") assert.NoError(t, err) _, err = woc.executeContainer(ctx, woc.execWf.Spec.Entrypoint, tmplCtx.GetTemplateScope(), &woc.execWf.Spec.Templates[0], &wfv1.WorkflowStep{}, &executeTemplateOpts{}) assert.NoError(t, err) pods, err := listPods(woc) assert.NoError(t, err) assert.Len(t, pods.Items, 1) pod := pods.Items[0] assert.Equal(t, *pod.Spec.AutomountServiceAccountToken, false) }
explode_data.jsonl/75366
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 32131, 4449, 41072, 629, 1860, 7365, 3323, 1155, 353, 8840, 836, 8, 341, 6692, 509, 1669, 501, 54, 509, 741, 20985, 1669, 2266, 19047, 741, 197, 6878, 1848, 1669, 4094, 7251, 1860, 7365, 2354, 3323, 7502, 11, 289, 509, 145...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestToInt8E(t *testing.T) { type args struct { s string } tests := []struct { name string args args want int8 wantErr bool }{ { name: "", args: args{ s: "21", }, want: 21, wantErr: false, }, { name: "", args: args{ s: "21a", }, want: 0, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := ToInt8E(tt.args.s) if (err != nil) != tt.wantErr { t.Errorf("ToInt8E() error = %v, wantErr %v", err, tt.wantErr) return } if got != tt.want { t.Errorf("ToInt8E() got = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/5472
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 365 }
[ 2830, 3393, 38544, 23, 36, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 2827, 198, 197, 50780, 262, 526, 23, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGenModel_Issue251(t *testing.T) { specDoc, err := loads.Spec("../fixtures/bugs/251/swagger.yml") require.NoError(t, err) definitions := specDoc.Spec().Definitions k := "example" opts := opts() genModel, err := makeGenDefinition(k, "models", definitions[k], specDoc, opts) require.NoError(t, err) buf := bytes.NewBuffer(nil) require.NoError(t, opts.templates.MustGet("model").Execute(buf, genModel)) ct, err := opts.LanguageOpts.FormatContent("example.go", buf.Bytes()) require.NoError(t, err) res := string(ct) assertInCode(t, "type "+swag.ToGoName(k)+" struct", res) assertInCode(t, "Begin *strfmt.DateTime `json:\"begin\"`", res) assertInCode(t, "End strfmt.DateTime `json:\"end,omitempty\"`", res) assertInCode(t, "Name string `json:\"name,omitempty\"`", res) assertInCode(t, "(m *"+swag.ToGoName(k)+") validateBegin", res) assertInCode(t, "(m *"+swag.ToGoName(k)+") Validate", res) }
explode_data.jsonl/2541
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 362 }
[ 2830, 3393, 9967, 1712, 7959, 83890, 17, 20, 16, 1155, 353, 8840, 836, 8, 341, 98100, 9550, 11, 1848, 1669, 20907, 36473, 17409, 45247, 14, 56176, 14, 17, 20, 16, 80930, 33936, 1138, 17957, 35699, 1155, 11, 1848, 692, 7452, 4054, 82, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFirstTrans(t *testing.T) { trans := newFirstTrans() Convey("TestFirstTrans", t, func() { Convey("trans exec succ", func() { transInfo := &transdsl.TransInfo{ AppInfo: &context.StubInfo{ X: "first", Y: 1, }, } err := trans.Start(transInfo) So(err, ShouldEqual, nil) So(transInfo.AppInfo.(*context.StubInfo).Y, ShouldEqual, 8) }) Convey("trans exec failed", func() { transInfo := &transdsl.TransInfo{ AppInfo: &context.StubInfo{ X: "test", Y: 1, }, } err := trans.Start(transInfo) So(err, ShouldNotEqual, nil) So(transInfo.AppInfo.(*context.StubInfo).Y, ShouldEqual, 0) }) }) }
explode_data.jsonl/19235
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 5338, 3167, 1155, 353, 8840, 836, 8, 341, 72453, 1669, 501, 5338, 3167, 741, 93070, 5617, 445, 2271, 5338, 3167, 497, 259, 11, 2915, 368, 1476, 197, 93070, 5617, 445, 1458, 3883, 20038, 497, 2915, 368, 341, 298, 72453, 173...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewFromRepoNoObjectPool(t *testing.T) { pool, testRepo := setupObjectPool(t) testRepoPath := filepath.Join(pool.cfg.Storages[0].Path, testRepo.RelativePath) // no alternates file poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, nil, testRepo) require.Equal(t, ErrAlternateObjectDirNotExist, err) require.Nil(t, poolFromRepo) // with an alternates file testCases := []struct { desc string fileContent []byte expectedErr error }{ { desc: "points to non existent path", fileContent: []byte("/tmp/invalid_path"), expectedErr: ErrInvalidPoolRepository, }, { desc: "empty file", fileContent: nil, expectedErr: nil, }, { desc: "first line commented out", fileContent: []byte("#/tmp/invalid/path"), expectedErr: ErrAlternateObjectDirNotExist, }, } require.NoError(t, os.MkdirAll(filepath.Join(testRepoPath, "objects", "info"), 0755)) for _, tc := range testCases { t.Run(tc.desc, func(t *testing.T) { alternateFilePath := filepath.Join(testRepoPath, "objects", "info", "alternates") require.NoError(t, ioutil.WriteFile(alternateFilePath, tc.fileContent, 0644)) poolFromRepo, err := FromRepo(pool.cfg, pool.locator, pool.gitCmdFactory, nil, testRepo) require.Equal(t, tc.expectedErr, err) require.Nil(t, poolFromRepo) require.NoError(t, os.Remove(alternateFilePath)) }) } }
explode_data.jsonl/44265
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 582 }
[ 2830, 3393, 3564, 3830, 25243, 2753, 1190, 10551, 1155, 353, 8840, 836, 8, 341, 85273, 11, 1273, 25243, 1669, 6505, 1190, 10551, 1155, 692, 18185, 25243, 1820, 1669, 26054, 22363, 41838, 30481, 7758, 269, 1134, 58, 15, 936, 1820, 11, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQueueOneReaderOneWriter(t *testing.T) { clus := NewClusterV3(t, &ClusterConfig{Size: 1}) defer clus.Terminate(t) done := make(chan struct{}) go func() { defer func() { done <- struct{}{} }() etcdc := clus.RandClient() q := recipe.NewQueue(etcdc, "testq") for i := 0; i < 5; i++ { if err := q.Enqueue(fmt.Sprintf("%d", i)); err != nil { t.Fatalf("error enqueuing (%v)", err) } } }() etcdc := clus.RandClient() q := recipe.NewQueue(etcdc, "testq") for i := 0; i < 5; i++ { s, err := q.Dequeue() if err != nil { t.Fatalf("error dequeueing (%v)", err) } if s != fmt.Sprintf("%d", i) { t.Fatalf("expected dequeue value %v, got %v", s, i) } } <-done }
explode_data.jsonl/14548
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 332 }
[ 2830, 3393, 7554, 3966, 5062, 3966, 6492, 1155, 353, 8840, 836, 8, 341, 197, 4163, 1669, 1532, 28678, 53, 18, 1155, 11, 609, 28678, 2648, 90, 1695, 25, 220, 16, 3518, 16867, 1185, 355, 836, 261, 34016, 1155, 692, 40495, 1669, 1281, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateSortedSignerArray(t *testing.T) { privVal1 := mock.NewPV() pubKey1, err := privVal1.GetPubKey() require.NoError(t, err) privVal2 := mock.NewPV() pubKey2, err := privVal2.GetPubKey() require.NoError(t, err) validator1 := tmtypes.NewValidator(pubKey1, 1) validator2 := tmtypes.NewValidator(pubKey2, 2) expected := []tmtypes.PrivValidator{privVal2, privVal1} actual := tibctesting.CreateSortedSignerArray(privVal1, privVal2, validator1, validator2) require.Equal(t, expected, actual) // swap order actual = tibctesting.CreateSortedSignerArray(privVal2, privVal1, validator2, validator1) require.Equal(t, expected, actual) // smaller address validator1.Address = []byte{1} validator2.Address = []byte{2} validator2.VotingPower = 1 expected = []tmtypes.PrivValidator{privVal1, privVal2} actual = tibctesting.CreateSortedSignerArray(privVal1, privVal2, validator1, validator2) require.Equal(t, expected, actual) // swap order actual = tibctesting.CreateSortedSignerArray(privVal2, privVal1, validator2, validator1) require.Equal(t, expected, actual) }
explode_data.jsonl/18666
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 408 }
[ 2830, 3393, 4021, 51051, 7264, 261, 1857, 1155, 353, 8840, 836, 8, 341, 71170, 2208, 16, 1669, 7860, 7121, 48469, 741, 62529, 1592, 16, 11, 1848, 1669, 6095, 2208, 16, 2234, 29162, 1592, 741, 17957, 35699, 1155, 11, 1848, 692, 71170, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSerializeConnectionJson(t *testing.T) { ctx := context.Background() c, rollback := makeConnectionAuth(t) defer rollback() serializedConnection, err := json.Marshal(c) if err != nil { t.Fatalf("Failed to serialize connection: %v", err) } c2 := new(swift.Connection) err = json.Unmarshal(serializedConnection, &c2) if err != nil { t.Fatalf("Failed to unserialize connection: %v", err) } if !c2.Authenticated() { t.Fatal("Should be authenticated") } _, _, err = c2.Account(ctx) if err != nil { t.Fatalf("Failed to use unserialized connection: %v", err) } }
explode_data.jsonl/12658
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 15680, 4526, 5014, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 11, 60414, 1669, 1281, 4526, 5087, 1155, 340, 16867, 60414, 741, 197, 75277, 4526, 11, 1848, 1669, 2951, 37271, 1337, 340, 743, 1848, 961,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTextRanges(t *testing.T) { for name, tc := range map[string]struct { Markdown string ExpectedRanges []Range ExpectedValues []string }{ "simple": { Markdown: "hello", ExpectedRanges: []Range{{0, 5}}, ExpectedValues: []string{"hello"}, }, "simple2": { Markdown: "hello!", ExpectedRanges: []Range{{0, 6}}, ExpectedValues: []string{"hello!"}, }, "multiline": { Markdown: "hello world\nfoobar", ExpectedRanges: []Range{{0, 11}, {12, 18}}, ExpectedValues: []string{"hello world", "foobar"}, }, "code": { Markdown: "hello `code` world", ExpectedRanges: []Range{{0, 6}, {12, 18}}, ExpectedValues: []string{"hello ", " world"}, }, "notcode": { Markdown: "hello ` world", ExpectedRanges: []Range{{0, 13}}, ExpectedValues: []string{"hello ` world"}, }, "escape": { Markdown: "\\*hello\\*", ExpectedRanges: []Range{{1, 7}, {8, 9}}, ExpectedValues: []string{"*hello", "*"}, }, "escapeescape": { Markdown: "\\\\", ExpectedRanges: []Range{{1, 2}}, ExpectedValues: []string{"\\"}, }, "notescape": { Markdown: "foo\\x", ExpectedRanges: []Range{{0, 5}}, ExpectedValues: []string{"foo\\x"}, }, "notlink": { Markdown: "[foo", ExpectedRanges: []Range{{0, 4}}, ExpectedValues: []string{"[foo"}, }, "notlinkend": { Markdown: "[foo]", ExpectedRanges: []Range{{0, 5}}, ExpectedValues: []string{"[foo]"}, }, "notimage": { Markdown: "![foo", ExpectedRanges: []Range{{0, 5}}, ExpectedValues: []string{"![foo"}, }, "notimage2": { Markdown: "!foo", ExpectedRanges: []Range{{0, 4}}, ExpectedValues: []string{"!foo"}, }, "charref": { Markdown: "&quot;test", ExpectedRanges: []Range{{0, 1}, {6, 10}}, ExpectedValues: []string{"\"", "test"}, }, "notcharref": { Markdown: "&amp test", ExpectedRanges: []Range{{0, 9}}, ExpectedValues: []string{"&amp test"}, }, "notcharref2": { Markdown: "this is &mattermost;", ExpectedRanges: []Range{{0, 20}}, ExpectedValues: []string{"this is &mattermost;"}, }, "standalone-ampersand": { Markdown: "Hello & World", ExpectedRanges: []Range{{0, 13}}, ExpectedValues: []string{"Hello & 
World"}, }, } { t.Run(name, func(t *testing.T) { var ranges []Range var values []string Inspect(tc.Markdown, func(node interface{}) bool { if textNode, ok := node.(*Text); ok { ranges = append(ranges, textNode.Range) values = append(values, textNode.Text) } return true }) assert.Equal(t, tc.ExpectedRanges, ranges) assert.Equal(t, tc.ExpectedValues, values) }) } }
explode_data.jsonl/38283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1262 }
[ 2830, 3393, 1178, 74902, 1155, 353, 8840, 836, 8, 341, 2023, 829, 11, 17130, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 197, 68005, 981, 914, 198, 197, 197, 18896, 74902, 3056, 6046, 198, 197, 197, 18896, 6227, 3056, 917, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConnected(t *testing.T) { n := New("testconnected", lookupFunc) // Add a new address and get it err := n.addAddressByIP(someIP + ":8333") if err != nil { t.Fatalf("Adding address failed: %v", err) } ka := n.GetAddress() na := ka.NetAddress() // make it an hour ago na.Timestamp = time.Unix(time.Now().Add(time.Hour*-1).Unix(), 0) n.Connected(na) if !ka.NetAddress().Timestamp.After(na.Timestamp) { t.Errorf("Address should have a new timestamp, but does not") } }
explode_data.jsonl/26479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 21146, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 1532, 445, 1944, 15288, 497, 18615, 9626, 692, 197, 322, 2691, 264, 501, 2621, 323, 633, 432, 198, 9859, 1669, 308, 1364, 4286, 1359, 3298, 1141, 635, 3298, 488, 13022, 23, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGet(t *testing.T) { m := NewSharedMap() // Get a missing element. val, ok := m.Load("Money") if ok == true { t.Error("ok should be false when item is missing from map.") } if val != nil { t.Error("Missing values should return as null.") } elephant := Animal{"elephant"} m.Store("elephant", elephant) // Retrieve inserted element. tmp, ok := m.Load("elephant") if ok == false { t.Error("ok should be true for item stored within the map.") } elephant, ok = tmp.(Animal) // Type assertion. if !ok { t.Error("expecting an element, not null.") } if elephant.name != "elephant" { t.Error("item was modified.") } }
explode_data.jsonl/77832
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 1949, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 16997, 2227, 2822, 197, 322, 2126, 264, 7402, 2392, 624, 19302, 11, 5394, 1669, 296, 13969, 445, 24786, 5130, 743, 5394, 621, 830, 341, 197, 3244, 6141, 445, 562, 1265,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCancelExchangeOrder(t *testing.T) { p.SetDefaults() TestSetup(t) if areTestAPIKeysSet() && !canManipulateRealOrders { t.Skip("API keys set, canManipulateRealOrders false, skipping test") } currencyPair := currency.NewPair(currency.LTC, currency.BTC) var orderCancellation = &exchange.OrderCancellation{ OrderID: "1", WalletAddress: "1F5zVDgNjorJ51oGebSvNCrSAHpwGkUdDB", AccountID: "1", CurrencyPair: currencyPair, } err := p.CancelOrder(orderCancellation) if !areTestAPIKeysSet() && err == nil { t.Error("Expecting an error when no keys are set") } if areTestAPIKeysSet() && err != nil { t.Errorf("Could not cancel orders: %v", err) } }
explode_data.jsonl/23572
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 9269, 31564, 4431, 1155, 353, 8840, 836, 8, 341, 3223, 4202, 16273, 741, 73866, 21821, 1155, 692, 743, 525, 2271, 7082, 8850, 1649, 368, 1009, 753, 4814, 92876, 6334, 12768, 24898, 341, 197, 3244, 57776, 445, 7082, 6894, 738...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetUsersNotInChannel(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() defer th.TearDown() Client := th.Client teamId := th.BasicTeam.Id channelId := th.BasicChannel.Id user := th.CreateUser() th.LinkUserToTeam(user, th.BasicTeam) rusers, resp := Client.GetUsersNotInChannel(teamId, channelId, 0, 60, "") CheckNoError(t, resp) for _, u := range rusers { CheckUserSanitization(t, u) } rusers, resp = Client.GetUsersNotInChannel(teamId, channelId, 0, 1, "") CheckNoError(t, resp) if len(rusers) != 1 { t.Log(len(rusers)) t.Fatal("should be 1 per page") } rusers, resp = Client.GetUsersNotInChannel(teamId, channelId, 10000, 100, "") CheckNoError(t, resp) if len(rusers) != 0 { t.Fatal("should be no users") } Client.Logout() _, resp = Client.GetUsersNotInChannel(teamId, channelId, 0, 60, "") CheckUnauthorizedStatus(t, resp) Client.Login(user.Email, user.Password) _, resp = Client.GetUsersNotInChannel(teamId, channelId, 0, 60, "") CheckForbiddenStatus(t, resp) _, resp = th.SystemAdminClient.GetUsersNotInChannel(teamId, channelId, 0, 60, "") CheckNoError(t, resp) }
explode_data.jsonl/21542
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 436 }
[ 2830, 3393, 1949, 7137, 2623, 641, 9629, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 197, 9196, 764, 1669, 270, 48868, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestImgpkgLockOutputSuccessful(t *testing.T) { env := BuildEnv(t) kbld := Kbld{t, env.Namespace, env.KbldBinaryPath, Logger{}} input := ` images: - image: nginx:1.14.2 - image: sample-app - sidecarImage: sample-app - - sample-app --- apiVersion: kbld.k14s.io/v1alpha1 kind: ImageOverrides overrides: - image: sample-app newImage: nginx:1.15.1 --- apiVersion: kbld.k14s.io/v1alpha1 kind: ImageKeys keys: - sidecarImage --- apiVersion: kbld.k14s.io/v1alpha1 kind: Config searchRules: - keyMatcher: path: [images, {allIndexes: true}, {index: 0}] ` path := "/tmp/kbld-test-lock-output-successful" defer os.RemoveAll(path) out, _ := kbld.RunWithOpts([]string{"-f", "-", "--images-annotation=false", "--imgpkg-lock-output=" + path}, RunOpts{ StdinReader: strings.NewReader(input), }) expectedOut := `--- images: - image: index.docker.io/library/nginx@sha256:f7988fb6c02e0ce69257d9bd9cf37ae20a60f1df7563c3a2a6abe24160306b8d - image: index.docker.io/library/nginx@sha256:4a5573037f358b6cdfa2f3e8a9c33a5cf11bcd1675ca72ca76fbe5bd77d0d682 - sidecarImage: index.docker.io/library/nginx@sha256:4a5573037f358b6cdfa2f3e8a9c33a5cf11bcd1675ca72ca76fbe5bd77d0d682 - - index.docker.io/library/nginx@sha256:4a5573037f358b6cdfa2f3e8a9c33a5cf11bcd1675ca72ca76fbe5bd77d0d682 ` if out != expectedOut { t.Fatalf("Expected >>>%s<<< to match >>>%s<<<", out, expectedOut) } bs, err := ioutil.ReadFile(path) if err != nil { t.Fatalf("Failed while reading " + path) } if string(bs) != imgLock { t.Fatalf("Expected >>>%s<<< to match >>>%s<<<", bs, imgLock) } }
explode_data.jsonl/33766
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 707 }
[ 2830, 3393, 13033, 30069, 11989, 5097, 36374, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 7854, 14359, 1155, 340, 16463, 65, 507, 1669, 98086, 507, 90, 83, 11, 6105, 46011, 11, 6105, 11352, 65, 507, 21338, 1820, 11, 9514, 6257, 630, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNewPoll(t *testing.T) { t.Run("all fine", func(t *testing.T) { assert := assert.New(t) patch1 := monkey.Patch(model.GetMillis, func() int64 { return 1234567890 }) patch2 := monkey.Patch(model.NewId, testutils.GetPollID) defer patch1.Unpatch() defer patch2.Unpatch() creator := model.NewRandomString(10) question := model.NewRandomString(10) answerOptions := []string{model.NewRandomString(10), model.NewRandomString(10), model.NewRandomString(10)} p, err := poll.NewPoll(creator, question, answerOptions, []string{"anonymous", "progress", "public-add-option"}) require.Nil(t, err) require.NotNil(t, p) assert.Equal(testutils.GetPollID(), p.ID) assert.Equal(int64(1234567890), p.CreatedAt) assert.Equal(creator, p.Creator) assert.Equal(question, p.Question) assert.Equal(&poll.AnswerOption{Answer: answerOptions[0], Voter: nil}, p.AnswerOptions[0]) assert.Equal(&poll.AnswerOption{Answer: answerOptions[1], Voter: nil}, p.AnswerOptions[1]) assert.Equal(&poll.AnswerOption{Answer: answerOptions[2], Voter: nil}, p.AnswerOptions[2]) assert.Equal(poll.Settings{Anonymous: true, Progress: true, PublicAddOption: true}, p.Settings) }) t.Run("error, unknown setting", func(t *testing.T) { assert := assert.New(t) creator := model.NewRandomString(10) question := model.NewRandomString(10) answerOptions := []string{model.NewRandomString(10), model.NewRandomString(10), model.NewRandomString(10)} p, err := poll.NewPoll(creator, question, answerOptions, []string{"unkownOption"}) assert.Nil(p) assert.NotNil(err) }) t.Run("error, duplicate option", func(t *testing.T) { assert := assert.New(t) creator := model.NewRandomString(10) question := model.NewRandomString(10) option := model.NewRandomString(10) answerOptions := []string{option, model.NewRandomString(10), option} p, err := poll.NewPoll(creator, question, answerOptions, nil) assert.Nil(p) assert.NotNil(err) }) }
explode_data.jsonl/30283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 706 }
[ 2830, 3393, 3564, 49207, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 541, 6915, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 6948, 1669, 2060, 7121, 1155, 340, 197, 3223, 754, 16, 1669, 38703, 1069, 754, 7635, 2234, 17897, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseInputTextArgument(t *testing.T) { argument := &model.AutocompleteArg{ Name: "", //positional HelpText: "some_help", Type: model.AutocompleteArgTypeText, Data: &model.AutocompleteTextArg{Hint: "hint", Pattern: "pat"}, } found, _, _, suggestion := parseInputTextArgument(argument, "", "") assert.True(t, found) assert.Equal(t, model.AutocompleteSuggestion{Complete: "", Suggestion: "", Hint: "hint", Description: "some_help"}, suggestion) found, _, _, suggestion = parseInputTextArgument(argument, "", " ") assert.True(t, found) assert.Equal(t, model.AutocompleteSuggestion{Complete: " ", Suggestion: "", Hint: "hint", Description: "some_help"}, suggestion) found, _, _, suggestion = parseInputTextArgument(argument, "", "abc") assert.True(t, found) assert.Equal(t, model.AutocompleteSuggestion{Complete: "abc", Suggestion: "", Hint: "hint", Description: "some_help"}, suggestion) found, _, _, suggestion = parseInputTextArgument(argument, "", "\"abc dfd df ") assert.True(t, found) assert.Equal(t, model.AutocompleteSuggestion{Complete: "\"abc dfd df ", Suggestion: "", Hint: "hint", Description: "some_help"}, suggestion) found, parsed, toBeParsed, _ := parseInputTextArgument(argument, "", "abc efg ") assert.False(t, found) assert.Equal(t, "abc ", parsed) assert.Equal(t, "efg ", toBeParsed) found, parsed, toBeParsed, _ = parseInputTextArgument(argument, "", "abc ") assert.False(t, found) assert.Equal(t, "abc ", parsed) assert.Equal(t, "", toBeParsed) found, parsed, toBeParsed, _ = parseInputTextArgument(argument, "", "\"abc def\" abc") assert.False(t, found) assert.Equal(t, "\"abc def\" ", parsed) assert.Equal(t, "abc", toBeParsed) found, parsed, toBeParsed, _ = parseInputTextArgument(argument, "", "\"abc def\"") assert.False(t, found) assert.Equal(t, "\"abc def\"", parsed) assert.Equal(t, "", toBeParsed) }
explode_data.jsonl/58278
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 709 }
[ 2830, 3393, 14463, 2505, 1178, 9171, 1155, 353, 8840, 836, 8, 341, 197, 14479, 1669, 609, 2528, 875, 332, 20104, 2735, 515, 197, 21297, 25, 257, 7342, 442, 966, 3005, 198, 197, 197, 12689, 1178, 25, 330, 14689, 26926, 756, 197, 27725,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntArray_RLockFunc(t *testing.T) { gtest.C(t, func(t *gtest.T) { s1 := []int{1, 2, 3, 4} a1 := garray.NewIntArrayFrom(s1, true) ch1 := make(chan int64, 3) ch2 := make(chan int64, 1) //go1 go a1.RLockFunc(func(n1 []int) { //读锁 time.Sleep(2 * time.Second) //暂停1秒 n1[2] = 6 ch2 <- gconv.Int64(time.Now().UnixNano() / 1000 / 1000) }) //go2 go func() { time.Sleep(100 * time.Millisecond) //故意暂停0.01秒,等go1执行锁后,再开始执行. ch1 <- gconv.Int64(time.Now().UnixNano() / 1000 / 1000) a1.Len() ch1 <- gconv.Int64(time.Now().UnixNano() / 1000 / 1000) }() t1 := <-ch1 t2 := <-ch1 <-ch2 //等待go1完成 // 防止ci抖动,以豪秒为单位 t.AssertLT(t2-t1, 20) //go1加的读锁,所go2读的时候,并没有阻塞。 t.Assert(a1.Contains(6), true) }) }
explode_data.jsonl/47626
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 488 }
[ 2830, 3393, 95338, 2568, 11989, 9626, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 1903, 16, 1669, 3056, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 532, 197, 11323, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRWSchema(t *testing.T) { meta := arrow.NewMetadata([]string{"k1", "k2", "k3"}, []string{"v1", "v2", "v3"}) for _, tc := range []struct { schema *arrow.Schema memo dictMemo }{ { schema: arrow.NewSchema([]arrow.Field{ {Name: "f1", Type: arrow.PrimitiveTypes.Int64}, {Name: "f2", Type: arrow.PrimitiveTypes.Uint16}, {Name: "f3", Type: arrow.PrimitiveTypes.Float64}, }, &meta), memo: newMemo(), }, } { t.Run("", func(t *testing.T) { b := flatbuffers.NewBuilder(0) offset := schemaToFB(b, tc.schema, &tc.memo) b.Finish(offset) buf := b.FinishedBytes() fb := flatbuf.GetRootAsSchema(buf, 0) got, err := schemaFromFB(fb, &tc.memo) if err != nil { t.Fatal(err) } if !got.Equal(tc.schema) { t.Fatalf("r/w schema failed:\ngot = %#v\nwant= %#v\n", got, tc.schema) } { got := got.Metadata() want := tc.schema.Metadata() if got.Len() != want.Len() { t.Fatalf("invalid metadata len: got=%d, want=%d", got.Len(), want.Len()) } if got, want := got.Keys(), want.Keys(); !reflect.DeepEqual(got, want) { t.Fatalf("invalid metadata keys:\ngot =%v\nwant=%v\n", got, want) } if got, want := got.Values(), want.Values(); !reflect.DeepEqual(got, want) { t.Fatalf("invalid metadata values:\ngot =%v\nwant=%v\n", got, want) } } }) } }
explode_data.jsonl/17521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 651 }
[ 2830, 3393, 49, 7433, 3416, 1155, 353, 8840, 836, 8, 341, 84004, 1669, 17921, 7121, 14610, 10556, 917, 4913, 74, 16, 497, 330, 74, 17, 497, 330, 74, 18, 14345, 3056, 917, 4913, 85, 16, 497, 330, 85, 17, 497, 330, 85, 18, 23625, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestReproduceParse_NoTaskID(t *testing.T) { t.Parallel() Convey(`Make sure Parse works with with no task ID.`, t, func() { c := reproduceRun{} c.init(&testAuthFlags{}) err := c.GetFlags().Parse([]string{"-server", "http://localhost:9050"}) So(err, ShouldBeNil) err = c.parse([]string(nil)) So(err, ShouldErrLike, "must specify exactly one task id.") }) }
explode_data.jsonl/74869
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 693, 97274, 14463, 36989, 6262, 915, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 93070, 5617, 5809, 8078, 2704, 14775, 4278, 448, 448, 902, 3383, 3034, 13, 7808, 259, 11, 2915, 368, 341, 197, 1444, 1669, 22800, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWarningsInsideNodeModules(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` import "./dup-case.js"; import "./node_modules/dup-case.js"; import "@plugin/dup-case.js" import "./not-in.js"; import "./node_modules/not-in.js"; import "@plugin/not-in.js" import "./not-instanceof.js"; import "./node_modules/not-instanceof.js"; import "@plugin/not-instanceof.js" import "./return-asi.js"; import "./node_modules/return-asi.js"; import "@plugin/return-asi.js" import "./bad-typeof.js"; import "./node_modules/bad-typeof.js"; import "@plugin/bad-typeof.js" import "./equals-neg-zero.js"; import "./node_modules/equals-neg-zero.js"; import "@plugin/equals-neg-zero.js" import "./equals-nan.js"; import "./node_modules/equals-nan.js"; import "@plugin/equals-nan.js" import "./equals-object.js"; import "./node_modules/equals-object.js"; import "@plugin/equals-object.js" import "./write-getter.js"; import "./node_modules/write-getter.js"; import "@plugin/write-getter.js" import "./read-setter.js"; import "./node_modules/read-setter.js"; import "@plugin/read-setter.js" import "./delete-super.js"; import "./node_modules/delete-super.js"; import "@plugin/delete-super.js" `, "/dup-case.js": "switch (x) { case 0: case 0: }", "/node_modules/dup-case.js": "switch (x) { case 0: case 0: }", "/plugin-dir/node_modules/dup-case.js": "switch (x) { case 0: case 0: }", "/not-in.js": "!a in b", "/node_modules/not-in.js": "!a in b", "/plugin-dir/node_modules/not-in.js": "!a in b", "/not-instanceof.js": "!a instanceof b", "/node_modules/not-instanceof.js": "!a instanceof b", "/plugin-dir/node_modules/not-instanceof.js": "!a instanceof b", "/return-asi.js": "return\n123", "/node_modules/return-asi.js": "return\n123", "/plugin-dir/node_modules/return-asi.js": "return\n123", "/bad-typeof.js": "typeof x == 'null'", "/node_modules/bad-typeof.js": "typeof x == 'null'", "/plugin-dir/node_modules/bad-typeof.js": "typeof x == 'null'", "/equals-neg-zero.js": 
"x === -0", "/node_modules/equals-neg-zero.js": "x === -0", "/plugin-dir/node_modules/equals-neg-zero.js": "x === -0", "/equals-nan.js": "x === NaN", "/node_modules/equals-nan.js": "x === NaN", "/plugin-dir/node_modules/equals-nan.js": "x === NaN", "/equals-object.js": "x === []", "/node_modules/equals-object.js": "x === []", "/plugin-dir/node_modules/equals-object.js": "x === []", "/write-getter.js": "class Foo { get #foo() {} foo() { this.#foo = 123 } }", "/node_modules/write-getter.js": "class Foo { get #foo() {} foo() { this.#foo = 123 } }", "/plugin-dir/node_modules/write-getter.js": "class Foo { get #foo() {} foo() { this.#foo = 123 } }", "/read-setter.js": "class Foo { set #foo(x) {} foo() { return this.#foo } }", "/node_modules/read-setter.js": "class Foo { set #foo(x) {} foo() { return this.#foo } }", "/plugin-dir/node_modules/read-setter.js": "class Foo { set #foo(x) {} foo() { return this.#foo } }", "/delete-super.js": "class Foo extends Bar { foo() { delete super.foo } }", "/node_modules/delete-super.js": "class Foo extends Bar { foo() { delete super.foo } }", "/plugin-dir/node_modules/delete-super.js": "class Foo extends Bar { foo() { delete super.foo } }", }, entryPaths: []string{"/entry.js"}, options: config.Options{ Mode: config.ModeBundle, AbsOutputFile: "/out.js", Plugins: []config.Plugin{{ OnResolve: []config.OnResolve{ { Filter: regexp.MustCompile("^@plugin/"), Callback: func(args config.OnResolveArgs) config.OnResolveResult { return config.OnResolveResult{ Path: logger.Path{ Text: strings.Replace(args.Path, "@plugin/", "/plugin-dir/node_modules/", 1), Namespace: "file", }, } }, }, }, }}, }, expectedScanLog: `bad-typeof.js: warning: The "typeof" operator will never evaluate to "null" delete-super.js: warning: Attempting to delete a property of "super" will throw a ReferenceError dup-case.js: warning: This case clause will never be evaluated because it duplicates an earlier case clause equals-nan.js: warning: Comparison with NaN using the "===" 
operator here is always false equals-neg-zero.js: warning: Comparison with -0 using the "===" operator will also match 0 equals-object.js: warning: Comparison using the "===" operator here is always false not-in.js: warning: Suspicious use of the "!" operator inside the "in" operator not-instanceof.js: warning: Suspicious use of the "!" operator inside the "instanceof" operator read-setter.js: warning: Reading from setter-only property "#foo" will throw return-asi.js: warning: The following expression is not returned because of an automatically-inserted semicolon write-getter.js: warning: Writing to getter-only property "#foo" will throw `, }) }
explode_data.jsonl/38579
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2434 }
[ 2830, 3393, 20140, 24480, 1955, 28201, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 21918, 5924, 21912, 384...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPodIndexer verifies that NewPodNameIndexer produces exactly one
// metadata entry and one index for a pod, both keyed "<namespace>/<name>",
// and that the metadata carries the pod name, namespace and labels.
func TestPodIndexer(t *testing.T) {
	var testConfig = common.NewConfig()
	podIndexer, err := NewPodNameIndexer(*testConfig, metagen)
	assert.Nil(t, err)

	podName := "testpod"
	ns := "testns"
	// Minimal pod fixture: only the fields the indexer reads.
	pod := Pod{
		Metadata: ObjectMeta{
			Name:      podName,
			Namespace: ns,
			Labels: map[string]string{
				"labelkey": "labelvalue",
			},
		},
		Spec: PodSpec{},
	}

	// Exactly one metadata entry, indexed by "namespace/name".
	indexers := podIndexer.GetMetadata(&pod)
	assert.Equal(t, len(indexers), 1)
	assert.Equal(t, indexers[0].Index, fmt.Sprintf("%s/%s", ns, podName))

	expected := common.MapStr{
		"pod": common.MapStr{
			"name": "testpod",
		},
		"namespace": "testns",
		"labels": common.MapStr{
			"labelkey": "labelvalue",
		},
	}
	assert.Equal(t, expected.String(), indexers[0].Data.String())

	// GetIndexes must agree with the metadata index key.
	indices := podIndexer.GetIndexes(&pod)
	assert.Equal(t, len(indices), 1)
	assert.Equal(t, indices[0], fmt.Sprintf("%s/%s", ns, podName))
}
explode_data.jsonl/80955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 23527, 1552, 261, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 2648, 284, 4185, 7121, 2648, 2822, 3223, 347, 1552, 261, 11, 1848, 1669, 1532, 23527, 675, 1552, 261, 4071, 1944, 2648, 11, 2270, 8535, 340, 6948, 59678, 1155, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRsyslogScan(t *testing.T) { data := []byte("98 <133>Mar 14 04:20:29 example-host-prod-1-1 audit type=SYSCALL msg=audit(1489465219.995:1699): test") buffer := NewSyslogBuffer() buffer.Append(data) result := buffer.Next() assert.NotEqual(t, nil, result) assert.Equal(t, "<133>Mar 14 04:20:29 example-host-prod-1-1 audit type=SYSCALL msg=audit(1489465219.995:1699): test", string(result)) }
explode_data.jsonl/18083
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 49, 7791, 839, 26570, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 445, 24, 23, 366, 16, 18, 18, 29, 12061, 220, 16, 19, 220, 15, 19, 25, 17, 15, 25, 17, 24, 3110, 38589, 9838, 67, 12, 16, 12, 16, 24275, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetEC2InstanceIDBlackholedError(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() ec2MetadataClient := mock_ec2.NewMockEC2MetadataClient(ctrl) agent := &ecsAgent{ec2MetadataClient: ec2MetadataClient} ec2MetadataClient.EXPECT().InstanceID().Return("", errors.New("blackholed")) assert.Equal(t, "", agent.getEC2InstanceID()) }
explode_data.jsonl/41591
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 1949, 7498, 17, 2523, 915, 14417, 6161, 832, 1454, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 757, 17, 14610, 2959, 1669, 7860, 36844, 17, 7121, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_CreateScratch_ValidDestpath_Success(t *testing.T) { rhcs := runhcs.Runhcs{ Debug: true, } scratchPath := filepath.Join(t.TempDir(), "scratch.vhdx") ctx := context.TODO() err := rhcs.CreateScratch(ctx, scratchPath) if err != nil { t.Fatalf("Failed 'CreateScratch' command with: %v", err) } _, err = os.Stat(scratchPath) if err != nil { t.Fatalf("Failed to stat scratch path with: %v", err) } }
explode_data.jsonl/71752
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 34325, 65508, 754, 97279, 34830, 2343, 87161, 1155, 353, 8840, 836, 8, 341, 7000, 71, 4837, 1669, 1598, 71, 4837, 16708, 71, 4837, 515, 197, 34424, 25, 830, 345, 197, 630, 1903, 86284, 1820, 1669, 26054, 22363, 1155, 65009, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestDefaultWebhookFlow exercises the HTTP handler produced by
// kubewebhookhttp.HandlerFor end to end: malformed requests are rejected
// with 400, an allowed admission review yields 200, and a review whose
// result carries a failure status yields 500 with the encoded status.
func TestDefaultWebhookFlow(t *testing.T) {
	tests := []struct {
		name           string
		body           string // raw request body sent to the handler
		reviewResponse *admissionv1beta1.AdmissionResponse // what the mocked webhook returns
		expCode        int    // expected HTTP status code
		expBody        string // expected response payload
	}{
		{
			name:    "No admission review on request should return error",
			body:    "",
			expBody: "no body found\n",
			expCode: 400,
		},
		{
			name:    "Bad admission review on request should return error",
			body:    "wrong body",
			expBody: "could not decode the admission review from the request\n",
			expCode: 400,
		},
		{
			name: "A regular call to the webhook handler should execute the webhook and return OK if nothing failed",
			body: getTestAdmissionReviewRequestStr("1234567890"),
			reviewResponse: &admissionv1beta1.AdmissionResponse{
				UID:     "1234567890",
				Allowed: true,
			},
			expBody: `{"response":{"uid":"1234567890","allowed":true}}`,
			expCode: 200,
		},
		{
			name: "A regular call to the webhook handler should execute the webhook and return error if something failed",
			body: getTestAdmissionReviewRequestStr("1234567890"),
			reviewResponse: &admissionv1beta1.AdmissionResponse{
				UID: "1234567890",
				Result: &metav1.Status{
					Status:  "Failure",
					Message: "wanted error",
				},
			},
			expBody: `{"response":{"uid":"1234567890","allowed":false,"status":{"metadata":{},"status":"Failure","message":"wanted error"}}}`,
			expCode: 500,
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			assert := assert.New(t)
			require := require.New(t)

			// Mocks.
			mwh := &mwebhook.Webhook{}
			mwh.On("Review", mock.Anything, mock.Anything).Once().Return(test.reviewResponse, nil)

			h, err := kubewebhookhttp.HandlerFor(mwh)
			require.NoError(err)

			// Drive the handler with an in-memory request/recorder pair.
			req := httptest.NewRequest("GET", "/awesome/webhook", bytes.NewBufferString(test.body))
			w := httptest.NewRecorder()
			h.ServeHTTP(w, req)

			assert.Equal(test.expCode, w.Code)
			assert.Equal(test.expBody, w.Body.String())
		})
	}
}
explode_data.jsonl/36783
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 798 }
[ 2830, 3393, 3675, 5981, 20873, 18878, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 35402, 1843, 914, 198, 197, 17200, 1050, 2582, 353, 329, 2728, 85, 16, 19127, 16, 17865, 2728, 2582, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestManifestGenerateAllOff(t *testing.T) { g := NewGomegaWithT(t) m, _, err := generateManifest("all_off", "", liveCharts) if err != nil { t.Fatal(err) } objs, err := parseObjectSetFromManifest(m) if err != nil { t.Fatal(err) } g.Expect(objs.size()).Should(Equal(0)) }
explode_data.jsonl/48758
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 38495, 31115, 2403, 4596, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 340, 2109, 11, 8358, 1848, 1669, 6923, 38495, 445, 541, 13651, 497, 7342, 3887, 64878, 340, 743, 1848, 961, 2092, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnmarshalStoredJson(t *testing.T) { Convey("Given one stored temperature", t, func() { s := NewStore("./test-data", 20) storedData1, err := s.unmarshal(storeJSON1) So(err, ShouldBeNil) So(storedData1, ShouldNotBeNil) So(len(storedData1.Temperatures), ShouldEqual, 1) }) }
explode_data.jsonl/4844
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 1806, 27121, 93243, 5014, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 22043, 825, 9768, 9315, 497, 259, 11, 2915, 368, 341, 197, 1903, 1669, 1532, 6093, 13988, 1944, 13945, 497, 220, 17, 15, 340, 197, 18388, 3018, 1043...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReconcileServiceInstanceTimeoutTriggersOrphanMitigation verifies
// that when the broker's Provision call fails with a timeout (url.Error
// wrapping a timeout error), the reconciler reports an error, marks the
// instance not-ready, and starts orphan mitigation.
func TestReconcileServiceInstanceTimeoutTriggersOrphanMitigation(t *testing.T) {
	// Broker client is configured so Provision fails with a timeout.
	_, fakeCatalogClient, _, testController, sharedInformers := newTestController(t, fakeosb.FakeClientConfiguration{
		ProvisionReaction: &fakeosb.ProvisionReaction{
			Error: &url.Error{
				Err: getTestTimeoutError(),
			},
		},
	})

	// Seed the informer caches with the broker/class/plan the instance refers to.
	sharedInformers.ClusterServiceBrokers().Informer().GetStore().Add(getTestClusterServiceBroker())
	sharedInformers.ClusterServiceClasses().Informer().GetStore().Add(getTestClusterServiceClass())
	sharedInformers.ClusterServicePlans().Informer().GetStore().Add(getTestClusterServicePlan())

	instance := getTestServiceInstanceWithClusterRefs()

	// First reconcile pass only records "provision in progress".
	if err := reconcileServiceInstance(t, testController, instance); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	instance = assertServiceInstanceProvisionInProgressIsTheOnlyCatalogClientAction(t, fakeCatalogClient, instance)
	fakeCatalogClient.ClearActions()

	// Second pass performs the provision call, which hits the timeout; the
	// reconciler must surface an error so orphan mitigation is triggered.
	if err := reconcileServiceInstance(t, testController, instance); err == nil {
		t.Fatal("Reconciler should return error for timeout so that instance is orphan mitigated")
	}

	// Exactly one catalog action: the status update on the instance.
	actions := fakeCatalogClient.Actions()
	assertNumberOfActions(t, actions, 1)

	updatedObject := assertUpdateStatus(t, actions[0], instance)
	updatedServiceInstance, ok := updatedObject.(*v1beta1.ServiceInstance)
	if !ok {
		fatalf(t, "Couldn't convert object %+v into a *v1beta1.ServiceInstance", updatedObject)
	}

	// The status update must flag orphan mitigation as required and running.
	assertServiceInstanceReadyCondition(t, updatedServiceInstance, v1beta1.ConditionFalse, startingInstanceOrphanMitigationReason)
	assertServiceInstanceOrphanMitigationTrue(t, updatedServiceInstance, errorErrorCallingProvisionReason)
	assertServiceInstanceOrphanMitigationInProgressTrue(t, updatedServiceInstance)
}
explode_data.jsonl/58181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 510 }
[ 2830, 3393, 693, 40446, 457, 1860, 2523, 7636, 1282, 21385, 2195, 9943, 54370, 17930, 1155, 353, 8840, 836, 8, 341, 197, 6878, 12418, 41606, 2959, 11, 8358, 1273, 2051, 11, 6094, 37891, 388, 1669, 501, 2271, 2051, 1155, 11, 12418, 436, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestGetBuffers feeds a canned "show buffers" CLI reply into the mocked
// VPP channel and checks that GetBuffersInfo parses it into a single
// BuffersItem carrying name, index, size and alloc/free counts.
func TestGetBuffers(t *testing.T) {
	ctx, handler := testSetup(t)
	defer ctx.TeardownTestCtx()

	// Raw CLI output as VPP would return it for "show buffers".
	const reply = `Pool Name Index NUMA Size Data Size Total Avail Cached Used default-numa-0 0 0 2304 2048 17290 17290 0 0 `
	ctx.MockVpp.MockReply(&vpe.CliInbandReply{
		Reply: reply,
	})

	info, err := handler.GetBuffersInfo(context.TODO())

	Expect(err).ShouldNot(HaveOccurred())
	Expect(info.Items).To(HaveLen(1))
	Expect(info.Items[0]).To(Equal(vppcalls.BuffersItem{
		//ThreadID: 0,
		Name:  "default-numa-0",
		Index: 0,
		Size:  2304,
		Alloc: 0,
		Free:  17290,
		//NumAlloc: 256,
		//NumFree: 19,
	}))
	// Additional pools kept for reference but currently disabled.
	/*Expect(info.Items[1]).To(Equal(vppcalls.BuffersItem{ ThreadID: 0, Name: "lacp-ethernet", Index: 1, Size: 256, Alloc: 1130000, Free: 27000, NumAlloc: 512, NumFree: 12, })) Expect(info.Items[2]).To(Equal(vppcalls.BuffersItem{ ThreadID: 0, Name: "marker-ethernet", Index: 2, Size: 256, Alloc: 1110000000, Free: 0, NumAlloc: 0, NumFree: 0, }))*/
}
explode_data.jsonl/69840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 556 }
[ 2830, 3393, 1949, 36219, 1155, 353, 8840, 836, 8, 341, 20985, 11, 7013, 1669, 1273, 21821, 1155, 340, 16867, 5635, 94849, 37496, 2271, 23684, 2822, 4777, 9851, 284, 1565, 10551, 3988, 310, 8008, 15943, 32, 220, 8478, 220, 2885, 8478, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFailClose(t *testing.T) { s := &TestSetup{ t: t, conf: basicConfig+","+networkFailClose, no_mixer: true, } if err := s.SetUp(); err != nil { t.Fatalf("Failed to setup test: %v", err) } defer s.TearDown() url := fmt.Sprintf("http://localhost:%d/echo", ClientProxyPort) tag := "Fail-Open" // Use fail close policy. code, _, err := HTTPGet(url) if err != nil { t.Errorf("Failed in request %s: %v", tag, err) } if code != 503 { t.Errorf("Status code 503 is expected, got %d.", code) } }
explode_data.jsonl/51390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 19524, 7925, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 2271, 21821, 515, 197, 3244, 25, 262, 259, 345, 197, 67850, 25, 6770, 2648, 85079, 17511, 19524, 7925, 345, 197, 72104, 717, 39014, 25, 830, 345, 197, 532, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestMsgSell table-tests MsgSell.ValidateBasic: a fully valid sell order
// passes, while a bad owner address, batch denom, quantity, or negative
// ask price must each fail validation.
func TestMsgSell(t *testing.T) {
	_, _, a1 := testdata.KeyTestPubAddr()

	tests := map[string]struct {
		src    MsgSell // message under validation
		expErr bool    // whether ValidateBasic must fail
	}{
		"valid": {
			src: MsgSell{
				Owner: a1.String(),
				Orders: []*MsgSell_Order{
					{
						BatchDenom: "A00-00000000-00000000-000",
						Quantity:   "1.5",
						AskPrice: &sdk.Coin{
							Denom:  "uregen",
							Amount: sdk.NewInt(20),
						},
						DisableAutoRetire: true,
					},
				},
			},
			expErr: false,
		},
		"invalid: bad owner address": {
			src: MsgSell{
				Owner: "foobar",
				Orders: []*MsgSell_Order{
					{
						BatchDenom: "A00-00000000-00000000-000",
						Quantity:   "1.5",
						AskPrice: &sdk.Coin{
							Denom:  "uregen",
							Amount: sdk.NewInt(20),
						},
						DisableAutoRetire: true,
					},
				},
			},
			expErr: true,
		},
		"invalid: bad batch denom": {
			src: MsgSell{
				Owner: a1.String(),
				Orders: []*MsgSell_Order{
					{
						BatchDenom: "foobar",
						Quantity:   "1.5",
						AskPrice: &sdk.Coin{
							Denom:  "uregen",
							Amount: sdk.NewInt(20),
						},
						DisableAutoRetire: true,
					},
				},
			},
			expErr: true,
		},
		"invalid: bad quantity": {
			src: MsgSell{
				Owner: a1.String(),
				Orders: []*MsgSell_Order{
					{
						BatchDenom: "A00-00000000-00000000-000",
						Quantity:   "-1.5",
						AskPrice: &sdk.Coin{
							Denom:  "uregen",
							Amount: sdk.NewInt(20),
						},
						DisableAutoRetire: true,
					},
				},
			},
			expErr: true,
		},
		"invalid: bad ask price": {
			src: MsgSell{
				Owner: a1.String(),
				Orders: []*MsgSell_Order{
					{
						BatchDenom: "A00-00000000-00000000-000",
						Quantity:   "1.5",
						AskPrice: &sdk.Coin{
							Denom:  "uregen",
							Amount: sdk.NewInt(-20),
						},
						DisableAutoRetire: true,
					},
				},
			},
			expErr: true,
		},
	}

	for msg, test := range tests {
		t.Run(msg, func(t *testing.T) {
			err := test.src.ValidateBasic()
			if test.expErr {
				require.Error(t, err)
			} else {
				require.NoError(t, err)
			}
		})
	}
}
explode_data.jsonl/65135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1154 }
[ 2830, 3393, 6611, 68533, 1155, 353, 8840, 836, 8, 341, 197, 6878, 8358, 264, 16, 1669, 1273, 691, 9610, 2271, 29162, 13986, 2822, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 41144, 262, 24205, 68533, 198, 197, 48558, 7747, 1807, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGet_archiveRooted(t *testing.T) { dst := tempDir(t) u := testModule("archive-rooted/archive.tar.gz") if err := Get(dst, u); err != nil { t.Fatalf("err: %s", err) } mainPath := filepath.Join(dst, "root", "hello.txt") if _, err := os.Stat(mainPath); err != nil { t.Fatalf("err: %s", err) } }
explode_data.jsonl/815
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 1949, 42873, 8439, 291, 1155, 353, 8840, 836, 8, 341, 52051, 1669, 2730, 6184, 1155, 340, 10676, 1669, 1273, 3332, 445, 16019, 39214, 291, 71627, 28048, 20963, 1138, 743, 1848, 1669, 2126, 30260, 11, 575, 1215, 1848, 961, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrivateActivityYesInvisibleForOtherUser(t *testing.T) { defer prepareTestEnv(t)() testPrivateActivityDoSomethingForActionEntries(t) testPrivateActivityHelperEnablePrivateActivity(t) session := loginUser(t, privateActivityTestOtherUser) visible := testPrivateActivityHelperHasVisibleActivitiesFromSession(t, session) assert.False(t, visible, "user should have no visible activities") }
explode_data.jsonl/51651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 16787, 4052, 9454, 641, 12601, 2461, 11409, 1474, 1155, 353, 8840, 836, 8, 341, 16867, 10549, 2271, 14359, 1155, 8, 741, 18185, 16787, 4052, 5404, 23087, 2461, 2512, 24533, 1155, 340, 18185, 16787, 4052, 5511, 11084, 16787, 40...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteEmailAddresses(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) // delete multiple email address emails := make([]*EmailAddress, 2) emails[0] = &EmailAddress{ UID: int64(2), ID: int64(3), Email: "user2@example.com", LowerEmail: "user2@example.com", } emails[1] = &EmailAddress{ UID: int64(2), Email: "user2-2@example.com", LowerEmail: "user2-2@example.com", } assert.NoError(t, DeleteEmailAddresses(emails)) // ErrEmailAlreadyUsed err := DeleteEmailAddresses(emails) assert.Error(t, err) }
explode_data.jsonl/67889
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 6435, 4781, 52290, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 19905, 28770, 3380, 2271, 5988, 12367, 197, 322, 3698, 5248, 2551, 2621, 198, 197, 51376, 1669, 1281, 85288, 79986, 11, 220, 17, 340, 197, 51376, 58, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestServiceEntry is a performance/scale test: it starts a mock MCP
// server that feeds *serviceEntryCount service entries to a local pilot,
// connects *envoyCount ADS clients, then polls pilot's and the clients'
// debug endpoints until all entries and endpoints are registered.
func TestServiceEntry(t *testing.T) {
	t.Logf("running performance test with the following parameters { Number of ServiceEntry: %d, Number of Envoys: %d, Total test time: %vs }", *serviceEntryCount, *envoyCount, terminateAfter.Seconds())

	t.Log("building & starting mock mcp server...")
	mcpServer, err := runMcpServer()
	if err != nil {
		t.Fatal(err)
	}
	defer mcpServer.Close()

	// Push config snapshots in the background until terminate(quit) fires.
	quit := make(chan struct{})
	go runSnapshot(mcpServer, quit, t)

	debugAddr := fmt.Sprintf("127.0.0.1:%d", pilotDebugPort)
	pilotAddr := fmt.Sprintf("127.0.0.1:%d", pilotGrpcPort)
	tearDown := initLocalPilotTestEnv(t, mcpServer.Port, pilotAddr, debugAddr)
	defer tearDown()

	t.Log("run sidecar envoy(s)...")
	adscs := adsConnectAndWait(*envoyCount, pilotAddr, t)
	for _, adsc := range adscs {
		defer adsc.Close()
	}

	// Poll pilot's configz endpoint until all service entries show up.
	t.Log("check for service entries in pilot's debug endpoint...")
	g := gomega.NewGomegaWithT(t)
	g.Eventually(func() (int, error) {
		return registeredServiceEntries(fmt.Sprintf("http://127.0.0.1:%d/debug/configz", pilotDebugPort))
	}, "30s", "1s").Should(gomega.Equal(*serviceEntryCount))

	// Poll each ADS client until its endpoint set matches the entry count.
	t.Log("check for registered endpoints in envoys's debug endpoint...")
	g.Eventually(func() error {
		for _, adsc := range adscs {
			var data map[string]interface{}
			err = json.Unmarshal([]byte(adsc.EndpointsJSON()), &data)
			if err != nil {
				return err
			}
			if len(data) != *serviceEntryCount {
				return errors.New("endpoint not registered")
			}
		}
		return nil
	}, "30s", "1s").ShouldNot(gomega.HaveOccurred())

	terminate(quit)
}
explode_data.jsonl/74663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 602 }
[ 2830, 3393, 1860, 5874, 1155, 353, 8840, 836, 8, 341, 3244, 98954, 445, 27173, 5068, 1273, 448, 279, 2701, 5029, 314, 5624, 315, 5362, 5874, 25, 1018, 67, 11, 5624, 315, 2925, 3334, 1047, 25, 1018, 67, 11, 10657, 1273, 882, 25, 1018...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBQMultipleCallsForTheSameRank(t *testing.T) { lower, upper := -5.0, 5.0 bq := getNoiselessBQ(t, lower, upper) for _, i := range createEntries() { bq.Add(i) } for _, rank := range getRanks() { got, err := bq.Result(rank) if err != nil { t.Fatalf("Couldn't compute dp result for rank=%f: %v", rank, err) } want, err := bq.Result(rank) if err != nil { t.Fatalf("Couldn't compute dp result for rank=%f: %v", rank, err) } if !cmp.Equal(got, want) { t.Errorf("Add: Wanted the same result for multiple calls for rank %f got %f, want %f", rank, got, want) } } }
explode_data.jsonl/80637
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 33, 48, 32089, 55292, 2461, 785, 19198, 22550, 1155, 353, 8840, 836, 8, 341, 8810, 1202, 11, 8416, 1669, 481, 20, 13, 15, 11, 220, 20, 13, 15, 198, 2233, 80, 1669, 633, 61819, 1717, 33, 48, 1155, 11, 4722, 11, 8416, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestRgPyDumpReqs covers the RG.PYDUMPREQS query handler: a successful
// reply must be rendered as one frame with six named fields, and a client
// error must be propagated into the response error.
func TestRgPyDumpReqs(t *testing.T) {
	t.Parallel()

	/**
	 * Success
	 */
	t.Run("should process command", func(t *testing.T) {
		t.Parallel()

		// Client
		client := testClient{
			rcv: []models.PyDumpReq{{
				GearReqVersion: 1,
				Name:           "pandas",
				IsDownloaded:   "yes",
				IsInstalled:    "yes",
				CompiledOs:     "linux-buster-x64",
				Wheels:         []string{"pytz-2021.1-py2.py3-none-any.whl", "numpy-1.20.2-cp37-cp37m-manylinux2010_x86_64.whl"},
			}},
			err: nil,
		}

		// Response
		resp := queryRgPydumpReqs(queryModel{Command: models.GearsPyDumpReqs}, &client)
		require.Len(t, resp.Frames, 1)
		require.Len(t, resp.Frames[0].Fields, 6)

		// Cell values of the single row.
		require.Equal(t, int64(1), resp.Frames[0].Fields[0].At(0))
		require.Equal(t, string("pandas"), resp.Frames[0].Fields[1].At(0))
		require.Equal(t, string("yes"), resp.Frames[0].Fields[2].At(0))
		require.Equal(t, string("yes"), resp.Frames[0].Fields[3].At(0))

		// Field names follow the PyDumpReq struct fields.
		require.Equal(t, "GearReqVersion", resp.Frames[0].Fields[0].Name)
		require.Equal(t, "Name", resp.Frames[0].Fields[1].Name)
		require.Equal(t, "IsDownloaded", resp.Frames[0].Fields[2].Name)
		require.Equal(t, "IsInstalled", resp.Frames[0].Fields[3].Name)
		require.Equal(t, "CompiledOs", resp.Frames[0].Fields[4].Name)
		require.Equal(t, "Wheels", resp.Frames[0].Fields[5].Name)
	})

	/**
	 * Error
	 */
	t.Run("should handle error", func(t *testing.T) {
		t.Parallel()

		// Client
		client := testClient{
			rcv:      nil,
			batchRcv: nil,
			err:      errors.New("error occurred")}

		// Response
		resp := queryRgPydumpReqs(queryModel{Command: models.GearsPyDumpReqs}, &client)
		require.EqualError(t, resp.Error, "error occurred")
	})
}
explode_data.jsonl/66884
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 777 }
[ 2830, 3393, 49, 70, 13828, 51056, 693, 26358, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 1747, 197, 353, 13047, 198, 197, 735, 3244, 16708, 445, 5445, 1882, 3210, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoaringBitmapBitmapOf(t *testing.T) { array := []uint64{5580, 33722, 44031, 57276, 83097} bmp := BitmapOf(array...) assert.EqualValues(t, len(array), bmp.GetCardinality()) }
explode_data.jsonl/20323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 38872, 3249, 16773, 16773, 2124, 1155, 353, 8840, 836, 8, 341, 11923, 1669, 3056, 2496, 21, 19, 90, 20, 20, 23, 15, 11, 220, 18, 18, 22, 17, 17, 11, 220, 19, 19, 15, 18, 16, 11, 220, 20, 22, 17, 22, 21, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInvalidateWhenSuccess(t *testing.T) { // Given options := store.InvalidateOptions{ Tags: []string{"tag1"}, } store := &mocksStore.StoreInterface{} store.On("Invalidate", options).Return(nil) codec := New(store) // When err := codec.Invalidate(options) // Then assert.Nil(t, err) assert.Equal(t, 0, codec.GetStats().Hits) assert.Equal(t, 0, codec.GetStats().Miss) assert.Equal(t, 0, codec.GetStats().SetSuccess) assert.Equal(t, 0, codec.GetStats().SetError) assert.Equal(t, 0, codec.GetStats().DeleteSuccess) assert.Equal(t, 0, codec.GetStats().DeleteError) assert.Equal(t, 1, codec.GetStats().InvalidateSuccess) assert.Equal(t, 0, codec.GetStats().InvalidateError) }
explode_data.jsonl/29043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 641, 7067, 4498, 7188, 1155, 353, 8840, 836, 8, 341, 197, 322, 16246, 198, 35500, 1669, 3553, 5337, 7067, 3798, 515, 197, 10261, 2032, 25, 3056, 917, 4913, 4578, 16, 7115, 197, 630, 57279, 1669, 609, 16712, 82, 6093, 38047...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestClient_SimpleScalarXmlProperties_awsEc2queryDeserialize is a
// smithy-style protocol test: it replays a canned EC2-query XML response
// through a stubbed HTTP client and checks that every simple scalar
// property deserializes into the expected Go value.
func TestClient_SimpleScalarXmlProperties_awsEc2queryDeserialize(t *testing.T) {
	cases := map[string]struct {
		StatusCode    int
		Header        http.Header
		BodyMediaType string
		Body          []byte
		ExpectResult  *SimpleScalarXmlPropertiesOutput
	}{
		// Serializes simple scalar properties
		"Ec2SimpleScalarProperties": {
			StatusCode: 200,
			Header: http.Header{
				"Content-Type": []string{"text/xml;charset=UTF-8"},
			},
			BodyMediaType: "application/xml",
			Body:          []byte(`<SimpleScalarXmlPropertiesResponse xmlns="https://example.com/"> <stringValue>string</stringValue> <emptyStringValue/> <trueBooleanValue>true</trueBooleanValue> <falseBooleanValue>false</falseBooleanValue> <byteValue>1</byteValue> <shortValue>2</shortValue> <integerValue>3</integerValue> <longValue>4</longValue> <floatValue>5.5</floatValue> <DoubleDribble>6.5</DoubleDribble> <RequestId>requestid</RequestId> </SimpleScalarXmlPropertiesResponse> `),
			ExpectResult: &SimpleScalarXmlPropertiesOutput{
				StringValue:       ptr.String("string"),
				EmptyStringValue:  ptr.String(""),
				TrueBooleanValue:  ptr.Bool(true),
				FalseBooleanValue: ptr.Bool(false),
				ByteValue:         ptr.Int8(1),
				ShortValue:        ptr.Int16(2),
				IntegerValue:      ptr.Int32(3),
				LongValue:         ptr.Int64(4),
				FloatValue:        ptr.Float32(5.5),
				DoubleValue:       ptr.Float64(6.5),
			},
		},
	}
	for name, c := range cases {
		t.Run(name, func(t *testing.T) {
			url := "http://localhost:8888/"
			// Client whose transport fabricates the canned response without
			// touching the network.
			client := New(Options{
				HTTPClient: smithyhttp.ClientDoFunc(func(r *http.Request) (*http.Response, error) {
					headers := http.Header{}
					for k, vs := range c.Header {
						for _, v := range vs {
							headers.Add(k, v)
						}
					}
					if len(c.BodyMediaType) != 0 && len(headers.Values("Content-Type")) == 0 {
						headers.Set("Content-Type", c.BodyMediaType)
					}
					response := &http.Response{
						StatusCode: c.StatusCode,
						Header:     headers,
						Request:    r,
					}
					if len(c.Body) != 0 {
						response.ContentLength = int64(len(c.Body))
						response.Body = ioutil.NopCloser(bytes.NewReader(c.Body))
					} else {
						response.Body = http.NoBody
					}
					return response, nil
				}),
				APIOptions: []func(*middleware.Stack) error{
					func(s *middleware.Stack) error {
						// Drop finalize middleware (signing etc.) — unneeded
						// against the stubbed transport.
						s.Finalize.Clear()
						return nil
					},
				},
				EndpointResolver: EndpointResolverFunc(func(region string, options EndpointResolverOptions) (e aws.Endpoint, err error) {
					e.URL = url
					e.SigningRegion = "us-west-2"
					return e, err
				}),
				IdempotencyTokenProvider: smithyrand.NewUUIDIdempotencyToken(&smithytesting.ByteLoop{}),
				Region:                   "us-west-2",
			})
			var params SimpleScalarXmlPropertiesInput
			result, err := client.SimpleScalarXmlProperties(context.Background(), &params)
			if err != nil {
				t.Fatalf("expect nil err, got %v", err)
			}
			if result == nil {
				t.Fatalf("expect not nil result")
			}
			// Compare while ignoring unexported middleware metadata.
			if err := smithytesting.CompareValues(c.ExpectResult, result, cmpopts.IgnoreUnexported(middleware.Metadata{})); err != nil {
				t.Errorf("expect c.ExpectResult value match:\n%v", err)
			}
		})
	}
}
explode_data.jsonl/3139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1487 }
[ 2830, 3393, 2959, 1098, 6456, 20639, 11593, 7903, 62, 8635, 50730, 17, 1631, 64465, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 2415, 14032, 60, 1235, 341, 197, 197, 15872, 262, 526, 198, 197, 197, 4047, 286, 1758, 15753, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_MapToMap2(t *testing.T) { type User struct { Id int Name string } params := g.Map{ "key": g.Map{ "id": 1, "name": "john", }, } gtest.C(t, func(t *gtest.T) { m := make(map[string]User) err := gconv.MapToMap(params, &m) t.Assert(err, nil) t.Assert(len(m), 1) t.Assert(m["key"].Id, 1) t.Assert(m["key"].Name, "john") }) gtest.C(t, func(t *gtest.T) { m := (map[string]User)(nil) err := gconv.MapToMap(params, &m) t.Assert(err, nil) t.Assert(len(m), 1) t.Assert(m["key"].Id, 1) t.Assert(m["key"].Name, "john") }) gtest.C(t, func(t *gtest.T) { m := make(map[string]*User) err := gconv.MapToMap(params, &m) t.Assert(err, nil) t.Assert(len(m), 1) t.Assert(m["key"].Id, 1) t.Assert(m["key"].Name, "john") }) gtest.C(t, func(t *gtest.T) { m := (map[string]*User)(nil) err := gconv.MapToMap(params, &m) t.Assert(err, nil) t.Assert(len(m), 1) t.Assert(m["key"].Id, 1) t.Assert(m["key"].Name, "john") }) }
explode_data.jsonl/41426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 540 }
[ 2830, 3393, 56992, 1249, 2227, 17, 1155, 353, 8840, 836, 8, 341, 13158, 2657, 2036, 341, 197, 67211, 256, 526, 198, 197, 21297, 914, 198, 197, 532, 25856, 1669, 342, 10104, 515, 197, 197, 1, 792, 788, 342, 10104, 515, 298, 197, 2870...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadASN1IntegerUnsigned(t *testing.T) { testData := []struct { in []byte out uint64 }{ {[]byte{2, 1, 0}, 0}, {[]byte{2, 1, 1}, 1}, {[]byte{2, 1, 2}, 2}, {[]byte{2, 1, 127}, 127}, {[]byte{2, 2, 0, 128}, 128}, {[]byte{2, 2, 1, 0}, 256}, {[]byte{2, 4, 0, 128, 0, 0}, 0x800000}, {[]byte{2, 8, 127, 255, 255, 255, 255, 255, 255, 255}, 0x7fffffffffffffff}, {[]byte{2, 9, 0, 128, 0, 0, 0, 0, 0, 0, 0}, 0x8000000000000000}, {[]byte{2, 9, 0, 255, 255, 255, 255, 255, 255, 255, 255}, 0xffffffffffffffff}, } for i, test := range testData { in := String(test.in) var out uint64 ok := in.ReadASN1Integer(&out) if !ok || out != test.out { t.Errorf("#%d: in.ReadASN1Integer() = %v, want true; out = %d, want %d", i, ok, out, test.out) } } }
explode_data.jsonl/16725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 4418, 68134, 16, 3486, 56421, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 3056, 1235, 341, 197, 17430, 220, 3056, 3782, 198, 197, 13967, 2622, 21, 19, 198, 197, 59403, 197, 197, 90, 1294, 3782, 90, 17, 11, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCommitInWindows(t *testing.T) { s := fmt.Sprintf(`cd /d %s & git pull & git add -A & git commit -m "%s"`, " E:/rox/roxliu.github.io", "publish post") log.Println("Run the command in windows: " + s) commitInWindows(s) }
explode_data.jsonl/43092
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 33441, 641, 13164, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 8879, 17305, 5809, 4385, 608, 67, 1018, 82, 609, 16345, 6815, 609, 16345, 912, 481, 32, 609, 16345, 5266, 481, 76, 5962, 82, 1, 7808, 330, 468, 14375, 54789, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSyncGroupByGroupKey(t *testing.T) { type args []models.UserGroup type result struct { wantError bool } cases := []struct { name string in args want result }{ { name: `normal test http group`, in: args{ models.UserGroup{GroupName: "orange", GroupType: common.HTTPGroupType}, models.UserGroup{GroupName: "apple", GroupType: common.HTTPGroupType}, models.UserGroup{GroupName: "pearl", GroupType: common.HTTPGroupType}}, want: result{false}, }, { name: `normal test oidc group`, in: args{ models.UserGroup{GroupName: "dog", GroupType: common.OIDCGroupType}, models.UserGroup{GroupName: "cat", GroupType: common.OIDCGroupType}, models.UserGroup{GroupName: "bee", GroupType: common.OIDCGroupType}, }, want: result{false}, }, { name: `normal test oidc group`, in: args{ models.UserGroup{GroupName: "cn=sync_user_group1,dc=example,dc=com", LdapGroupDN: "cn=sync_user_group1,dc=example,dc=com", GroupType: common.LDAPGroupType}, models.UserGroup{GroupName: "cn=sync_user_group2,dc=example,dc=com", LdapGroupDN: "cn=sync_user_group2,dc=example,dc=com", GroupType: common.LDAPGroupType}, models.UserGroup{GroupName: "cn=sync_user_group3,dc=example,dc=com", LdapGroupDN: "cn=sync_user_group3,dc=example,dc=com", GroupType: common.LDAPGroupType}, models.UserGroup{GroupName: "cn=sync_user_group4,dc=example,dc=com", LdapGroupDN: "cn=sync_user_group4,dc=example,dc=com", GroupType: common.LDAPGroupType}, }, want: result{false}, }, } for _, tt := range cases { t.Run(tt.name, func(t *testing.T) { got, err := PopulateGroup(tt.in) if err != nil && !tt.want.wantError { t.Errorf("error %v", err) } if !assert.Equal(t, len(tt.in), len(got)) { t.Errorf(`(%v) != %v; want "%v"`, len(tt.in), len(got), len(tt.in)) } for _, id := range got { DeleteUserGroup(id) } }) } }
explode_data.jsonl/73822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 812 }
[ 2830, 3393, 12154, 2808, 1359, 2808, 1592, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 3056, 6507, 7344, 2808, 198, 13158, 1102, 2036, 341, 197, 50780, 1454, 1807, 198, 197, 532, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 914, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5