text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestBTree(t *testing.T) { var tr btree spanMemo := make(map[int]roachpb.Span) // With degree == 16 (max-items/node == 31) we need 513 items in order for // there to be 3 levels in the tree. The count here is comfortably above // that. const count = 768 // Add keys in sorted order. for i := 0; i < count; i++ { tr.Set(newItem(span(i))) tr.Verify(t) if e := i + 1; e != tr.Len() { t.Fatalf("expected length %d, but found %d", e, tr.Len()) } checkIter(t, tr.MakeIter(), 0, i+1, spanMemo) } // Delete keys in sorted order. for i := 0; i < count; i++ { tr.Delete(newItem(span(i))) tr.Verify(t) if e := count - (i + 1); e != tr.Len() { t.Fatalf("expected length %d, but found %d", e, tr.Len()) } checkIter(t, tr.MakeIter(), i+1, count, spanMemo) } // Add keys in reverse sorted order. for i := 0; i < count; i++ { tr.Set(newItem(span(count - i))) tr.Verify(t) if e := i + 1; e != tr.Len() { t.Fatalf("expected length %d, but found %d", e, tr.Len()) } checkIter(t, tr.MakeIter(), count-i, count+1, spanMemo) } // Delete keys in reverse sorted order. for i := 0; i < count; i++ { tr.Delete(newItem(span(count - i))) tr.Verify(t) if e := count - (i + 1); e != tr.Len() { t.Fatalf("expected length %d, but found %d", e, tr.Len()) } checkIter(t, tr.MakeIter(), 1, count-i, spanMemo) } }
explode_data.jsonl/24880
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 581 }
[ 2830, 3393, 33, 6533, 1155, 353, 8840, 836, 8, 341, 2405, 489, 293, 9344, 198, 197, 1480, 46402, 1669, 1281, 9147, 18640, 60, 64500, 16650, 85309, 692, 197, 322, 3085, 8381, 621, 220, 16, 21, 320, 2810, 17459, 37211, 621, 220, 18, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func Test_AuthWithInvalidPassword_Gives403(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) { io.WriteString(w, "<html><body>Hello World!</body></html>") } w := httptest.NewRecorder() wantUser := "admin" wantPassword := "test" r := httptest.NewRequest(http.MethodGet, "http://localhost:8080", nil) r.SetBasicAuth(wantUser, wantPassword) wantCredentials := &BasicAuthCredentials{ User: wantUser, Password: "", } decorated := DecorateWithBasicAuth(handler, wantCredentials) decorated.ServeHTTP(w, r) wantCode := http.StatusUnauthorized if w.Code != wantCode { t.Errorf("status code, want: %d, got: %d", wantCode, w.Code) t.Fail() } }
explode_data.jsonl/27516
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 1566, 940, 2354, 7928, 4876, 2646, 1886, 19, 15, 18, 1155, 353, 8840, 836, 8, 1476, 53326, 1669, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 53112, 44747, 3622, 11, 4055, 1551, 1784, 2599, 79497, 4337, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAttachVolumeFailureUnmarshal(t *testing.T) { var error ErrorAttachVolumeFailure err := yaml.Unmarshal([]byte(testutil.AttachVolumeFailureYaml), &error) if err != nil { t.Error(err) } if error.NodeUUID != testutil.AgentUUID { t.Error("Wrong Node UUID field") } if error.InstanceUUID != testutil.InstanceUUID { t.Error("Wrong Instance UUID field") } if error.VolumeUUID != testutil.VolumeUUID { t.Error("Wrong Volume UUID field") } if error.Reason != AttachVolumeAttachFailure { t.Error("Wrong Error field") } }
explode_data.jsonl/76409
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 201 }
[ 2830, 3393, 30485, 18902, 17507, 1806, 27121, 1155, 353, 8840, 836, 8, 341, 2405, 1465, 4600, 30485, 18902, 17507, 198, 9859, 1669, 32246, 38097, 10556, 3782, 8623, 1314, 88284, 18902, 17507, 56, 9467, 701, 609, 841, 340, 743, 1848, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGroupCleanup(t *testing.T) { if os.Getuid() != 0 { t.Skip("we need root for credential") } cmd := exec.Command("id") cmd.SysProcAttr = &syscall.SysProcAttr{ Credential: &syscall.Credential{ Uid: 0, Gid: 0, }, } out, err := cmd.CombinedOutput() if err != nil { t.Fatalf("Cmd failed with err %v, output: %s", err, out) } strOut := strings.TrimSpace(string(out)) expected := "uid=0(root) gid=0(root)" // Just check prefix because some distros reportedly output a // context parameter; see https://golang.org/issue/16224. // Alpine does not output groups; see https://golang.org/issue/19938. if !strings.HasPrefix(strOut, expected) { t.Errorf("id command output: %q, expected prefix: %q", strOut, expected) } }
explode_data.jsonl/36119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 2808, 67335, 1155, 353, 8840, 836, 8, 341, 743, 2643, 2234, 2423, 368, 961, 220, 15, 341, 197, 3244, 57776, 445, 896, 1184, 3704, 369, 40207, 1138, 197, 532, 25920, 1669, 3883, 12714, 445, 307, 1138, 25920, 59418, 24508, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMaxQuerySamples(t *testing.T) { test, err := NewTest(t, ` load 10s metric 1+1x100 bigmetric{a="1"} 1+1x100 bigmetric{a="2"} 1+1x100 `) require.NoError(t, err) defer test.Close() err = test.Run() require.NoError(t, err) // These test cases should be touching the limit exactly (hence no exceeding). // Exceeding the limit will be tested by doing -1 to the MaxSamples. cases := []struct { Query string MaxSamples int Start time.Time End time.Time Interval time.Duration }{ // Instant queries. { Query: "1", MaxSamples: 1, Start: time.Unix(1, 0), }, { Query: "metric", MaxSamples: 1, Start: time.Unix(1, 0), }, { Query: "metric[20s]", MaxSamples: 2, Start: time.Unix(10, 0), }, { Query: "rate(metric[20s])", MaxSamples: 3, Start: time.Unix(10, 0), }, { Query: "metric[20s:5s]", MaxSamples: 3, Start: time.Unix(10, 0), }, { Query: "metric[20s] @ 10", MaxSamples: 2, Start: time.Unix(0, 0), }, // Range queries. { Query: "1", MaxSamples: 3, Start: time.Unix(0, 0), End: time.Unix(2, 0), Interval: time.Second, }, { Query: "1", MaxSamples: 3, Start: time.Unix(0, 0), End: time.Unix(2, 0), Interval: time.Second, }, { Query: "metric", MaxSamples: 3, Start: time.Unix(0, 0), End: time.Unix(2, 0), Interval: time.Second, }, { Query: "metric", MaxSamples: 3, Start: time.Unix(0, 0), End: time.Unix(10, 0), Interval: 5 * time.Second, }, { Query: "rate(bigmetric[1s])", MaxSamples: 1, Start: time.Unix(0, 0), End: time.Unix(10, 0), Interval: 5 * time.Second, }, { // Result is duplicated, so @ also produces 3 samples. Query: "metric @ 10", MaxSamples: 3, Start: time.Unix(0, 0), End: time.Unix(10, 0), Interval: 5 * time.Second, }, { // The peak samples in memory is during the first evaluation: // - Subquery takes 22 samples, 11 for each bigmetric, // - Result is calculated per series where the series samples is buffered, hence 11 more here. // - The result of two series is added before the last series buffer is discarded, so 2 more here. 
// Hence at peak it is 22 (subquery) + 11 (buffer of a series) + 2 (result from 2 series). // The subquery samples and the buffer is discarded before duplicating. Query: `rate(bigmetric[10s:1s] @ 10)`, MaxSamples: 35, Start: time.Unix(0, 0), End: time.Unix(10, 0), Interval: 5 * time.Second, }, { // Here the reasoning is same as above. But LHS and RHS are done one after another. // So while one of them takes 35 samples at peak, we need to hold the 2 sample // result of the other till then. Query: `rate(bigmetric[10s:1s] @ 10) + rate(bigmetric[10s:1s] @ 30)`, MaxSamples: 37, Start: time.Unix(0, 0), End: time.Unix(10, 0), Interval: 5 * time.Second, }, { // Sample as above but with only 1 part as step invariant. // Here the peak is caused by the non-step invariant part as it touches more time range. // Hence at peak it is 2*21 (subquery from 0s to 20s) // + 11 (buffer of a series per evaluation) // + 6 (result from 2 series at 3 eval times). Query: `rate(bigmetric[10s:1s]) + rate(bigmetric[10s:1s] @ 30)`, MaxSamples: 59, Start: time.Unix(10, 0), End: time.Unix(20, 0), Interval: 5 * time.Second, }, { // Nested subquery. // We saw that innermost rate takes 35 samples which is still the peak // since the other two subqueries just duplicate the result. Query: `rate(rate(bigmetric[10s:1s] @ 10)[100s:25s] @ 1000)[100s:20s] @ 2000`, MaxSamples: 35, Start: time.Unix(10, 0), }, { // Nested subquery. // Now the outmost subquery produces more samples than inner most rate. 
Query: `rate(rate(bigmetric[10s:1s] @ 10)[100s:25s] @ 1000)[17s:1s] @ 2000`, MaxSamples: 36, Start: time.Unix(10, 0), }, } engine := test.QueryEngine() for _, c := range cases { t.Run(c.Query, func(t *testing.T) { testFunc := func(expError error) { var err error var qry Query if c.Interval == 0 { qry, err = engine.NewInstantQuery(test.Queryable(), c.Query, c.Start) } else { qry, err = engine.NewRangeQuery(test.Queryable(), c.Query, c.Start, c.End, c.Interval) } require.NoError(t, err) res := qry.Exec(test.Context()) require.Equal(t, expError, res.Err) } // Within limit. engine.maxSamplesPerQuery = c.MaxSamples testFunc(nil) // Exceeding limit. engine.maxSamplesPerQuery = c.MaxSamples - 1 testFunc(ErrTooManySamples(env)) }) } }
explode_data.jsonl/35555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2335 }
[ 2830, 3393, 5974, 2859, 39571, 1155, 353, 8840, 836, 8, 341, 18185, 11, 1848, 1669, 1532, 2271, 1155, 11, 22074, 1078, 220, 16, 15, 82, 198, 220, 18266, 220, 16, 10, 16, 87, 16, 15, 15, 198, 220, 2409, 15903, 90, 64, 428, 16, 92...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDetachedHead(t *testing.T) { detachedHead := "345c470bf286aa3ca8e8cb5d68361d4a685ba7d1" expectedErr := fmt.Errorf("git is in detached HEAD state, HEAD is: %s", detachedHead) res, err := abbreviatedHead(detachedHead) if (err == nil) || (res != "") { t.Errorf("Expected error for input %v but received %v", detachedHead, res) } if errors.Is(err, expectedErr) { t.Errorf("Unexpected error for input %v: %v", detachedHead, err) } }
explode_data.jsonl/22101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 172 }
[ 2830, 3393, 17076, 3854, 12346, 1155, 353, 8840, 836, 8, 341, 2698, 295, 3854, 12346, 1669, 330, 18, 19, 20, 66, 19, 22, 15, 13233, 17, 23, 21, 5305, 18, 924, 23, 68, 23, 7221, 20, 67, 21, 23, 18, 21, 16, 67, 19, 64, 21, 23,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetFileSDScrapeWork(t *testing.T) { data := ` scrape_configs: - job_name: foo file_sd_configs: - files: [testdata/file_sd.json] ` var cfg Config if err := cfg.parse([]byte(data), "sss"); err != nil { t.Fatalf("cannot parase data: %s", err) } sws := cfg.getFileSDScrapeWork(nil) if !equalStaticConfigForScrapeWorks(sws, sws) { t.Fatalf("unexpected non-equal static configs;\nsws:\n%#v", sws) } // Load another static config dataNew := ` scrape_configs: - job_name: foo file_sd_configs: - files: [testdata/file_sd_1.yml] ` var cfgNew Config if err := cfgNew.parse([]byte(dataNew), "sss"); err != nil { t.Fatalf("cannot parse data: %s", err) } swsNew := cfgNew.getFileSDScrapeWork(sws) if equalStaticConfigForScrapeWorks(swsNew, sws) { t.Fatalf("unexpected equal static configs;\nswsNew:\n%#v\nsws:\n%#v", swsNew, sws) } // Try loading invalid static config data = ` scrape_configs: - job_name: foo file_sd_configs: - files: [testdata/prometheus.yml] ` if err := cfg.parse([]byte(data), "sss"); err != nil { t.Fatalf("cannot parse data: %s", err) } sws = cfg.getFileSDScrapeWork(swsNew) if len(sws) != 0 { t.Fatalf("unexpected non-empty sws:\n%#v", sws) } // Empty target in static config data = ` scrape_configs: - job_name: foo file_sd_configs: - files: [testdata/empty_target_file_sd.yml] ` if err := cfg.parse([]byte(data), "sss"); err != nil { t.Fatalf("cannot parse data: %s", err) } sws = cfg.getFileSDScrapeWork(swsNew) if len(sws) != 0 { t.Fatalf("unexpected non-empty sws:\n%#v", sws) } }
explode_data.jsonl/13543
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 689 }
[ 2830, 3393, 1949, 1703, 5491, 3326, 19842, 6776, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 22074, 2388, 19842, 59150, 510, 12, 2618, 1269, 25, 15229, 198, 220, 1034, 40168, 59150, 510, 220, 481, 3542, 25, 508, 92425, 23903, 40168, 4323,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestStrArray_ContainsI(t *testing.T) { gtest.C(t, func(t *gtest.T) { s := garray.NewStrArray() s.Append("a", "b", "C") t.Assert(s.Contains("A"), false) t.Assert(s.Contains("a"), true) t.Assert(s.ContainsI("A"), true) }) }
explode_data.jsonl/53081
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 2580, 1857, 62, 23805, 40, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 1903, 1669, 342, 1653, 7121, 2580, 1857, 741, 197, 1903, 8982, 445, 64, 497, 330, 65, 497, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListRepositoriesRaw(t *testing.T) { assert := assert.New(t) bb, apiBaseURL, teardown := bitbucketTestClient(stubRepositoriesResponse) defer teardown() resp, err := bb.RawRequest("GET", apiBaseURL+"/projects/VID/repos", "") if !assert.NoError(err) { return } defer resp.Close() assert.NotNil(resp) bytes, err := ioutil.ReadAll(resp) assert.NotEmpty(bytes) }
explode_data.jsonl/76092
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 852, 44814, 20015, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 2233, 65, 11, 6330, 3978, 3144, 11, 49304, 1669, 2699, 30410, 2271, 2959, 5895, 392, 44814, 2582, 340, 16867, 49304, 741, 34653, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUnionUnknownVariantType(test *testing.T) { v, err := parseRDLString(`type MyUnion Union<Blah>;`) if err == nil { test.Errorf("Invalid union variant not caught: %v", v) } }
explode_data.jsonl/74362
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 32658, 13790, 20746, 929, 8623, 353, 8840, 836, 8, 341, 5195, 11, 1848, 1669, 4715, 49, 16524, 703, 5809, 1313, 3017, 32658, 9145, 27, 4923, 1466, 65795, 24183, 743, 1848, 621, 2092, 341, 197, 18185, 13080, 445, 7928, 11300,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestParseValidTimeRange(t *testing.T) { timeRange, err := NewTimeRange("23:12-01:30") if err != nil { t.Fatal(err.Error()) } if timeRange.start.Hour() != 23 || timeRange.start.Minute() != 12 { t.Fatal("Failed to parse Start Time") } if timeRange.end.Hour() != 1 || timeRange.end.Minute() != 30 { t.Fatal("Failed to parse End Time") } }
explode_data.jsonl/61794
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 14463, 4088, 1462, 6046, 1155, 353, 8840, 836, 8, 341, 21957, 6046, 11, 1848, 1669, 1532, 1462, 6046, 445, 17, 18, 25, 16, 17, 12, 15, 16, 25, 18, 15, 5130, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 6141, 2398...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestMatrixToValues(t *testing.T) { // v0 is from t 0-500 counting all up from 0 to 500 v0 := make([]model.SamplePair, 500) e0 := [][]float64{ make([]float64, 500), make([]float64, 500), } for i := 0; i < cap(v0); i++ { v0[i] = model.SamplePair{ Timestamp: model.Time(i * 1000), Value: model.SampleValue(i), } e0[0][i] = float64(i) e0[1][i] = float64(i) } v10 := make([]model.SamplePair, 100) for i := 0; i < cap(v10); i++ { v10[i] = model.SamplePair{ Timestamp: model.Time(i * 1000), Value: model.SampleValue(i), } } // offset by first 50 samples v11 := make([]model.SamplePair, 250) for i := 0; i < cap(v11); i++ { v11[i] = model.SamplePair{ Timestamp: model.Time((i + 50) * 1000), Value: model.SampleValue(i), } } e1 := [][]float64{ make([]float64, 300), // [0-100] + [50-300] make([]float64, 300), make([]float64, 300), } for i := 0; i < 300; i++ { e1[0][i] = float64(i) } for i := 0; i < 100; i++ { e1[1][i] = float64(i) } for i := 0; i < 250; i++ { e1[2][50+i] = float64(i) } // Check if NaNs are returned as 0 (it's fine for errors for example to convert these). // Additionally, NaNs aren't possible to be marshalled to JSON. Not sure if there's a better way. 
v2 := make([]model.SamplePair, 100) for i := 0; i < cap(v2); i++ { v2[i] = model.SamplePair{ Timestamp: model.Time(i * 1000), Value: model.SampleValue(math.NaN()), } } e2 := [][]float64{ make([]float64, 100), make([]float64, 100), } for i := 0; i < len(e2[0]); i++ { e2[0][i] = float64(i) } // Check NaN in multiple series v3 := make([]model.SamplePair, 100) for i := 0; i < len(v3); i++ { value := float64(i) if i%11 == 0 { value = math.NaN() } v3[i] = model.SamplePair{ Timestamp: model.Time(i * 1000), Value: model.SampleValue(value), } } e3 := [][]float64{ make([]float64, 100), // x make([]float64, 100), // y[0] make([]float64, 100), // y[1] } for i := 0; i < len(e3[0]); i++ { e32value := float64(i) if i%11 == 0 { e32value = 0 } e3[0][i] = float64(i) e3[1][i] = 0 e3[2][i] = e32value } for _, tc := range []struct { name string m []*model.SampleStream expected [][]float64 }{{ name: "empty", }, { name: "simple", m: []*model.SampleStream{{Values: v0}}, expected: e0, }, { name: "overlapping", m: []*model.SampleStream{{Values: v10}, {Values: v11}}, expected: e1, }, { name: "NaN", m: []*model.SampleStream{{Values: v2}}, expected: e2, }, { name: "NaNMultiple", m: []*model.SampleStream{{Values: v2}, {Values: v3}}, expected: e3, }} { t.Run(tc.name, func(t *testing.T) { require.Equal(t, tc.expected, matrixToValues(tc.m)) }) } }
explode_data.jsonl/48793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1361 }
[ 2830, 3393, 6689, 1249, 6227, 1155, 353, 8840, 836, 8, 341, 197, 322, 348, 15, 374, 504, 259, 220, 15, 12, 20, 15, 15, 25009, 678, 705, 504, 220, 15, 311, 220, 20, 15, 15, 198, 5195, 15, 1669, 1281, 10556, 2528, 76266, 12443, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRedoLogger(t *testing.T) { provider, cleanup := redologTestSetup(t) defer cleanup() loggers := []*redoLogger{} records := []*redoRecord{} verifyLogRecords := func() { for i := 0; i < len(loggers); i++ { retrievedRec, err := loggers[i].load() assert.NoError(t, err) assert.Equal(t, records[i], retrievedRec) } } // write log records for multiple channels for i := 0; i < 10; i++ { logger := provider.newRedoLogger(fmt.Sprintf("channel-%d", i)) rec, err := logger.load() assert.NoError(t, err) assert.Nil(t, rec) loggers = append(loggers, logger) batch := statedb.NewUpdateBatch() blkNum := uint64(i) batch.Put("ns1", "key1", []byte("value1"), version.NewHeight(blkNum, 1)) batch.Put("ns2", string([]byte{0x00, 0xff}), []byte("value3"), version.NewHeight(blkNum, 3)) batch.PutValAndMetadata("ns2", string([]byte{0x00, 0xff}), []byte("value3"), []byte("metadata"), version.NewHeight(blkNum, 4)) batch.Delete("ns2", string([]byte{0xff, 0xff}), version.NewHeight(blkNum, 5)) rec = &redoRecord{ UpdateBatch: batch, Version: version.NewHeight(blkNum, 10), } records = append(records, rec) assert.NoError(t, logger.persist(rec)) } verifyLogRecords() // overwrite logrecord for one channel records[5].UpdateBatch = statedb.NewUpdateBatch() records[5].Version = version.NewHeight(5, 5) assert.NoError(t, loggers[5].persist(records[5])) verifyLogRecords() }
explode_data.jsonl/2697
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 581 }
[ 2830, 3393, 6033, 78, 7395, 1155, 353, 8840, 836, 8, 341, 197, 19979, 11, 21290, 1669, 2518, 1609, 2271, 21821, 1155, 340, 16867, 21290, 2822, 6725, 10637, 1669, 29838, 63561, 7395, 16094, 197, 26203, 1669, 29838, 63561, 6471, 31483, 9358...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPodSpecForCSIWithOlderCSIVersion(t *testing.T) { fakeClient := fakek8sclient.NewSimpleClientset() coreops.SetInstance(coreops.New(fakeClient)) // Should use 0.3 csi version for k8s version less than 1.13 fakeClient.Discovery().(*fakediscovery.FakeDiscovery).FakedServerVersion = &version.Info{ GitVersion: "v1.12.8", } expected := getExpectedPodSpecFromDaemonset(t, "testspec/px_csi_0.3.yaml") nodeName := "testNode" cluster := &corev1.StorageCluster{ ObjectMeta: metav1.ObjectMeta{ Name: "px-cluster", Namespace: "kube-system", }, Spec: corev1.StorageClusterSpec{ Image: "portworx/oci-monitor:2.1.1", FeatureGates: map[string]string{ string(pxutil.FeatureCSI): "true", }, }, Status: corev1.StorageClusterStatus{ DesiredImages: &corev1.ComponentImages{ CSIDriverRegistrar: "quay.io/k8scsi/driver-registrar:v0.4.2", }, }, } driver := portworx{} actual, err := driver.GetStoragePodSpec(cluster, nodeName) assert.NoError(t, err, "Unexpected error on GetStoragePodSpec") assertPodSpecEqual(t, expected, &actual) // Update Portworx version, which should use new CSI driver name cluster.Spec.Image = "portworx/oci-monitor:2.2" actual, err = driver.GetStoragePodSpec(cluster, nodeName) assert.NoError(t, err, "Unexpected error on GetStoragePodSpec") assert.Equal(t, actual.Containers[1].Args[3], "--kubelet-registration-path=/var/lib/kubelet/csi-plugins/pxd.portworx.com/csi.sock", ) }
explode_data.jsonl/55460
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 593 }
[ 2830, 3393, 23527, 8327, 2461, 48407, 2354, 18284, 261, 6412, 3090, 1325, 1155, 353, 8840, 836, 8, 341, 1166, 726, 2959, 1669, 12418, 74, 23, 82, 2972, 7121, 16374, 2959, 746, 741, 71882, 3721, 4202, 2523, 47867, 3721, 7121, 74138, 2959...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetPlanFilename(t *testing.T) { cases := []struct { workspace string maybeCfg *valid.Project exp string }{ { "workspace", nil, "workspace.tfplan", }, { "workspace", &valid.Project{}, "workspace.tfplan", }, { "workspace", &valid.Project{ Name: String("project"), }, "project-workspace.tfplan", }, } for i, c := range cases { t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) { Equals(t, c.exp, runtime.GetPlanFilename(c.workspace, c.maybeCfg)) }) } }
explode_data.jsonl/42529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 1949, 20485, 20759, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 197, 42909, 914, 198, 197, 2109, 49791, 42467, 220, 353, 1891, 30944, 198, 197, 48558, 981, 914, 198, 197, 59403, 197, 197, 515, 298...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTagMissingColon(t *testing.T) { var opts = struct { Value bool `short` }{} assertParseFail(t, ErrTag, "expected `:' after key name, but got end of tag (in `short`)", &opts, "") }
explode_data.jsonl/44075
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 5668, 25080, 88665, 1155, 353, 8840, 836, 8, 972, 2405, 12185, 284, 2036, 972, 197, 47399, 1807, 1565, 8676, 75679, 197, 15170, 2570, 6948, 14463, 19524, 1155, 11, 15495, 5668, 11, 330, 7325, 1565, 4884, 1283, 1376, 829, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestMatcherLookupByExtension(t *testing.T) { // If we are not on CI skip the test. if os.Getenv("CI") == "" { t.Skip("Not on CI, skipping comby-dependent test") } t.Parallel() input := map[string]string{ "file_without_extension": ` /* This foo(plain.empty) {} is in a Go comment should not match in Go, but should match in plaintext */ func foo(go.empty) {} `, "file.go": ` /* This foo(plain.go) {} is in a Go comment should not match in Go, but should match in plaintext */ func foo(go.go) {} `, "file.txt": ` /* This foo(plain.txt) {} is in a Go comment should not match in Go, but should match in plaintext */ func foo(go.txt) {} `, } zipData, err := testutil.CreateZip(input) if err != nil { t.Fatal(err) } zf, cleanup, err := testutil.TempZipFileOnDisk(zipData) if err != nil { t.Fatal(err) } defer cleanup() test := func(language, filename string) string { var languages []string if language != "" { languages = []string{language} } extensionHint := filepath.Ext(filename) ctx, cancel, sender := newLimitedStreamCollector(context.Background(), 1000000000) defer cancel() err := structuralSearch(ctx, zf, All, extensionHint, "foo(:[args])", "", languages, "repo_foo", sender) if err != nil { return "ERROR: " + err.Error() } var got []string for _, fileMatches := range sender.collected { for _, m := range fileMatches.LineMatches { got = append(got, m.Preview) } } sort.Strings(got) return strings.Join(got, " ") } cases := []struct { name string want string language string filename string }{{ name: "No language and no file extension => .generic matcher", want: "foo(go.empty) foo(go.go) foo(go.txt) foo(plain.empty) foo(plain.go) foo(plain.txt)", language: "", filename: "file_without_extension", }, { name: "No language and .go file extension => .go matcher", want: "foo(go.empty) foo(go.go) foo(go.txt)", language: "", filename: "a/b/c/file.go", }, { name: "Language Go and no file extension => .go matcher", want: "foo(go.empty) foo(go.go) foo(go.txt)", language: "go", filename: 
"", }, { name: "Language .go and .txt file extension => .go matcher", want: "foo(go.empty) foo(go.go) foo(go.txt)", language: "go", filename: "file.txt", }} t.Run("group", func(t *testing.T) { for _, tc := range cases { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() got := test(tc.language, tc.filename) if d := cmp.Diff(tc.want, got); d != "" { t.Errorf("mismatch (-want +got):\n%s", d) } }) } }) }
explode_data.jsonl/5442
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1059 }
[ 2830, 3393, 37554, 34247, 1359, 12049, 1155, 353, 8840, 836, 8, 341, 197, 322, 1416, 582, 525, 537, 389, 20694, 10706, 279, 1273, 624, 743, 2643, 64883, 445, 11237, 899, 621, 1591, 341, 197, 3244, 57776, 445, 2623, 389, 20694, 11, 426...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLocalKMS_ImportPrivateKey(t *testing.T) { // create a real (not mocked) master key and secret lock to test the KMS end to end sl := createMasterKeyAndSecretLock(t) storeDB := make(map[string][]byte) // test New() kmsService, e := New(testMasterKeyURI, &mockProvider{ storage: mockstorage.NewCustomMockStoreProvider( &mockstorage.MockStore{ Store: storeDB, }), secretLock: sl, }) require.NoError(t, e) require.NotEmpty(t, kmsService) // test import with nil key _, _, err := kmsService.ImportPrivateKey(nil, kms.ECDSAP256TypeDER) require.EqualError(t, err, "import private key does not support this key type or key is public") flagTests := []struct { tcName string keyType kms.KeyType curve elliptic.Curve setID bool ksID string }{ { tcName: "import private key using ECDSAP256DER type", keyType: kms.ECDSAP256TypeDER, curve: elliptic.P256(), }, { tcName: "import private key using ECDSAP384TypeDER type", keyType: kms.ECDSAP384TypeDER, curve: elliptic.P384(), }, { tcName: "import private key using ECDSAP521TypeDER type", keyType: kms.ECDSAP521TypeDER, curve: elliptic.P521(), }, { tcName: "import private key using ECDSAP256TypeIEEEP1363 type", keyType: kms.ECDSAP256TypeIEEEP1363, curve: elliptic.P256(), }, { tcName: "import private key using ECDSAP384TypeIEEEP1363 type", keyType: kms.ECDSAP384TypeIEEEP1363, curve: elliptic.P384(), }, { tcName: "import private key using ECDSAP521TypeIEEEP1363 type", keyType: kms.ECDSAP521TypeIEEEP1363, curve: elliptic.P521(), }, { tcName: "import private key using ED25519Type type", keyType: kms.ED25519Type, }, { tcName: "import private key using ECDSAP256DER type and a set empty KeyID", keyType: kms.ECDSAP256TypeDER, curve: elliptic.P256(), setID: true, ksID: "", }, { tcName: "import private key using ECDSAP256DER type and a set non empty KeyID", keyType: kms.ECDSAP256TypeDER, curve: elliptic.P256(), setID: true, ksID: base64.RawURLEncoding.EncodeToString(random.GetRandomBytes( uint32(base64.RawURLEncoding.DecodedLen(maxKeyIDLen)))), }, 
{ tcName: "import private key using ECDSAP256DER type and a set non KeyID larger than maxKeyIDLen", keyType: kms.ECDSAP256TypeDER, curve: elliptic.P256(), setID: true, ksID: base64.RawURLEncoding.EncodeToString(random.GetRandomBytes( uint32(base64.RawURLEncoding.DecodedLen(30)))), }, } for _, tc := range flagTests { tt := tc t.Run(tt.tcName, func(t *testing.T) { if tt.keyType == kms.ED25519Type { pubKey, privKey, err := ed25519.GenerateKey(rand.Reader) require.NoError(t, err) ksID, _, err := kmsService.ImportPrivateKey(privKey, tt.keyType) require.NoError(t, err) pubKeyBytes, err := kmsService.ExportPubKeyBytes(ksID) require.NoError(t, err) require.EqualValues(t, pubKey, pubKeyBytes) return } privKey, err := ecdsa.GenerateKey(tt.curve, rand.Reader) require.NoError(t, err) ksID := "" // test ImportPrivateKey if tt.setID { // with set keyset ID ksID, _, err = kmsService.ImportPrivateKey(privKey, tt.keyType, kms.WithKeyID(tt.ksID)) require.NoError(t, err) // calling ImportPrivatekeyt and WithKeyID("") will ignore the set KeyID and generate a new one if tt.ksID != "" { require.Equal(t, tt.ksID, ksID) } } else { // generate a new keyset ID ksID, _, err = kmsService.ImportPrivateKey(privKey, tt.keyType) require.NoError(t, err) } // export marshaled public key to verify it against the original public key (marshalled) pubKeyBytes, err := kmsService.ExportPubKeyBytes(ksID) require.NoError(t, err) switch tt.keyType { case kms.ECDSAP256TypeDER, kms.ECDSAP384TypeDER, kms.ECDSAP521TypeDER: pubKey, err := x509.MarshalPKIXPublicKey(privKey.Public()) require.NoError(t, err) require.EqualValues(t, pubKey, pubKeyBytes) case kms.ECDSAP256TypeIEEEP1363, kms.ECDSAP384TypeIEEEP1363, kms.ECDSAP521TypeIEEEP1363: pubKey := elliptic.Marshal(tt.curve, privKey.X, privKey.Y) require.EqualValues(t, pubKey, pubKeyBytes) } }) } }
explode_data.jsonl/33101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1842 }
[ 2830, 3393, 7319, 42, 4826, 62, 11511, 75981, 1155, 353, 8840, 836, 8, 341, 197, 322, 1855, 264, 1931, 320, 1921, 46149, 8, 7341, 1376, 323, 6234, 5296, 311, 1273, 279, 730, 4826, 835, 311, 835, 198, 78626, 1669, 1855, 18041, 1592, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestExecValid(t *testing.T) { tests := []Exec{ Exec{"/bin/httpd"}, Exec{"/app"}, Exec{"/app", "arg1", "arg2"}, Exec{"app"}, } for i, tt := range tests { if err := tt.assertValid(); err != nil { t.Errorf("#%d: err == %v, want nil", i, err) } } }
explode_data.jsonl/66659
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 10216, 4088, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 10216, 515, 197, 197, 10216, 90, 3115, 6863, 15627, 67, 7115, 197, 197, 10216, 90, 3115, 676, 7115, 197, 197, 10216, 90, 3115, 676, 497, 330, 858, 16, 497, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLogin(t *testing.T) { defer gock.Off() tests := []struct { user string pass string path string auth string tokens []*token token *token err error }{ // Success, match found. { user: "janedoe", pass: "password", path: "/api/v1/users/janedoe/token", auth: "Basic amFuZWRvZTpwYXNzd29yZA==", token: &token{Name: "default", Sha1: "3da541559"}, tokens: []*token{{Name: "default", Sha1: "3da541559"}}, }, // Success, match not found, token created. { user: "janedoe", pass: "password", path: "/api/v1/users/janedoe/token", auth: "Basic amFuZWRvZTpwYXNzd29yZA==", token: &token{Name: "default", Sha1: "918a808c2"}, tokens: []*token{}, }, // Failure, error getting token list. { user: "janedoe", pass: "password", path: "/api/v1/users/janedoe/token", auth: "Basic amFuZWRvZTpwYXNzd29yZA==", tokens: nil, token: nil, err: errors.New("Not Found"), }, // Failure, match not found, error creating token. { user: "janedoe", pass: "password", path: "/api/v1/users/janedoe/token", auth: "Basic amFuZWRvZTpwYXNzd29yZA==", tokens: []*token{{Name: "some-random-token-name", Sha1: "918a808c2"}}, token: nil, err: errors.New("Not Found"), }, } for _, test := range tests { gock.Flush() if test.tokens != nil { gock.New("https://gogs.io"). Get("/api/v1/users/janedoe/token"). MatchHeader("Authorization", test.auth). Reply(200). JSON(test.tokens) } else { gock.New("https://gogs.io"). Get("/api/v1/users/janedoe/token"). Reply(404) } if test.token != nil { gock.New("https://gogs.io"). Post("/api/v1/users/janedoe/token"). MatchHeader("Authorization", test.auth). Reply(200). JSON(test.token) } else { gock.New("https://gogs.io"). Post("/api/v1/users/janedoe/token"). 
Reply(404) } var ctx context.Context fn := func(w http.ResponseWriter, r *http.Request) { ctx = r.Context() } v := &Config{ Server: "https://try.gogs.io", Login: "/login/form", } h := v.Handler( http.HandlerFunc(fn), ) data := url.Values{ "username": {test.user}, "password": {test.pass}, }.Encode() res := httptest.NewRecorder() req := httptest.NewRequest("POST", "/", strings.NewReader(data)) req.Header.Set( "Content-Type", "application/x-www-form-urlencoded", ) h.ServeHTTP(res, req) tok := login.TokenFrom(ctx) err := login.ErrorFrom(ctx) if test.err != nil { if err == nil { t.Errorf("Want error") } else if got, want := err.Error(), test.err.Error(); got != want { t.Errorf("Want error %q, got %q", want, got) } } else { if tok == nil { t.Errorf("Want user token, got nil") } else if got, want := tok.Access, test.token.Sha1; got != want { t.Errorf("Want access token %s, got %s", want, got) } } } }
explode_data.jsonl/36375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1391 }
[ 2830, 3393, 6231, 1155, 353, 8840, 836, 8, 341, 16867, 728, 377, 13, 4596, 2822, 78216, 1669, 3056, 1235, 341, 197, 19060, 256, 914, 198, 197, 41431, 256, 914, 198, 197, 26781, 256, 914, 198, 197, 78011, 256, 914, 198, 197, 3244, 97...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMergeConfigs(t *testing.T) { fakeConfig := func(u string) *rest.Config { return &rest.Config{Username: u} } cases := []struct { name string local *rest.Config foreign map[string]rest.Config current string expected map[string]rest.Config err bool }{ { name: "require at least one cluster", err: true, }, { name: "only local cluster", local: fakeConfig("local"), expected: map[string]rest.Config{ InClusterContext: *fakeConfig("local"), DefaultClusterAlias: *fakeConfig("local"), }, }, { name: "foreign without local uses current as default", foreign: map[string]rest.Config{ "current-context": *fakeConfig("current"), }, current: "current-context", expected: map[string]rest.Config{ InClusterContext: *fakeConfig("current"), DefaultClusterAlias: *fakeConfig("current"), "current-context": *fakeConfig("current"), }, }, { name: "reject only foreign without a current context", foreign: map[string]rest.Config{ DefaultClusterAlias: *fakeConfig("default"), }, err: true, }, { name: "accept only foreign with default", foreign: map[string]rest.Config{ DefaultClusterAlias: *fakeConfig("default"), "random-context": *fakeConfig("random"), }, current: "random-context", expected: map[string]rest.Config{ InClusterContext: *fakeConfig("random"), DefaultClusterAlias: *fakeConfig("default"), "random-context": *fakeConfig("random"), }, }, { name: "accept local and foreign, using local for default", local: fakeConfig("local"), foreign: map[string]rest.Config{ "random-context": *fakeConfig("random"), }, current: "random-context", expected: map[string]rest.Config{ InClusterContext: *fakeConfig("local"), DefaultClusterAlias: *fakeConfig("local"), "random-context": *fakeConfig("random"), }, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { actual, err := mergeConfigs(tc.local, tc.foreign, tc.current) switch { case err != nil: if !tc.err { t.Errorf("unexpected error: %v", err) } case tc.err: t.Error("failed to receive an error") case !equality.Semantic.DeepEqual(actual, 
tc.expected): t.Errorf("configs do not match:\n%s", diff.ObjectReflectDiff(tc.expected, actual)) } }) } }
explode_data.jsonl/80357
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 949 }
[ 2830, 3393, 52096, 84905, 1155, 353, 8840, 836, 8, 341, 1166, 726, 2648, 1669, 2915, 8154, 914, 8, 353, 3927, 10753, 314, 470, 609, 3927, 10753, 90, 11115, 25, 575, 92, 456, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFilterServiceInstancesByPlanName(t *testing.T) { Convey("Test empty array return empty array", t, func() { result := FilterServiceInstancesByPlanName([]ServiceInstance{}, "test") So(result, ShouldResemble, []ServiceInstance{}) }) Convey("Test one matching by lowercase name instance returned from array", t, func() { instances := []ServiceInstance{{ServiceName: "Test", ServicePlanName: "TEST"}} result := FilterServiceInstancesByPlanName(instances, "test") So(len(result), ShouldEqual, 1) So(result[0].ServiceName, ShouldEqual, "Test") }) Convey("Test many matching by lowercase and upercase instances returned from array", t, func() { instances := []ServiceInstance{ {ServiceName: "TEST", Id: "1", ServicePlanName: "test"}, {ServiceName: "TeST", Id: "2", ServicePlanName: "TEST"}, {ServiceName: "test", Id: "3", ServicePlanName: "Test"}, {ServiceName: "notest", Id: "4", ServicePlanName: "test2"}, } result := FilterServiceInstancesByPlanName(instances, "test") So(len(result), ShouldEqual, 3) So(result[2].Id, ShouldEqual, "3") }) }
explode_data.jsonl/9315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 371 }
[ 2830, 3393, 5632, 1860, 42725, 1359, 20485, 675, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 2271, 4287, 1334, 470, 4287, 1334, 497, 259, 11, 2915, 368, 341, 197, 9559, 1669, 12339, 1860, 42725, 1359, 20485, 675, 10556, 1860, 2523, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestActiveSendMSSLessThanMTU(t *testing.T) { const maxPayload = 100 c := context.New(t, 65535) defer c.Cleanup() c.CreateConnectedWithRawOptions(context.TestInitialSequenceNumber, 30000, -1 /* epRcvBuf */, []byte{ header.TCPOptionMSS, 4, byte(maxPayload / 256), byte(maxPayload % 256), }) testBrokenUpWrite(t, c, maxPayload) }
explode_data.jsonl/75969
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 5728, 11505, 44, 22594, 433, 26067, 8505, 52, 1155, 353, 8840, 836, 8, 341, 4777, 1932, 29683, 284, 220, 16, 15, 15, 198, 1444, 1669, 2266, 7121, 1155, 11, 220, 21, 20, 20, 18, 20, 340, 16867, 272, 727, 60639, 2822, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDamerauLevenshtein(t *testing.T) { for _, row := range dlev_testdata { res := DamerauLevenshtein(row[0], row[1]) expected, err := strconv.Atoi(row[2]) if err != nil { t.Error("bad row in test data") } if res != expected { t.Errorf("DamerauLevenshtein(%q, %q) => %d, expected %d", row[0], row[1], res, expected) } } }
explode_data.jsonl/58058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 35, 3436, 84, 2304, 21097, 55499, 258, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2802, 1669, 2088, 294, 3449, 4452, 691, 341, 197, 10202, 1669, 422, 3436, 84, 2304, 21097, 55499, 258, 7835, 58, 15, 1125, 2802, 58, 16, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCheckStringArray(t *testing.T) { l := lua.NewState() lua.DoString(l, `x = {"a=5", "b=6", "c=7"}`) l.Global("x") res := checkStringArray(l, -1) if len(res) != 3 { t.Fatal("Unexpected length of ", res) } if res[0] != "a=5" { t.Error("First element has wrong value", res[0]) } if res[1] != "b=6" { t.Error("First element has wrong value", res[1]) } if res[2] != "c=7" { t.Error("First element has wrong value", res[2]) } }
explode_data.jsonl/40381
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 3973, 703, 1857, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 20357, 7121, 1397, 741, 44822, 33596, 703, 2333, 11, 1565, 87, 284, 5212, 64, 28, 20, 497, 330, 65, 28, 21, 497, 330, 66, 28, 22, 1, 27085, 8810, 27381, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIterStack(t *testing.T) { f := func(i int) iterFrame { return iterFrame{pos: int16(i)} } var is iterStack for i := 1; i <= 2*len(iterStackArr{}); i++ { var j int for j = 0; j < i; j++ { is.push(f(j)) } require.Equal(t, j, is.len()) for j--; j >= 0; j-- { require.Equal(t, f(j), is.pop()) } is.reset() } }
explode_data.jsonl/24884
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 8537, 4336, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 2915, 1956, 526, 8, 5367, 4369, 314, 470, 5367, 4369, 90, 966, 25, 526, 16, 21, 1956, 9139, 456, 2405, 374, 5367, 4336, 198, 2023, 600, 1669, 220, 16, 26, 600, 2651...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapValidatingRefreshable(t *testing.T) { r := refreshable.NewDefaultRefreshable("https://palantir.com:443") vr, err := refreshable.NewMapValidatingRefreshable(r, func(i interface{}) (interface{}, error) { return url.Parse(i.(string)) }) require.NoError(t, err) require.NoError(t, vr.LastValidateErr()) require.Equal(t, r.Current().(string), "https://palantir.com:443") require.Equal(t, vr.Current().(*url.URL).Hostname(), "palantir.com") // attempt bad update err = r.Update(":::error.com") require.NoError(t, err, "no err expected from default refreshable") assert.Equal(t, r.Current().(string), ":::error.com") require.EqualError(t, vr.LastValidateErr(), "parse \":::error.com\": missing protocol scheme", "expected err from validating refreshable") assert.Equal(t, vr.Current().(*url.URL).Hostname(), "palantir.com", "expected unchanged validating refreshable") // attempt good update require.NoError(t, r.Update("https://example.com")) require.NoError(t, vr.LastValidateErr()) require.Equal(t, r.Current().(string), "https://example.com") require.Equal(t, vr.Current().(*url.URL).Hostname(), "example.com") }
explode_data.jsonl/30065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 408 }
[ 2830, 3393, 2227, 4088, 1095, 14567, 480, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 10408, 480, 7121, 3675, 14567, 480, 445, 2428, 1110, 19308, 517, 404, 905, 25, 19, 19, 18, 1138, 5195, 81, 11, 1848, 1669, 10408, 480, 7121, 2227, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetReadingsByDeviceAndValueDescriptor(t *testing.T) { reset() myMock := &dbMock.DBClient{} myMock.On("ReadingsByDeviceAndValueDescriptor", mock.Anything, mock.Anything, mock.Anything).Return([]models.Reading{}, nil) dbClient = myMock _, err := getReadingsByDeviceAndValueDescriptor("valid", "valid", 0, logger.NewMockClient()) if err != nil { t.Errorf("Unexpected error getting readings by device and value descriptor") } }
explode_data.jsonl/48185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 1949, 4418, 819, 1359, 6985, 3036, 1130, 11709, 1155, 353, 8840, 836, 8, 341, 70343, 741, 13624, 11571, 1669, 609, 1999, 11571, 22537, 2959, 31483, 13624, 11571, 8071, 445, 4418, 819, 1359, 6985, 3036, 1130, 11709, 497, 7860, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMatInvert(t *testing.T) { src := NewMatWithSize(4, 4, MatTypeCV32F) // only implemented for symm. Mats defer src.Close() dst := NewMat() defer dst.Close() Invert(src, &dst, 0) if dst.Empty() { t.Error("Invert dst should not be empty.") } }
explode_data.jsonl/81750
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 11575, 641, 1621, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 1532, 11575, 2354, 1695, 7, 19, 11, 220, 19, 11, 6867, 929, 19589, 18, 17, 37, 8, 442, 1172, 11537, 369, 7886, 76, 13, 67179, 198, 16867, 2286, 10421, 2822, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMergePullRequestAction(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) user := AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) repo := AssertExistsAndLoadBean(t, &Repository{ID: 1, OwnerID: user.ID}).(*Repository) repo.Owner = user issue := AssertExistsAndLoadBean(t, &Issue{ID: 3, RepoID: repo.ID}).(*Issue) actionBean := &Action{ OpType: ActionMergePullRequest, ActUserID: user.ID, ActUser: user, RepoID: repo.ID, Repo: repo, IsPrivate: repo.IsPrivate, } AssertNotExistsBean(t, actionBean) assert.NoError(t, MergePullRequestAction(user, repo, issue)) AssertExistsAndLoadBean(t, actionBean) CheckConsistencyFor(t, &Action{}) }
explode_data.jsonl/74213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 52096, 36068, 1900, 2512, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 2398, 19060, 1669, 5319, 15575, 3036, 5879, 10437, 1155, 11, 609, 1474, 90, 915, 25, 220, 17, 16630, 4071, 1474, 340, 17200, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAsPatch(t *testing.T) { r, _ := Prepare(mocks.NewRequest(), AsPatch()) if r.Method != "PATCH" { t.Fatal("autorest: AsPatch failed to set HTTP method header to PATCH") } }
explode_data.jsonl/20957
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 2121, 43622, 1155, 353, 8840, 836, 8, 972, 7000, 11, 716, 1669, 31166, 1255, 25183, 75274, 1507, 1634, 43622, 13116, 743, 435, 20798, 961, 330, 31165, 1, 972, 197, 3244, 26133, 445, 2717, 41419, 25, 1634, 43622, 4641, 311, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestGetNotExistingKeyWithDefaultInt(t *testing.T) { name, ok := Config().GetInt("XXXXX", 72) if name != 72 { t.Errorf("Expected 72 and got '%d'", name) } if ok { t.Errorf("Expected ok=false and got ok=%+v", ok) } }
explode_data.jsonl/51567
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 1949, 2623, 53067, 1592, 2354, 3675, 1072, 1155, 353, 8840, 836, 8, 341, 11609, 11, 5394, 1669, 5532, 1005, 85097, 445, 23830, 55, 497, 220, 22, 17, 692, 743, 829, 961, 220, 22, 17, 341, 197, 3244, 13080, 445, 18896, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAtKeyframes(t *testing.T) { expectPrinted(t, "@keyframes {}", "@keyframes \"\" {\n}\n") expectPrinted(t, "@keyframes name{}", "@keyframes name {\n}\n") expectPrinted(t, "@keyframes name {}", "@keyframes name {\n}\n") expectPrinted(t, "@keyframes name{0%,50%{color:red}25%,75%{color:blue}}", "@keyframes name {\n 0%, 50% {\n color: red;\n }\n 25%, 75% {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name { 0%, 50% { color: red } 25%, 75% { color: blue } }", "@keyframes name {\n 0%, 50% {\n color: red;\n }\n 25%, 75% {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name{from{color:red}to{color:blue}}", "@keyframes name {\n from {\n color: red;\n }\n to {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name { from { color: red } to { color: blue } }", "@keyframes name {\n from {\n color: red;\n }\n to {\n color: blue;\n }\n}\n") expectPrinted(t, "@keyframes name { from { color: red } }", "@keyframes name {\n from {\n color: red;\n }\n}\n") expectPrinted(t, "@keyframes name { 100% { color: red } }", "@keyframes name {\n 100% {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@keyframes name { from { color: red } }", "@keyframes name {\n 0% {\n color: red;\n }\n}\n") expectPrintedMangle(t, "@keyframes name { 100% { color: red } }", "@keyframes name {\n to {\n color: red;\n }\n}\n") expectPrinted(t, "@-webkit-keyframes name {}", "@-webkit-keyframes name {\n}\n") expectPrinted(t, "@-moz-keyframes name {}", "@-moz-keyframes name {\n}\n") expectPrinted(t, "@-ms-keyframes name {}", "@-ms-keyframes name {\n}\n") expectPrinted(t, "@-o-keyframes name {}", "@-o-keyframes name {\n}\n") expectParseError(t, "@keyframes {}", "<stdin>: warning: Expected identifier but found \"{\"\n") expectParseError(t, "@keyframes 'name' {}", "<stdin>: warning: Expected identifier but found \"'name'\"\n") expectParseError(t, "@keyframes name { 0% 100% {} }", "<stdin>: warning: Expected \",\" but found \"100%\"\n") expectParseError(t, "@keyframes name { {} 0% {} }", 
"<stdin>: warning: Expected percentage but found \"{\"\n") expectParseError(t, "@keyframes name { 100 {} }", "<stdin>: warning: Expected percentage but found \"100\"\n") expectParseError(t, "@keyframes name { into {} }", "<stdin>: warning: Expected percentage but found \"into\"\n") expectParseError(t, "@keyframes name { 1,2 {} }", "<stdin>: warning: Expected percentage but found \"1\"\n<stdin>: warning: Expected percentage but found \"2\"\n") expectParseError(t, "@keyframes name { 1, 2 {} }", "<stdin>: warning: Expected percentage but found \"1\"\n<stdin>: warning: Expected percentage but found \"2\"\n") expectParseError(t, "@keyframes name { 1 ,2 {} }", "<stdin>: warning: Expected percentage but found \"1\"\n<stdin>: warning: Expected percentage but found \"2\"\n") expectParseError(t, "@keyframes name { 1%,,2% {} }", "<stdin>: warning: Expected percentage but found \",\"\n") }
explode_data.jsonl/32629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1155 }
[ 2830, 3393, 1655, 1592, 23719, 1155, 353, 8840, 836, 8, 341, 24952, 8994, 291, 1155, 11, 8428, 792, 23719, 24689, 8428, 792, 23719, 7245, 2105, 28152, 77, 11035, 77, 1138, 24952, 8994, 291, 1155, 11, 8428, 792, 23719, 829, 42351, 8428, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestError_Error(t *testing.T) { var herr Error // transaction failed happy path: with the appropriate extra fields herr = Error{ Problem: problem.P{ Title: "Transaction Failed", Type: "transaction_failed", Extras: map[string]interface{}{ "result_codes": map[string]interface{}{ "transaction": "tx_failed", "operations": []string{"op_underfunded", "op_already_exists"}, }, }, }, } assert.Equal(t, `horizon error: "Transaction Failed" (tx_failed, op_underfunded, op_already_exists) - check horizon.Error.Problem for more information`, herr.Error()) // transaction failed sad path: missing result_codes extra herr = Error{ Problem: problem.P{ Title: "Transaction Failed", Type: "transaction_failed", Extras: map[string]interface{}{}, }, } assert.Equal(t, `horizon error: "Transaction Failed" - check horizon.Error.Problem for more information`, herr.Error()) // transaction failed sad path: unparseable result_codes extra herr = Error{ Problem: problem.P{ Title: "Transaction Failed", Type: "transaction_failed", Extras: map[string]interface{}{ "result_codes": "kaboom", }, }, } assert.Equal(t, `horizon error: "Transaction Failed" - check horizon.Error.Problem for more information`, herr.Error()) // non-transaction errors herr = Error{ Problem: problem.P{ Type: "https://stellar.org/horizon-errors/not_found", Title: "Resource Missing", Status: 404, }, } assert.Equal(t, `horizon error: "Resource Missing" - check horizon.Error.Problem for more information`, herr.Error()) }
explode_data.jsonl/12137
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 564 }
[ 2830, 3393, 1454, 28651, 1155, 353, 8840, 836, 8, 341, 2405, 54739, 4600, 271, 197, 322, 7745, 4641, 6247, 1815, 25, 448, 279, 8311, 4960, 5043, 198, 9598, 615, 284, 4600, 515, 197, 197, 31198, 25, 3491, 1069, 515, 298, 92233, 25, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoArgs(t *testing.T) { tests := []struct { name string args []string wantErr bool }{ { name: "no args", args: []string{}, wantErr: false, }, { name: "with args", args: []string{"fail"}, wantErr: true, }, } for _, tt := range tests { args := tt.args wantErr := tt.wantErr t.Run(tt.name, func(t *testing.T) { c := &cobra.Command{Use: "c", Args: NoArgs, Run: emptyRun} if _, err := executeCommand(c, args...); (err != nil) != wantErr { t.Errorf("NoArgs() error = %v, wantErr %v", err, wantErr) } }) } }
explode_data.jsonl/59317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 2753, 4117, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 3056, 917, 198, 197, 50780, 7747, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 262, 330, 2152, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWSClientPingPong(t *testing.T) { cl, err := client.NewWS(tcpAddr, websocketEndpoint) require.Nil(t, err) cl.SetLogger(log.TestingLogger()) err = cl.Start() require.Nil(t, err) t.Cleanup(func() { if err := cl.Stop(); err != nil { t.Error(err) } }) time.Sleep(6 * time.Second) }
explode_data.jsonl/62872
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 7433, 2959, 69883, 47, 644, 1155, 353, 8840, 836, 8, 341, 39407, 11, 1848, 1669, 2943, 7121, 7433, 98203, 13986, 11, 58943, 27380, 340, 17957, 59678, 1155, 11, 1848, 340, 39407, 4202, 7395, 12531, 8787, 287, 7395, 2398, 9859...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTopDownIneqExpr(t *testing.T) { tests := []struct { note string rule string expected interface{} }{ {"noteq", `p = true { 0 != 1; a[i] = x; x != 2 }`, "true"}, {"gt", `p = true { 1 > 0; a[i] = x; x > 2 }`, "true"}, {"gteq", `p = true { 1 >= 1; a[i] = x; x >= 4 }`, "true"}, {"lt", `p = true { -1 < 0; a[i] = x; x < 5 }`, "true"}, {"lteq", `p = true { -1 <= 0; a[i] = x; x <= 1 }`, "true"}, {"undefined: noteq", `p = true { 0 != 0 }`, ""}, {"undefined: gt", `p = true { 1 > 2 }`, ""}, {"undefined: gteq", `p = true { 1 >= 2 }`, ""}, {"undefined: lt", `p = true { 1 < -1 }`, ""}, {"undefined: lteq", `p = true { 1 < -1 }`, ""}, } data := loadSmallTestData() for _, tc := range tests { runTopDownTestCase(t, data, tc.note, []string{tc.rule}, tc.expected) } }
explode_data.jsonl/25198
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 3393, 5366, 4454, 641, 11006, 16041, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 9038, 1272, 257, 914, 198, 197, 7000, 1111, 257, 914, 198, 197, 42400, 3749, 16094, 197, 59403, 197, 197, 4913, 45299, 497, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDefaultBackoff checks the retry backoff schedule: exponential growth
// (1s, 2s, 4s, 8s, ...) capped at 10s, clamped to at least 1s for
// non-positive attempt counts, and overridden by a Retry-After header when
// one is present. Jitter of up to one second is tolerated above the base.
func TestDefaultBackoff(t *testing.T) {
	tt := []struct {
		nretry     int
		retryAfter string        // Retry-After header
		out        time.Duration // expected min; max = min + jitter
	}{
		{-1, "", time.Second},       // verify the lower bound is 1
		{0, "", time.Second},        // verify the lower bound is 1
		{100, "", 10 * time.Second}, // verify the ceiling
		{1, "3600", time.Hour},      // verify the header value is used
		{1, "", 1 * time.Second},
		{2, "", 2 * time.Second},
		{3, "", 4 * time.Second},
		{4, "", 8 * time.Second},
	}
	for i, test := range tt {
		r := httptest.NewRequest("GET", "/", nil)
		resp := &http.Response{Header: http.Header{}}
		if test.retryAfter != "" {
			resp.Header.Set("Retry-After", test.retryAfter)
		}
		d := defaultBackoff(test.nretry, r, resp)
		max := test.out + time.Second // + max jitter
		if d < test.out || max < d {
			t.Errorf("%d: defaultBackoff(%v) = %v; want between %v and %v", i, test.nretry, d, test.out, max)
		}
	}
}
explode_data.jsonl/38185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 422 }
[ 2830, 3393, 3675, 3707, 1847, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 3056, 1235, 341, 197, 9038, 44848, 257, 526, 198, 197, 17200, 1539, 6025, 914, 286, 442, 78870, 12, 6025, 4247, 198, 197, 13967, 286, 882, 33795, 442, 3601, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetHistoryWithBash(t *testing.T) { commands, err := GetHistory("bash", 30) if err != nil && commands == nil { t.Error("Should have returned comamnds for Bash shell.") } }
explode_data.jsonl/48859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 1949, 13424, 2354, 33, 988, 1155, 353, 8840, 836, 8, 341, 262, 11293, 11, 1848, 1669, 2126, 13424, 445, 46216, 497, 220, 18, 15, 340, 262, 421, 1848, 961, 220, 2092, 1009, 11293, 621, 2092, 341, 286, 259, 6141, 445, 1499...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
// TestTransformFromCloudEventEmptyCloudEvent verifies that converting a
// zero-value CloudEvent fails: the pipeline must not continue and the
// result must be an error value rather than a decoded payload.
func TestTransformFromCloudEventEmptyCloudEvent(t *testing.T) {
	var cloudEvent cloudevents.Event // zero value — nothing populated
	conv := NewConversion()
	// `context` is presumably a package-level test fixture shadowing the
	// stdlib package name — TODO confirm against the rest of the file.
	continuePipeline, result := conv.TransformFromCloudEvent(context, cloudEvent)
	_, ok := result.(error)
	assert.True(t, ok) // result carries an error
	assert.NotNil(t, result)
	assert.False(t, continuePipeline) // pipeline halts on failure
}
explode_data.jsonl/58246
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 8963, 3830, 16055, 1556, 3522, 16055, 1556, 1155, 353, 8840, 836, 8, 341, 2405, 9437, 1556, 1185, 283, 450, 47664, 6904, 198, 197, 12027, 1669, 1532, 48237, 741, 11664, 34656, 11, 1102, 1669, 5686, 45165, 3830, 16055, 1556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetPastPublicTrades requests up to 5 past public trades for the
// BTCUSD_PERP pair and only checks that the call returns without error.
func TestGetPastPublicTrades(t *testing.T) {
	t.Parallel()
	_, err := b.GetPastPublicTrades(context.Background(), currency.NewPairWithDelimiter("BTCUSD", "PERP", "_"), 5, 0)
	if err != nil {
		t.Error(err)
	}
}
explode_data.jsonl/76598
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 1949, 50113, 12676, 1282, 3452, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 293, 2234, 50113, 12676, 1282, 3452, 5378, 19047, 1507, 11413, 7121, 12443, 2354, 91098, 445, 59118, 26749, 497, 330,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestIssue282 is a regression test: running multiple statements (two SET
// LOCAL commands and a SHOW) in one simple query must succeed, and the
// final SHOW must reflect the updated search_path. The duplicated SET LOCAL
// appears deliberate — presumably the original issue involved repeated
// statements in a single query string; TODO confirm against the tracker.
func TestIssue282(t *testing.T) {
	db := openTestConn(t)
	defer db.Close()

	var searchPath string
	err := db.QueryRow(`
		SET LOCAL search_path TO pg_catalog;
		SET LOCAL search_path TO pg_catalog;
		SHOW search_path`).Scan(&searchPath)
	if err != nil {
		t.Fatal(err)
	}
	if searchPath != "pg_catalog" {
		t.Fatalf("unexpected search_path %s", searchPath)
	}
}
explode_data.jsonl/73494
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 42006, 17, 23, 17, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 1787, 2271, 9701, 1155, 340, 16867, 2927, 10421, 2822, 2405, 2711, 1820, 914, 198, 9859, 1669, 2927, 15685, 3102, 61528, 197, 52134, 42501, 2711, 2638, 5146, 1749...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestAccBalanceNotEnough executes a block containing a transfer whose value
// presumably exceeds the sender's funds (value 20000_0000_0000 vs. the
// 10000_0000_0000 implied by the final balance assertion — confirm in
// prepareAccAndTx). The tx is still collected and committed; gas is charged
// for two txs but the oversized transfer amount does not move.
func TestAccBalanceNotEnough(t *testing.T) {
	AdjustGasUsed = false
	trunk, root := prepareTruck()
	defer closeTestCtx(root)
	// only 1 runner
	e := NewEbpTxExec(5, 5, 2, 10, &testcase.DumbSigner{})
	e.SetContext(prepareCtx(trunk))
	// 2 tx
	txs := prepareAccAndTx(e)
	e.SetContext(prepareCtx(trunk))
	for _, tx := range txs {
		e.CollectTx(tx)
	}
	// Third tx: the over-balance transfer under test.
	tx, _ := gethtypes.NewTransaction(1, to1, big.NewInt(20000_0000_0000), 100000, big.NewInt(1), nil).WithSignature(e.signer, from1.Bytes())
	e.CollectTx(tx)
	require.Equal(t, 3, e.CollectedTxsCount())
	e.Prepare(0, 0, DefaultTxGasLimit)
	e.SetContext(prepareCtx(trunk))
	e.Execute(&types.BlockInfo{})
	toAcc1 := e.cleanCtx.GetAccount(to1)
	fromAcc1 := e.cleanCtx.GetAccount(from1)
	require.Equal(t, uint64(100), toAcc1.Balance().Uint64())
	// Sender paid 21000 gas twice plus the 100 actually transferred.
	require.Equal(t, uint64(10000_0000_0000-21000*2-100), fromAcc1.Balance().Uint64())
	// tx which account cannot pay for transfer value also can commit
	require.Equal(t, 3, len(e.CommittedTxs()))
	e.SetContext(prepareCtx(trunk))
}
explode_data.jsonl/58974
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 423 }
[ 2830, 3393, 14603, 21190, 2623, 95801, 1155, 353, 8840, 836, 8, 341, 197, 38616, 58728, 22743, 284, 895, 198, 25583, 3122, 11, 3704, 1669, 10549, 1282, 1942, 741, 16867, 3265, 2271, 23684, 9206, 340, 197, 322, 3243, 220, 16, 22259, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestExpandPrivateNiftyModifyAddressAttributeInput checks that the expander
// maps Terraform resource data (private_ip + description) into the
// corresponding NiftyModifyAddressAttributeInput API request.
func TestExpandPrivateNiftyModifyAddressAttributeInput(t *testing.T) {
	rd := schema.TestResourceDataRaw(t, newSchema(), map[string]interface{}{
		"ip_type":     true,
		"private_ip":  "192.168.0.1",
		"description": "test_description",
	})
	rd.SetId("192.168.0.1")

	tests := []struct {
		name string
		args *schema.ResourceData
		want *computing.NiftyModifyAddressAttributeInput
	}{
		{
			name: "expands the resource data",
			args: rd,
			want: &computing.NiftyModifyAddressAttributeInput{
				PrivateIpAddress: nifcloud.String("192.168.0.1"),
				Attribute:        "description",
				Value:            nifcloud.String("test_description"),
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := expandNiftyModifyAddressAttributeInput(tt.args)
			assert.Equal(t, tt.want, got)
		})
	}
}
explode_data.jsonl/35514
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 38946, 16787, 45, 38624, 44427, 4286, 3907, 2505, 1155, 353, 8840, 836, 8, 341, 92356, 1669, 10802, 8787, 4783, 1043, 20015, 1155, 11, 501, 8632, 1507, 2415, 14032, 31344, 67066, 197, 197, 1, 573, 1819, 788, 257, 830, 345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBlobberGRPCService_GetReferencePath_InvalidPaths verifies that when
// the reference package rejects the Paths value, GetReferencePath propagates
// that error verbatim to the gRPC caller.
func TestBlobberGRPCService_GetReferencePath_InvalidPaths(t *testing.T) {
	req := &blobbergrpc.GetReferencePathRequest{
		Context: &blobbergrpc.RequestContext{
			Client:     "client",
			ClientKey:  "",
			Allocation: "",
		},
		Paths:      `["something"]`,
		Path:       "",
		Allocation: "",
	}

	mockStorageHandler := &storageHandlerI{}
	mockReferencePackage := &mocks.PackageHandler{}
	// Allocation lookup succeeds so the failure under test comes from the
	// reference package, not verification.
	mockStorageHandler.On("verifyAllocation", mock.Anything, req.Allocation, false).Return(&allocation.Allocation{
		ID:      "allocationId",
		Tx:      req.Allocation,
		OwnerID: "owner",
	}, nil)
	mockReferencePackage.On("GetReferencePathFromPaths", mock.Anything, mock.Anything, mock.Anything).Return(nil, errors.New("invalid paths"))

	svc := newGRPCBlobberService(mockStorageHandler, mockReferencePackage)
	_, err := svc.GetReferencePath(context.Background(), req)
	if err == nil {
		t.Fatal("expected error")
	}
	assert.Equal(t, err.Error(), "invalid paths")
}
explode_data.jsonl/66835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 359 }
[ 2830, 3393, 37985, 652, 8626, 4872, 1860, 13614, 8856, 1820, 62, 7928, 26901, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 609, 35112, 652, 56585, 2234, 8856, 1820, 1900, 515, 197, 70871, 25, 609, 35112, 652, 56585, 9659, 1972, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestLog_GlobalLogger_FilterLevel verifies that the global level filter
// gates output independently of the logger level: with the filter at Debug a
// Debug call produces output, and raising the filter to Info suppresses it.
func TestLog_GlobalLogger_FilterLevel(t *testing.T) {
	// Restore global logger/filter state when the test finishes.
	defer SaveGlobalLoggerAndFilter(true)()
	assert.NoError(t, SetLevel("debug"))
	assert.NoError(t, SetGlobalLevelFilter(insolar.DebugLevel))
	assertHelloWorld(t, capture(func() { Debug("HelloWorld") }))
	assert.NoError(t, SetGlobalLevelFilter(insolar.InfoLevel))
	assert.Equal(t, "", capture(func() { Debug("HelloWorld") }))
}
explode_data.jsonl/40296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 2201, 93132, 7395, 68935, 4449, 1155, 353, 8840, 836, 8, 341, 16867, 10255, 11646, 7395, 3036, 5632, 3715, 8, 2822, 6948, 35699, 1155, 11, 2573, 4449, 445, 8349, 5455, 6948, 35699, 1155, 11, 2573, 11646, 4449, 5632, 56337, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMultipleCallsToStartAndStop verifies the push manager's lifecycle
// contract: Start and Stop each succeed exactly once and error on repeated
// calls; a successful SSE connection records telemetry in order (token
// refresh scheduling, then connection established), schedules nextRefresh,
// and propagates StatusUp on the feedback channel.
func TestMultipleCallsToStartAndStop(t *testing.T) {
	called := 0
	cfg := &conf.AdvancedConfig{
		SplitUpdateQueueSize:   10000,
		SegmentUpdateQueueSize: 10000,
	}
	logger := logging.NewLogger(nil)
	synchronizer := &pushMocks.LocalSyncMock{}
	// Canned JWT; its exact claims are irrelevant beyond being parseable.
	token := &dtos.Token{
		Token:       `eyJhbGciOiJIUzI1NiIsImtpZCI6IjVZOU05US45QnJtR0EiLCJ0eXAiOiJKV1QifQ.eyJ4LWFibHktY2FwYWJpbGl0eSI6IntcIk56TTJNREk1TXpjMF9NVGd5TlRnMU1UZ3dOZz09X3NlZ21lbnRzXCI6W1wic3Vic2NyaWJlXCJdLFwiTnpNMk1ESTVNemMwX01UZ3lOVGcxTVRnd05nPT1fc3BsaXRzXCI6W1wic3Vic2NyaWJlXCJdLFwiY29udHJvbF9wcmlcIjpbXCJzdWJzY3JpYmVcIixcImNoYW5uZWwtbWV0YWRhdGE6cHVibGlzaGVyc1wiXSxcImNvbnRyb2xfc2VjXCI6W1wic3Vic2NyaWJlXCIsXCJjaGFubmVsLW1ldGFkYXRhOnB1Ymxpc2hlcnNcIl19IiwieC1hYmx5LWNsaWVudElkIjoiY2xpZW50SWQiLCJleHAiOjE2MTMzNDUyMzAsImlhdCI6MTYxMzM0MTYzMH0.Z3jKyiJq6t00hWFV_xIlh5w4xAYF3Rj0gfcTxgLjcOc`,
		PushEnabled: true,
	}
	authMock := &serviceMocks.MockAuthClient{
		AuthenticateCall: func() (*dtos.Token, error) { return token, nil },
	}
	feedback := make(chan int64, 100)
	telemetryStorageMock := mocks.MockTelemetryStorage{
		RecordSuccessfulSyncCall: func(resource int, tm int64) {
			if resource != telemetry.TokenSync {
				t.Error("Resource should be token")
			}
		},
		RecordSyncLatencyCall: func(resource int, latency int64) {
			if resource != telemetry.TokenSync {
				t.Error("Resource should be token")
			}
		},
		RecordTokenRefreshesCall: func() {},
		// Streaming events must arrive in a fixed order, tracked by `called`.
		RecordStreamingEventCall: func(streamingEvent *dtos.StreamingEvent) {
			switch called {
			case 0:
				if streamingEvent.Type != telemetry.EventTypeTokenRefresh || streamingEvent.Data != 3000000 {
					t.Error("Should record next token refresh")
				}
			case 1:
				if streamingEvent.Type != telemetry.EventTypeSSEConnectionEstablished {
					t.Error("It should record connection established")
				}
			}
			called++
		},
	}
	manager, err := NewManager(logger, synchronizer, cfg, feedback, authMock, telemetryStorageMock, dtos.Metadata{}, nil)
	if err != nil {
		t.Error("no error should be returned upon manager instantiation", err)
		return
	}
	waiter := make(chan struct{}, 1)
	// Fake SSE client: signals a successful first event immediately, then
	// blocks until StopStreaming releases it and reports a disconnect.
	manager.sseClient = &sseMocks.StreamingClientMock{
		ConnectStreamingCall: func(tok string, status chan int, channels []string, handler func(sse.IncomingMessage)) {
			if tok != token.Token {
				t.Error("incorrect token received.")
			}
			go func() {
				status <- sse.StatusFirstEventOk
				<-waiter
				status <- sse.StatusDisconnected
			}()
		},
		StopStreamingCall: func() {
			waiter <- struct{}{} // free "sse" goroutine to make it end
		},
	}
	if err := manager.Start(); err != nil {
		t.Error("first call to Start() should not return an error. Got:", err)
	}
	if err := manager.Start(); err == nil {
		t.Error("further calls to Start() should return an error.")
	}
	manager.statusTracker.NotifySSEShutdownExpected()
	message := <-feedback
	if message != StatusUp {
		t.Error("push manager should have proapgated a push up status. Got: ", message)
	}
	if manager.nextRefresh == nil {
		t.Error("a token refresh should have been scheduled after a successful connection.")
	}
	if err := manager.Stop(); err != nil {
		t.Error("no error should be returned on the first call to .Stop(). Got:", err)
	}
	if err := manager.Stop(); err == nil {
		t.Error("an error should be returned on further calls to .Stop()")
	}
}
explode_data.jsonl/44444
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1410 }
[ 2830, 3393, 32089, 55292, 1249, 3479, 3036, 10674, 1155, 353, 8840, 836, 8, 341, 1444, 4736, 1669, 220, 15, 198, 50286, 1669, 609, 6135, 17865, 88087, 2648, 515, 197, 7568, 2292, 4289, 7554, 1695, 25, 256, 220, 16, 15, 15, 15, 15, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateClaims(t *testing.T) { claims, errClaims := createClaims(&testClaims, nil) if claims == nil { t.Fail() t.Logf("claims should not be nil") } if errClaims != nil { t.Fail() t.Logf(errClaims.Error()) } }
explode_data.jsonl/11046
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 4021, 51133, 1155, 353, 8840, 836, 8, 341, 197, 48561, 11, 1848, 51133, 1669, 1855, 51133, 2099, 1944, 51133, 11, 2092, 340, 743, 8186, 621, 2092, 341, 197, 3244, 57243, 741, 197, 3244, 98954, 445, 48561, 1265, 537, 387, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestTransportCancelBeforeResponseHeaders checks that CancelRequest aborts
// an in-flight request before any response headers arrive, and that
// RoundTrip then reports the canonical request-canceled error.
func TestTransportCancelBeforeResponseHeaders(t *testing.T) {
	t.Skip("Skipping flaky test; see Issue 11894")
	defer afterTest(t)
	serverConnCh := make(chan net.Conn, 1)
	tr := &Transport{
		// In-memory pipe: the test plays server on the other end.
		Dial: func(network, addr string) (net.Conn, error) {
			cc, sc := net.Pipe()
			serverConnCh <- sc
			return cc, nil
		},
	}
	defer tr.CloseIdleConnections()
	errc := make(chan error, 1)
	req, _ := NewRequest("GET", "http://example.com/", nil)
	go func() {
		_, err := tr.RoundTrip(req)
		errc <- err
	}()
	sc := <-serverConnCh
	verb := make([]byte, 3)
	if _, err := io.ReadFull(sc, verb); err != nil {
		t.Errorf("Error reading HTTP verb from server: %v", err)
	}
	if string(verb) != "GET" {
		t.Errorf("server received %q; want GET", verb)
	}
	defer sc.Close()
	// Cancel while the server has read the verb but sent no response.
	tr.CancelRequest(req)
	err := <-errc
	if err == nil {
		t.Fatalf("unexpected success from RoundTrip")
	}
	if err != ExportErrRequestCanceled {
		t.Errorf("RoundTrip error = %v; want ExportErrRequestCanceled", err)
	}
}
explode_data.jsonl/4904
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 394 }
[ 2830, 3393, 27560, 9269, 10227, 2582, 10574, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 85945, 1320, 28100, 1273, 26, 1490, 25226, 220, 16, 16, 23, 24, 19, 1138, 16867, 1283, 2271, 1155, 692, 41057, 9701, 1143, 1669, 1281, 35190, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetDeploymentConfigError verifies that Get propagates the registry's
// error unchanged (identity comparison) and returns a nil deploymentConfig
// when lookup fails.
func TestGetDeploymentConfigError(t *testing.T) {
	mockRegistry := test.NewDeploymentConfigRegistry()
	mockRegistry.Err = fmt.Errorf("bad")
	storage := REST{registry: mockRegistry}
	deploymentConfig, err := storage.Get(kapi.NewDefaultContext(), "foo")
	if deploymentConfig != nil {
		t.Errorf("Unexpected non-nil deploymentConfig: %#v", deploymentConfig)
	}
	if err != mockRegistry.Err {
		t.Errorf("Expected %#v, got %#v", mockRegistry.Err, err)
	}
}
explode_data.jsonl/66987
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 1949, 75286, 2648, 1454, 1155, 353, 8840, 836, 8, 341, 77333, 15603, 1669, 1273, 7121, 75286, 2648, 15603, 741, 77333, 15603, 27862, 284, 8879, 13080, 445, 13855, 1138, 197, 16172, 1669, 25414, 90, 29172, 25, 7860, 15603, 630,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestLimitRangeList verifies that LimitRanges(ns).List issues a GET against
// the limit-range collection path for the namespace and decodes the
// returned list.
func TestLimitRangeList(t *testing.T) {
	ns := api.NamespaceDefault
	limitRangeList := &api.LimitRangeList{
		Items: []api.LimitRange{
			{
				ObjectMeta: api.ObjectMeta{Name: "foo"},
			},
		},
	}
	// testClient checks the outgoing request and serves the canned response.
	c := &testClient{
		Request: testRequest{
			Method: "GET",
			Path:   testapi.ResourcePath(getLimitRangesResourceName(), ns, ""),
			Query:  buildQueryValues(nil),
			Body:   nil,
		},
		Response: Response{StatusCode: 200, Body: limitRangeList},
	}
	response, err := c.Setup().LimitRanges(ns).List(labels.Everything())
	c.Validate(t, response, err)
}
explode_data.jsonl/69629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 16527, 6046, 852, 1155, 353, 8840, 836, 8, 341, 84041, 1669, 6330, 46011, 3675, 271, 8810, 2353, 6046, 852, 1669, 609, 2068, 1214, 2353, 6046, 852, 515, 197, 197, 4353, 25, 3056, 2068, 1214, 2353, 6046, 515, 298, 197, 515,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_builder_Unreachable covers the three states of the unreachable
// condition on a ClusterDeployment: unset (reported false), explicitly
// true, and explicitly false.
func Test_builder_Unreachable(t *testing.T) {
	cases := []struct {
		name        string
		unreachable *bool // nil means the condition is left unset
		expected    bool
	}{
		{
			name:     "unreachable unset",
			expected: false,
		},
		{
			name:        "unreachable true",
			unreachable: pointer.BoolPtr(true),
			expected:    true,
		},
		{
			name:        "unreachable false",
			unreachable: pointer.BoolPtr(false),
			expected:    false,
		},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			cd := testClusterDeployment()
			if tc.unreachable != nil {
				setUnreachable(cd, *tc.unreachable)
			}
			c := fakeClient(cd)
			builder := NewBuilder(c, cd, "test-controller-name")
			actual := builder.Unreachable()
			assert.Equal(t, tc.expected, actual, "unexpected unreachable")
		})
	}
}
explode_data.jsonl/35045
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 352 }
[ 2830, 3393, 28532, 40687, 46550, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 20479, 46550, 353, 2641, 198, 197, 42400, 262, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 257...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestShardBasic runs the generic blobstore storage conformance suite
// against a fresh sharded test storage (no extra cleanup required).
func TestShardBasic(t *testing.T) {
	newStorage := func(t *testing.T) (sto blobstore.Storage, cleanup func()) {
		return newTestStorage(t).sto, nil
	}
	storagetest.Test(t, newStorage)
}
explode_data.jsonl/61165
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 2016, 567, 15944, 1155, 353, 8840, 836, 8, 341, 18388, 269, 351, 57824, 8787, 1155, 11, 2915, 1155, 353, 8840, 836, 8, 320, 33052, 23404, 4314, 43771, 11, 21290, 2915, 2140, 341, 197, 853, 501, 2271, 5793, 1155, 568, 33052...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFormatterJSON(test *testing.T) { object := struct { Value int Message string }{ Value: 4, Message: "text", } formatted, err := formatter.Format("{p | json}", object) assert.NoError(test, err) assert.Equal(test, `{"Value":4,"Message":"text"}`, formatted) }
explode_data.jsonl/39800
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 14183, 5370, 8623, 353, 8840, 836, 8, 341, 35798, 1669, 2036, 341, 197, 47399, 256, 526, 198, 197, 46733, 914, 198, 197, 59403, 197, 47399, 25, 256, 220, 19, 345, 197, 46733, 25, 330, 1318, 756, 197, 630, 37410, 12127, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapInfoConstructor(t *testing.T) { v := sml.NewMapInfo() if v == nil { t.Errorf("sml.NewMapInfo must return a non-nil value") } if err := v.Validate(); err != nil { t.Errorf("newly constructed sml.MapInfo should validate: %s", err) } }
explode_data.jsonl/50962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 2227, 1731, 13288, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 274, 1014, 7121, 2227, 1731, 741, 743, 348, 621, 2092, 341, 197, 3244, 13080, 445, 82, 1014, 7121, 2227, 1731, 1969, 470, 264, 2477, 83248, 897, 1138, 197, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestPortConflictNodeDaemonDoesNotLaunchPod verifies, for every update
// strategy, that the DaemonSet controller does not create a pod on a node
// where an existing pod already binds the requested host port.
func TestPortConflictNodeDaemonDoesNotLaunchPod(t *testing.T) {
	for _, strategy := range updateStrategies() {
		podSpec := v1.PodSpec{
			NodeName: "port-conflict",
			Containers: []v1.Container{{
				Ports: []v1.ContainerPort{{
					HostPort: 666,
				}},
			}},
		}
		manager, podControl, _, err := newTestController()
		if err != nil {
			t.Fatalf("error creating DaemonSets controller: %v", err)
		}
		node := newNode("port-conflict", nil)
		manager.nodeStore.Add(node)
		// A pre-existing pod already occupies host port 666 on the node.
		manager.podStore.Add(&v1.Pod{
			Spec: podSpec,
		})
		ds := newDaemonSet("foo")
		ds.Spec.UpdateStrategy = *strategy
		ds.Spec.Template.Spec = podSpec
		manager.dsStore.Add(ds)
		// Expect zero creations, zero deletions, zero events.
		syncAndValidateDaemonSets(t, manager, ds, podControl, 0, 0, 0)
	}
}
explode_data.jsonl/50318
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 7084, 57974, 1955, 89177, 21468, 2623, 32067, 23527, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 8282, 1669, 2088, 2647, 2580, 69388, 368, 341, 197, 3223, 347, 8327, 1669, 348, 16, 88823, 8327, 515, 298, 30217, 675, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestNodeID_hashAtDistance checks, over random anchors and distances, that
// hashAtDistance produces a hash whose log-distance from the anchor equals
// the requested distance.
func TestNodeID_hashAtDistance(t *testing.T) {
	// we don't use quick.Check here because its output isn't
	// very helpful when the test fails.
	cfg := quickcfg()
	for i := 0; i < cfg.MaxCount; i++ {
		a := gen(common.Hash{}, cfg.Rand).(common.Hash)
		// Distance ranges over the hash's bit length.
		dist := cfg.Rand.Intn(len(common.Hash{}) * 8)
		result := hashAtDistance(a, dist)
		actualdist := logdist(result, a)
		if dist != actualdist {
			t.Log("a: ", a)
			t.Log("result:", result)
			t.Fatalf("#%d: distance of result is %d, want %d", i, actualdist, dist)
		}
	}
}
explode_data.jsonl/49023
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 1955, 915, 8950, 1655, 14778, 1155, 353, 8840, 836, 8, 341, 197, 322, 582, 1513, 944, 990, 3974, 10600, 1588, 1576, 1181, 2550, 4436, 944, 198, 197, 322, 1602, 10950, 979, 279, 1273, 14525, 624, 50286, 1669, 3974, 14072, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSubdomain(t *testing.T) { //Test cases cases := map[string]string{ "http://google.com": "", "http://google.com/ding?true": "", "google.com/?ding=false": "", "google.com?ding=false": "", "nonexist.***": "", "google.com": "", "google.co.uk": "", "gama.google.com": "gama", "gama.google.co.uk": "gama", "beta.gama.google.co.uk": "beta.gama", "": "", } //Test each domain, some should fail (expected) for url, expectedSubdomain := range cases { subdomain := Subdomain(url) if subdomain != expectedSubdomain { t.Errorf("Url (%q) returned %q for Subdomain(), but %q was expected", url, subdomain, expectedSubdomain) } } }
explode_data.jsonl/30830
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 3136, 12204, 1155, 353, 8840, 836, 8, 341, 197, 322, 2271, 5048, 198, 1444, 2264, 1669, 2415, 14032, 30953, 515, 197, 197, 76932, 1110, 17485, 905, 788, 1843, 8324, 197, 197, 76932, 1110, 17485, 905, 3446, 287, 30, 1866, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestParseRequirements parses a requirements blob covering pinned (==),
// minimum (>=, >), bare, dotted, extras ([extradep]) and heading lines with
// loose spacing, and compares the result against the expected parse.
// Extras and heading markers are dropped from the parsed names.
func TestParseRequirements(t *testing.T) {
	expReqs := []*Requirement{
		{
			Name:       "dep1",
			Constraint: "==",
			Version:    "2.3.2",
		},
		{
			Name:       "dep2",
			Constraint: ">=",
			Version:    "1.0",
		},
		{
			Name:       "dep3",
			Constraint: "",
			Version:    "",
		},
		{
			Name:       "dep4",
			Constraint: "",
			Version:    "",
		},
		{
			Name:       "dep5",
			Constraint: "==",
			Version:    "2.3.2",
		},
		{
			Name:       "dep6",
			Constraint: ">=",
			Version:    "7",
		},
		{
			Name:       "dep7",
			Constraint: "==",
			Version:    "10",
		},
		{
			Name:       "dep8.subdep",
			Constraint: "==",
			Version:    "1.2.3",
		},
		{
			Name:       "dep9",
			Constraint: ">",
			Version:    "1",
		},
		{
			Name:       "dep9",
			Constraint: ">",
			Version:    "1",
		},
		{
			Name:       "dep10",
			Constraint: "==",
			Version:    "1",
		},
		{
			Name:       "dep10",
			Constraint: "",
			Version:    "",
		},
	}
	reqs, err := ParseRequirements(`dep1==2.3.2
dep2>=1.0
dep3
dep4
dep5 == 2.3.2
dep6>= 7
[this-is-a-heading]
dep7 ==10
dep8.subdep==1.2.3
dep9>1
dep9 > 1
dep10[extradep]==1
dep10[extradep]
`)
	if err != nil {
		t.Errorf("Error parsing requirements: %s", err)
	} else if !reflect.DeepEqual(reqs, expReqs) {
		t.Errorf("Requirements do not match: %v", pretty.Diff(reqs, expReqs))
	}
}
explode_data.jsonl/46891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 798 }
[ 2830, 3393, 14463, 59202, 1155, 353, 8840, 836, 8, 341, 48558, 693, 26358, 1669, 29838, 75802, 515, 197, 197, 515, 298, 21297, 25, 981, 330, 14891, 16, 756, 298, 197, 17890, 25, 98651, 756, 298, 77847, 25, 262, 330, 17, 13, 18, 13, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestScheduleSerialisation round-trips schedule strings through
// FromString/String and expects the canonical form back unchanged.
// NOTE(review): the err field is declared but never consulted — the error
// cases are the commented-out rows; confirm whether they should be revived.
func TestScheduleSerialisation(t *testing.T) {
	tests := []struct {
		in, out string
		err     bool
	}{
		{"true:48", "true:48", false},
		{"false:48", "false:48", false},
		{"true:1,false:46,true:1", "true:1,false:46,true:1", false},
		// {"false:500,false:45", "false:48", false},
		// {"false:47,true:1", "false:47,true:1", false},
		// {"true:5,false:45", "true:5,false:43", false},
		// {"false:3,false:45", "false:48", false},
	}
	for i, test := range tests {
		s := &Schedule{}
		err := s.FromString(test.in)
		if err != nil {
			t.Error(err, "in test", i+1, "with text", test.in)
		}
		res := s.String()
		if res != test.out {
			t.Error("got", res, "but expected", test.out, "on", test.in)
		}
	}
}
explode_data.jsonl/59727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 32210, 5915, 7923, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 11, 700, 914, 198, 197, 9859, 257, 1807, 198, 197, 59403, 197, 197, 4913, 1866, 25, 19, 23, 497, 330, 1866, 25, 19, 23, 497, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestFieldStringValue reads a character ("C") field of width 5 at offset 3
// from a record buffer and expects the whitespace-trimmed value back.
func TestFieldStringValue(t *testing.T) {
	f := newField("Name", "C", 5, 0)
	f.Offset = 3
	recordBuf := []byte("   Abc    ")
	v := f.stringValue(recordBuf, nil)
	require.Equal(t, "Abc", v)
}
explode_data.jsonl/79449
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 1877, 82696, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 1877, 445, 675, 497, 330, 34, 497, 220, 20, 11, 220, 15, 340, 1166, 61958, 284, 220, 18, 198, 71952, 15064, 1669, 3056, 3782, 445, 256, 3680, 66, 262, 14167, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_jsoniter_number(t *testing.T) { should := require.New(t) var arr []Number err := Unmarshal([]byte(`[1]`), &arr) should.Nil(err) should.Equal(Number("1"), arr[0]) str, isNumber := CastJsonNumber(arr[0]) should.True(isNumber) should.Equal("1", str) }
explode_data.jsonl/51208
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 9455, 2015, 5500, 1155, 353, 8840, 836, 8, 341, 197, 5445, 1669, 1373, 7121, 1155, 340, 2405, 2890, 3056, 2833, 198, 9859, 1669, 1230, 27121, 10556, 3782, 5809, 58, 16, 60, 63, 701, 609, 1118, 340, 197, 5445, 59678, 3964, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJWKCacheEntry_Expired(t *testing.T) { t.Run("should return true when entry is cached for more than cachePeriod", func(t *testing.T) { // WHEN entry := jwkCacheEntry{ key: "dummy", expireAt: time.Now().Add(-1 * cachePeriod), } // THEN require.True(t, entry.IsExpired()) }) t.Run("should return false when entry is cached for no longer than cachePeriod", func(t *testing.T) { // WHEN entry := jwkCacheEntry{ key: "dummy", expireAt: time.Now().Add(cachePeriod), } // THEN require.False(t, entry.IsExpired()) }) }
explode_data.jsonl/29080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 41, 68316, 8233, 5874, 62, 54349, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5445, 470, 830, 979, 4343, 374, 20579, 369, 803, 1091, 6500, 23750, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 33633, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShlibnameFiles(t *testing.T) { pkgs := append([]string{}, minpkgs...) pkgs = append(pkgs, "runtime/cgo") for _, pkg := range pkgs { shlibnamefile := filepath.Join(gorootInstallDir, pkg+".shlibname") contentsb, err := ioutil.ReadFile(shlibnamefile) if err != nil { t.Errorf("error reading shlibnamefile for %s: %v", pkg, err) continue } contents := strings.TrimSpace(string(contentsb)) if contents != soname { t.Errorf("shlibnamefile for %s has wrong contents: %q", pkg, contents) } } }
explode_data.jsonl/24189
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 2016, 2740, 606, 10809, 1155, 353, 8840, 836, 8, 341, 3223, 74, 5857, 1669, 8737, 10556, 917, 22655, 1308, 20819, 5857, 31218, 3223, 74, 5857, 284, 8737, 39928, 5857, 11, 330, 22255, 2899, 3346, 1138, 2023, 8358, 24793, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestResourceNameFor(t *testing.T) { obj := &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "tests/v1alpha1", "kind": "Test", "metadata": map[string]interface{}{ "name": "myname", "namespace": "mynamespace", }, }, } fake := &ktesting.Fake{ Resources: []*metav1.APIResourceList{ { GroupVersion: "tests/v1alpha1", APIResources: []metav1.APIResource{ { Name: "tests", Kind: "Test", }, }, }, }, } disco := &fakediscovery.FakeDiscovery{Fake: fake} if n := ResourceNameFor(disco, obj); n != "tests" { t.Errorf("Got resource name %q for %v", n, obj) } obj.SetKind("Unknown") if n := ResourceNameFor(disco, obj); n != "unknown" { t.Errorf("Got resource name %q for %v", n, obj) } obj.SetGroupVersionKind(schema.GroupVersionKind{Group: "unknown", Version: "noversion", Kind: "SomeKind"}) if n := ResourceNameFor(disco, obj); n != "somekind" { t.Errorf("Got resource name %q for %v", n, obj) } }
explode_data.jsonl/10849
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 447 }
[ 2830, 3393, 4783, 675, 2461, 1155, 353, 8840, 836, 8, 341, 22671, 1669, 609, 359, 51143, 10616, 51143, 515, 197, 23816, 25, 2415, 14032, 31344, 67066, 298, 197, 1, 2068, 5637, 788, 330, 23841, 5457, 16, 7141, 16, 756, 298, 197, 1, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUserUsageStatistics_LogCodeIntelAction(t *testing.T) { setupForTest(t) user := types.User{ ID: 1, } err := LogActivity(true, user.ID, "test-cookie-id", "CODEINTEL") if err != nil { t.Fatal(err) } a, err := GetByUserID(user.ID) if err != nil { t.Fatal(err) } if want := int32(1); a.CodeIntelligenceActions != want { t.Errorf("got %d, want %d", a.CodeIntelligenceActions, want) } }
explode_data.jsonl/27990
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 1474, 14783, 38599, 44083, 2078, 49099, 2512, 1155, 353, 8840, 836, 8, 341, 84571, 2461, 2271, 1155, 692, 19060, 1669, 4494, 7344, 515, 197, 29580, 25, 220, 16, 345, 197, 532, 9859, 1669, 2835, 4052, 3715, 11, 1196, 9910, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEtcdCreatePod(t *testing.T) { ctx := api.NewDefaultContext() fakeClient := tools.NewFakeEtcdClient(t) fakeClient.TestIndex = true key, _ := makePodKey(ctx, "foo") fakeClient.Data[key] = tools.EtcdResponseWithError{ R: &etcd.Response{ Node: nil, }, E: tools.EtcdErrorNotFound, } fakeClient.Set("/registry/hosts/machine/kubelet", runtime.EncodeOrDie(latest.Codec, &api.ContainerManifestList{}), 0) registry := NewTestEtcdRegistry(fakeClient) err := registry.CreatePod(ctx, &api.Pod{ TypeMeta: api.TypeMeta{ ID: "foo", }, DesiredState: api.PodState{ Manifest: api.ContainerManifest{ Containers: []api.Container{ { Name: "foo", }, }, }, }, }) if err != nil { t.Fatalf("unexpected error: %v", err) } // Suddenly, a wild scheduler appears: err = registry.ApplyBinding(ctx, &api.Binding{PodID: "foo", Host: "machine", TypeMeta: api.TypeMeta{Namespace: api.NamespaceDefault}}) if err != nil { t.Fatalf("unexpected error: %v", err) } resp, err := fakeClient.Get(key, false, false) if err != nil { t.Fatalf("Unexpected error %v", err) } var pod api.Pod err = latest.Codec.DecodeInto([]byte(resp.Node.Value), &pod) if err != nil { t.Errorf("unexpected error: %v", err) } if pod.ID != "foo" { t.Errorf("Unexpected pod: %#v %s", pod, resp.Node.Value) } var manifests api.ContainerManifestList resp, err = fakeClient.Get("/registry/hosts/machine/kubelet", false, false) if err != nil { t.Errorf("unexpected error: %v", err) } err = latest.Codec.DecodeInto([]byte(resp.Node.Value), &manifests) if len(manifests.Items) != 1 || manifests.Items[0].ID != "foo" { t.Errorf("Unexpected manifest list: %#v", manifests) } }
explode_data.jsonl/8140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 704 }
[ 2830, 3393, 31860, 4385, 4021, 23527, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 6330, 7121, 3675, 1972, 741, 1166, 726, 2959, 1669, 7375, 7121, 52317, 31860, 4385, 2959, 1155, 340, 1166, 726, 2959, 8787, 1552, 284, 830, 198, 23634, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestNewGrpcClient_EtcdContentsGarbled(t *testing.T) { controller := gomock.NewController(t) oldGrpcDialContext := grpc.DialContext mockDialer := mock_grpc.NewMockDialer(controller) mockKV := mock_etcd.NewMockKV(controller) grpcDialContext = mockDialer.DialContext defer func() { grpcDialContext = oldGrpcDialContext }() ctx := context.TODO() mockKV.EXPECT().Get(ctx, "/ns/service/test", gomock.Any()).Return( &clientv3.GetResponse{ Kvs: []*mvccpb.KeyValue{ &mvccpb.KeyValue{ Key: []byte("/ns/service/test"), Value: []byte("Whazzup bruh"), }, }, Count: 1, }, nil) conn, err := NewGrpcClient(ctx, mockKV, "test") if err == nil { t.Error("NewGrpcClient succeeds despite expected decoding errors?") } if conn != nil { t.Error("Received non-nil connection") } }
explode_data.jsonl/11504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 3564, 6464, 3992, 2959, 2089, 83, 4385, 14803, 43930, 37659, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 61828, 6464, 3992, 35, 530, 1972, 1669, 47900, 98462, 1972, 198, 77333, 35, 530, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPostfixIncDot(t *testing.T) { const SCRIPT = ` var o = {x: 42}; var trace = ""; function F1() { trace += "First!"; return o; } var rv = F1().x++; rv += trace + o.x; ` testScript(SCRIPT, asciiString("42First!43"), t) }
explode_data.jsonl/75250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 4133, 5743, 39245, 34207, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 2405, 297, 284, 314, 87, 25, 220, 19, 17, 2440, 2405, 11655, 284, 21238, 7527, 434, 16, 368, 341, 2760, 11655, 1421, 330, 5338, 26782, 2760, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetDaemonEndpointsForHostname2(t *testing.T) { // Invalid hostname - single form dEndpt, err := GetDaemonEndpointsFromString("XYZ:2000") assert.NotNil(t, err) assert.Nil(t, dEndpt) }
explode_data.jsonl/49944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 1949, 89177, 80786, 2461, 88839, 17, 1155, 353, 8840, 836, 8, 314, 442, 13882, 28115, 481, 3175, 1352, 198, 2698, 3727, 417, 11, 1848, 1669, 2126, 89177, 80786, 44491, 445, 32196, 25, 17, 15, 15, 15, 1138, 6948, 93882, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestLoggedIn_Hydra_Errors_invalid_state(t *testing.T) { s, cfg, _, h, _, err := setupHydraTest(true) if err != nil { t.Fatalf("setupHydraTest() failed: %v", err) } pname := "dr_joe_elixir" tests := []struct { name string code string stateID string respStatus int }{ { name: "no state", code: pname, stateID: "", respStatus: http.StatusBadRequest, }, { name: "stateID invalid", code: pname, stateID: "invalid", respStatus: http.StatusBadRequest, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { resp := sendLoggedIn(t, s, cfg, h, tc.code, "", tc.stateID, pb.ResourceTokenRequestState_DATASET) if resp.StatusCode != tc.respStatus { t.Errorf("resp.StatusCode wants %d got %d", tc.respStatus, resp.StatusCode) } }) } }
explode_data.jsonl/18498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 405 }
[ 2830, 3393, 28559, 2039, 88, 22248, 93623, 1087, 31433, 4387, 1155, 353, 8840, 836, 8, 341, 1903, 11, 13286, 11, 8358, 305, 11, 8358, 1848, 1669, 6505, 30816, 22248, 2271, 3715, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAccountRetriever(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() mockNodeQuerier := mocks.NewMockNodeQuerier(mockCtrl) accRetr := NewAccountRetriever(mockNodeQuerier) addr := []byte("test") bs, err := ModuleCdc.MarshalJSON(NewQueryAccountParams(addr)) require.NoError(t, err) mockNodeQuerier.EXPECT().QueryWithData(gomock.Eq("custom/acc/account"), gomock.Eq(bs)).Return(nil, int64(0), dummyError).Times(1) _, err = accRetr.GetAccount(addr) require.Error(t, err) mockNodeQuerier.EXPECT().QueryWithData(gomock.Eq("custom/acc/account"), gomock.Eq(bs)).Return(nil, int64(0), dummyError).Times(1) n, s, err := accRetr.GetAccountNumberSequence(addr) require.Error(t, err) require.Equal(t, uint64(0), n) require.Equal(t, uint64(0), s) mockNodeQuerier.EXPECT().QueryWithData(gomock.Eq("custom/acc/account"), gomock.Eq(bs)).Return(nil, int64(0), dummyError).Times(1) require.Error(t, accRetr.EnsureExists(addr)) }
explode_data.jsonl/53897
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 396 }
[ 2830, 3393, 7365, 12020, 461, 2054, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 2822, 77333, 1955, 2183, 261, 1268, 1669, 68909, 7121, 11571, 1955, 2183, 261, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFieldErrors_ViaFieldIndex(t *testing.T) { expected := cli.FieldErrors{ &field.Error{Field: "parent[2]"}, &field.Error{Field: "parent[2].field"}, &field.Error{Field: "parent[2][0]"}, } actual := cli.FieldErrors{ &field.Error{Field: "[]"}, &field.Error{Field: "field"}, &field.Error{Field: "[0]"}, }.ViaFieldIndex("parent", 2) if diff := cmp.Diff(expected, actual); diff != "" { t.Errorf("(-expected, +actual): %s", diff) } }
explode_data.jsonl/13213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 1877, 13877, 2334, 685, 1877, 1552, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 21348, 17087, 13877, 515, 197, 197, 5, 2566, 6141, 90, 1877, 25, 330, 3765, 58, 17, 60, 7115, 197, 197, 5, 2566, 6141, 90, 1877, 25, 330, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPasswordAuth_Valid(t *testing.T) { req := bytes.NewBuffer(nil) req.Write([]byte{2, NoAuth, UserPassAuth}) req.Write([]byte{1, 3, 'f', 'o', 'o', 3, 'b', 'a', 'r'}) var resp bytes.Buffer cred := StaticCredentials{ "foo": "bar", } cator := UserPassAuthenticator{Credentials: cred} s, _ := New(&Config{AuthMethods: []Authenticator{cator}}) ctx, err := s.authenticate(&resp, req) if err != nil { t.Fatalf("err: %v", err) } if ctx.Method != UserPassAuth { t.Fatal("Invalid Context Method") } val, ok := ctx.Payload["Username"] if !ok { t.Fatal("Missing key Username in auth context's payload") } if val != "foo" { t.Fatal("Invalid Username in auth context's payload") } out := resp.Bytes() if !bytes.Equal(out, []byte{socks5Version, UserPassAuth, 1, authSuccess}) { t.Fatalf("bad: %v", out) } }
explode_data.jsonl/18559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 4876, 5087, 97279, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 5820, 7121, 4095, 27907, 340, 24395, 4073, 10556, 3782, 90, 17, 11, 2308, 5087, 11, 2657, 12187, 5087, 3518, 24395, 4073, 10556, 3782, 90, 16, 11, 220, 18, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestIssue21447(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk1, tk2 := testkit.NewTestKit(t, store), testkit.NewTestKit(t, store) tk1.MustExec("use test") tk2.MustExec("use test") tk1.MustExec("drop table if exists t1") tk1.MustExec("create table t1(id int primary key, name varchar(40))") tk1.MustExec("insert into t1 values(1, 'abc')") tk1.MustExec("begin pessimistic") tk2.MustExec("begin pessimistic") tk2.MustExec("update t1 set name='xyz' where id=1") tk2.CheckExecResult(1, 0) tk2.MustQuery("select * from t1 where id = 1").Check(testkit.Rows("1 xyz")) tk2.MustExec("commit") tk1.MustExec("update t1 set name='xyz' where id=1") tk1.CheckExecResult(0, 0) tk1.MustQuery("select * from t1 where id = 1").Check(testkit.Rows("1 abc")) tk1.MustQuery("select * from t1 where id = 1 for update").Check(testkit.Rows("1 xyz")) tk1.MustQuery("select * from t1 where id in (1, 2)").Check(testkit.Rows("1 abc")) tk1.MustQuery("select * from t1 where id in (1, 2) for update").Check(testkit.Rows("1 xyz")) tk1.MustExec("commit") }
explode_data.jsonl/76270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 425 }
[ 2830, 3393, 42006, 17, 16, 19, 19, 22, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 16, 11, 17162, 17, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRelinquishmentRequestGenerator(t *testing.T) { config := &active_mode.ActiveModeConfig{ Cbsd: &active_mode.Cbsd{ Id: "some_id", Grants: []*active_mode.Grant{{ Id: "some_grant_id", }}, }, } g := sas.NewRelinquishmentRequestGenerator() actual := g.GenerateRequests(config) expected := []*request{{ requestType: "relinquishmentRequest", data: `{ "cbsdId": "some_id", "grantId": "some_grant_id" }`, }} assertRequestsEqual(t, expected, actual) }
explode_data.jsonl/31661
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 6740, 52888, 16025, 1900, 12561, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 609, 3028, 7302, 28755, 3636, 2648, 515, 197, 6258, 51835, 25, 609, 3028, 7302, 727, 51835, 515, 298, 67211, 25, 330, 14689, 842, 756, 298, 197, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShouldRedirectWhenSessionInactiveForTooLongAndRDParamProvided(t *testing.T) { mock := mocks.NewMockAutheliaCtx(t) defer mock.Close() clock := mocks.TestingClock{} clock.Set(time.Now()) mock.Ctx.Configuration.Session.Inactivity = testInactivity // Reload the session provider since the configuration is indirect. mock.Ctx.Providers.SessionProvider = session.NewProvider(mock.Ctx.Configuration.Session, nil) assert.Equal(t, time.Second*10, mock.Ctx.Providers.SessionProvider.Inactivity) past := clock.Now().Add(-1 * time.Hour) userSession := mock.Ctx.GetSession() userSession.Username = testUsername userSession.AuthenticationLevel = authentication.TwoFactor userSession.LastActivity = past.Unix() err := mock.Ctx.SaveSession(userSession) require.NoError(t, err) mock.Ctx.QueryArgs().Add("rd", "https://login.example.com") mock.Ctx.Request.Header.Set("X-Original-URL", "https://two-factor.example.com") mock.Ctx.Request.Header.Set("X-Forwarded-Method", "GET") VerifyGet(verifyGetCfg)(mock.Ctx) assert.Equal(t, "Found. Redirecting to https://login.example.com?rd=https%3A%2F%2Ftwo-factor.example.com&rm=GET", string(mock.Ctx.Response.Body())) assert.Equal(t, 302, mock.Ctx.Response.StatusCode()) // Check the inactivity timestamp has been updated to current time in the new session. newUserSession := mock.Ctx.GetSession() assert.Equal(t, clock.Now().Unix(), newUserSession.LastActivity) }
explode_data.jsonl/20195
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 495 }
[ 2830, 3393, 14996, 17725, 4498, 5283, 72214, 2461, 31246, 6583, 3036, 36690, 2001, 35819, 291, 1155, 353, 8840, 836, 8, 341, 77333, 1669, 68909, 7121, 11571, 5087, 35929, 23684, 1155, 340, 16867, 7860, 10421, 2822, 84165, 1669, 68909, 8787,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStoreLoadWithContextOrDefaults(t *testing.T) { defaultsConfig := ConfigMapFromTestFile(t, DefaultsConfigName) config := FromContextOrDefaults(context.Background()) t.Run("defaults", func(t *testing.T) { expected, _ := NewDefaultsConfigFromConfigMap(defaultsConfig) if diff := cmp.Diff(expected, config.Defaults, ignoreStuff...); diff != "" { t.Errorf("Unexpected defaults config (-want, +got): %v", diff) } }) }
explode_data.jsonl/45714
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 6093, 5879, 91101, 14188, 82, 1155, 353, 8840, 836, 8, 341, 11940, 82, 2648, 1669, 5532, 2227, 3830, 2271, 1703, 1155, 11, 35990, 2648, 675, 340, 25873, 1669, 5542, 1972, 14188, 82, 5378, 19047, 12367, 3244, 16708, 445, 2675...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPyModuleGetDict(t *testing.T) { fmt.Println(assert.CallerInfo()[0]) assert.Nil(t, pymodule.GetDict(nil)) sys := pyimport.ImportModule("sys") defer py.DecRef(sys) sysRefCnt := py.RefCnt(sys) defer func() { assert.Equal(t, sysRefCnt, py.RefCnt(sys)) }() assert.NotNil(t, sys) dic := pymodule.GetDict(sys) assert.True(t, pydict.Check(dic)) }
explode_data.jsonl/33467
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 13828, 3332, 1949, 13448, 1155, 353, 8840, 836, 8, 341, 11009, 12419, 75846, 727, 13956, 1731, 10116, 15, 9604, 6948, 59678, 1155, 11, 45760, 1756, 2234, 13448, 27907, 4390, 41709, 1669, 4510, 474, 67275, 3332, 445, 7791, 1138...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLineComments(t *testing.T) { segs := segments if runtime.GOOS == "windows" { segs = append(segs, winsegments...) } else { segs = append(segs, unixsegments...) } // make source var src string for _, e := range segs { src += e.srcline } // verify scan var S Scanner file := fset.AddFile(filepath.Join("dir", "TestLineComments"), fset.Base(), len(src)) S.Init(file, []byte(src), nil, dontInsertSemis) for _, s := range segs { p, _, lit := S.Scan() pos := file.Position(p) checkPos(t, lit, p, token.Position{ Filename: s.filename, Offset: pos.Offset, Line: s.line, Column: pos.Column, }) } if S.ErrorCount != 0 { t.Errorf("found %d errors", S.ErrorCount) } }
explode_data.jsonl/1840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 2460, 17373, 1155, 353, 8840, 836, 8, 341, 84686, 5857, 1669, 20632, 198, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 341, 197, 84686, 5857, 284, 8737, 10478, 5857, 11, 3164, 56829, 31218, 197, 92, 770, 341, 197, 84686, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAdmitPreferNonmutatingWhenPossible(t *testing.T) { mutatingSCC := restrictiveSCC() mutatingSCC.Name = "mutating-scc" nonMutatingSCC := laxSCC() nonMutatingSCC.Name = "non-mutating-scc" simplePod := goodPod() simplePod.Spec.Containers[0].Name = "simple-pod" simplePod.Spec.Containers[0].Image = "test-image:0.1" modifiedPod := simplePod.DeepCopy() modifiedPod.Spec.Containers[0].Image = "test-image:0.2" tests := map[string]struct { oldPod *kapi.Pod newPod *kapi.Pod operation kadmission.Operation sccs []*securityapi.SecurityContextConstraints shouldPass bool expectedSCC string }{ "creation: the first SCC (even if it mutates) should be used": { newPod: simplePod.DeepCopy(), operation: kadmission.Create, sccs: []*securityapi.SecurityContextConstraints{mutatingSCC, nonMutatingSCC}, shouldPass: true, expectedSCC: mutatingSCC.Name, }, "updating: the first non-mutating SCC should be used": { oldPod: simplePod.DeepCopy(), newPod: modifiedPod.DeepCopy(), operation: kadmission.Update, sccs: []*securityapi.SecurityContextConstraints{mutatingSCC, nonMutatingSCC}, shouldPass: true, expectedSCC: nonMutatingSCC.Name, }, "updating: a pod should be rejected when there are only mutating SCCs": { oldPod: simplePod.DeepCopy(), newPod: modifiedPod.DeepCopy(), operation: kadmission.Update, sccs: []*securityapi.SecurityContextConstraints{mutatingSCC}, shouldPass: false, }, } for testCaseName, testCase := range tests { // We can't use testSCCAdmission() here because it doesn't support Update operation. // We can't use testSCCAdmit() here because it doesn't support Update operation and doesn't check for the SCC annotation. 
tc := setupClientSet() lister := createSCCLister(t, testCase.sccs) testAuthorizer := &sccTestAuthorizer{t: t} plugin := newTestAdmission(lister, tc, testAuthorizer) attrs := kadmission.NewAttributesRecord(testCase.newPod, testCase.oldPod, kapi.Kind("Pod").WithVersion("version"), testCase.newPod.Namespace, testCase.newPod.Name, kapi.Resource("pods").WithVersion("version"), "", testCase.operation, &user.DefaultInfo{}) err := plugin.(kadmission.MutationInterface).Admit(attrs) if testCase.shouldPass { if err != nil { t.Errorf("%s expected no errors but received %v", testCaseName, err) } else { validatedSCC, ok := testCase.newPod.Annotations[allocator.ValidatedSCCAnnotation] if !ok { t.Errorf("expected %q to find the validated annotation on the pod for the scc but found none", testCaseName) } else if validatedSCC != testCase.expectedSCC { t.Errorf("%q should have validated against %q but found %q", testCaseName, testCase.expectedSCC, validatedSCC) } } } else { if err == nil { t.Errorf("%s expected errors but received none", testCaseName) } } } }
explode_data.jsonl/25932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1111 }
[ 2830, 3393, 2589, 1763, 4703, 802, 8121, 6984, 1095, 4498, 65222, 1155, 353, 8840, 836, 8, 1476, 2109, 332, 1095, 3540, 34, 1669, 56996, 3540, 34, 741, 2109, 332, 1095, 3540, 34, 2967, 284, 330, 6984, 1095, 1331, 638, 1837, 197, 6280,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestServiceTopologyReturnsCorrectLinksAndEquipment(t *testing.T) { r := newTestResolver(t) defer r.Close() ctx := viewertest.NewContext(context.Background(), r.client) mr := r.Mutation() locType, _ := mr.AddLocationType(ctx, models.AddLocationTypeInput{ Name: "Room", }) eqt, _ := mr.AddEquipmentType(ctx, models.AddEquipmentTypeInput{ Name: "Router", Ports: []*models.EquipmentPortInput{ {Name: "typ1_p1"}, {Name: "typ1_p2"}, }, }) loc, _ := mr.AddLocation(ctx, models.AddLocationInput{ Name: "Room2", Type: locType.ID, }) eq1, _ := mr.AddEquipment(ctx, models.AddEquipmentInput{ Name: "Router1", Type: eqt.ID, Location: &loc.ID, }) eq2, _ := mr.AddEquipment(ctx, models.AddEquipmentInput{ Name: "Router2", Type: eqt.ID, Location: &loc.ID, }) eq3, _ := mr.AddEquipment(ctx, models.AddEquipmentInput{ Name: "Router3", Type: eqt.ID, Location: &loc.ID, }) equipmentType := r.client.EquipmentType.GetX(ctx, eqt.ID) defs := equipmentType.QueryPortDefinitions().AllX(ctx) ep1 := eq1.QueryPorts().Where(equipmentport.HasDefinitionWith(equipmentportdefinition.ID(defs[0].ID))).OnlyX(ctx) l1, _ := mr.AddLink(ctx, models.AddLinkInput{ Sides: []*models.LinkSide{ {Equipment: eq1.ID, Port: defs[0].ID}, {Equipment: eq2.ID, Port: defs[0].ID}, }, }) l2, _ := mr.AddLink(ctx, models.AddLinkInput{ Sides: []*models.LinkSide{ {Equipment: eq2.ID, Port: defs[1].ID}, {Equipment: eq3.ID, Port: defs[1].ID}, }, }) st, _ := mr.AddServiceType(ctx, models.ServiceTypeCreateData{ Name: "Internet Access", HasCustomer: false, Endpoints: []*models.ServiceEndpointDefinitionInput{ { Name: "endpoint type1", Role: pointer.ToString("CONSUMER"), Index: 0, EquipmentTypeID: eqt.ID, }, }, }) s, err := mr.AddService(ctx, models.ServiceCreateData{ Name: "Internet Access Room 2", ServiceTypeID: st.ID, Status: pointerToServiceStatus(models.ServiceStatusPending), }) require.NoError(t, err) _, err = mr.AddServiceLink(ctx, s.ID, l1.ID) require.NoError(t, err) _, err = mr.AddServiceLink(ctx, s.ID, l2.ID) require.NoError(t, 
err) ept := st.QueryEndpointDefinitions().OnlyX(ctx) _, err = mr.AddServiceEndpoint(ctx, models.AddServiceEndpointInput{ ID: s.ID, EquipmentID: eq1.ID, PortID: pointer.ToInt(ep1.ID), Definition: ept.ID, }) require.NoError(t, err) res, err := r.Service().Topology(ctx, s) require.NoError(t, err) require.Len(t, res.Nodes, 3) require.Len(t, res.Links, 2) }
explode_data.jsonl/7205
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1172 }
[ 2830, 3393, 1860, 60954, 16446, 33092, 24089, 3036, 58276, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 501, 2271, 18190, 1155, 340, 16867, 435, 10421, 741, 20985, 1669, 1651, 83386, 7121, 1972, 5378, 19047, 1507, 435, 6581, 692, 2109, 81, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExtensionsGet(t *testing.T) { client, err := clients.NewIdentityV2Client() if err != nil { t.Fatalf("Unable to create an identity client: %v", err) } extension, err := extensions.Get(client, "OS-KSCRUD").Extract() if err != nil { t.Fatalf("Unable to get extension OS-KSCRUD: %v", err) } tools.PrintResource(t, extension) }
explode_data.jsonl/18363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 31282, 1949, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 8239, 7121, 18558, 53, 17, 2959, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 17075, 311, 1855, 458, 9569, 2943, 25, 1018, 85, 497, 1848, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddElementInQueue(t *testing.T) { q := newQueue() q.Add("a") q.Add("b") q.Add("d") q.Add("e") fmt.Println("\n Queue elements ") q.Print() }
explode_data.jsonl/73784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 2212, 1691, 641, 7554, 1155, 353, 8840, 836, 8, 341, 18534, 1669, 501, 7554, 741, 18534, 1904, 445, 64, 1138, 18534, 1904, 445, 65, 1138, 18534, 1904, 445, 67, 1138, 18534, 1904, 445, 68, 1138, 11009, 12419, 4921, 77, 1874...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPtrTypeToType(t *testing.T) { // Encode a *T, decode a T type Type1 struct { A uint } t1p := &Type1{17} var t1 Type1 if err := encAndDec(t1, t1p); err != nil { t.Error(err) } }
explode_data.jsonl/43384
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 5348, 929, 1249, 929, 1155, 353, 8840, 836, 8, 341, 197, 322, 56562, 264, 353, 51, 11, 16895, 264, 350, 198, 13158, 3990, 16, 2036, 341, 197, 22985, 2622, 198, 197, 532, 3244, 16, 79, 1669, 609, 929, 16, 90, 16, 22, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestServer_SecurityHeaders_SelfServe(t *testing.T) { s := Server{ cfg: Config{ Developing: false, StaticURL: "/static", }, } rr := httptest.NewRecorder() req := httptest.NewRequest("GET", "/", nil) m := s.SecurityHeaders() h := m(http.HandlerFunc(fakeHandler)) h.ServeHTTP(rr, req) assert.Equal(t, "default-src *; img-src *; font-src *; style-src * 'unsafe-inline'; script-src 'none';", rr.Header().Get("Content-Security-Policy")) assert.Equal(t, "DENY", rr.Header().Get("X-Frame-Options")) assert.Equal(t, "1", rr.Header().Get("X-XSS-Protection")) assert.Equal(t, "nosniff", rr.Header().Get("X-Content-Type-Options")) assert.Equal(t, "no-referrer", rr.Header().Get("Referrer-Policy")) }
explode_data.jsonl/44435
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 5475, 1098, 18429, 10574, 1098, 490, 60421, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 8422, 515, 197, 50286, 25, 5532, 515, 298, 197, 20444, 287, 25, 895, 345, 298, 197, 11690, 3144, 25, 220, 3521, 1978, 756, 197, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRuntime_runServer(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() cfg := newDefaultStandaloneConfig(t) cfg.StorageBase.GRPC.Port = 3903 standalone := NewStandaloneRuntime("test-version", &cfg) s := standalone.(*runtime) storage := server.NewMockService(ctrl) s.storage = storage broker := server.NewMockService(ctrl) s.broker = broker storage.EXPECT().Run().Return(nil).AnyTimes() broker.EXPECT().Run().Return(fmt.Errorf("err")) err := s.runServer() assert.Error(t, err) storage.EXPECT().Stop().Return() broker.EXPECT().Stop().Return() s.Stop() }
explode_data.jsonl/3483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 15123, 14007, 5475, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 50286, 1669, 501, 3675, 623, 84112, 2648, 1155, 340, 50286, 43771, 3978, 1224, 29528, 43013,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntegrationSend(t *testing.T) { queueName, queuesClient, cleanup := newTestQueue(t, "receive") defer cleanup() tests := []struct { label string data []string }{ { label: "3 send, small payload", data: []string{ "2Hey there!", "2Hi there!", "2Ho there!", }, }, } for _, tt := range tests { t.Run(tt.label, func(t *testing.T) { checkLeaks := leaktest.CheckTimeout(t, 60*time.Second) // Create client client := newClient(t, tt.label) defer client.Close() // Open a session session, err := client.NewSession() if err != nil { t.Fatal(err) } // Create a sender sender, err := session.NewSender( amqp.LinkTargetAddress(queueName), ) if err != nil { t.Fatal(err) } for i, data := range tt.data { ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) err = sender.Send(ctx, amqp.NewMessage([]byte(data))) cancel() if err != nil { t.Fatalf("Error after %d sends: %+v", i, err) return } } sender.Close() client.Close() // close before leak check checkLeaks() // this is done here because queuesClient starts additional goroutines // Wait for Azure to update stats time.Sleep(1 * time.Second) q, err := queuesClient.Get(context.Background(), resourceGroup, namespace, queueName) if err != nil { t.Fatal(err) } if amc := *q.CountDetails.ActiveMessageCount; int(amc) != len(tt.data) { t.Fatalf("Expected ActiveMessageCount to be 0, but it was %d", amc) } if dead := *q.CountDetails.DeadLetterMessageCount; dead > 0 { t.Fatalf("Expected DeadLetterMessageCount to be 0, but it was %d", dead) } }) } }
explode_data.jsonl/60648
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 702 }
[ 2830, 3393, 52464, 11505, 1155, 353, 8840, 836, 8, 341, 46993, 675, 11, 48094, 2959, 11, 21290, 1669, 501, 2271, 7554, 1155, 11, 330, 41893, 1138, 16867, 21290, 2822, 78216, 1669, 3056, 1235, 341, 197, 29277, 914, 198, 197, 8924, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRuleGetSidMsg(t *testing.T) { for _, tt := range []struct { name string input Rule want string }{ { name: "rule", input: Rule{ SID: 1337, Description: "foo", }, want: `1337 || foo`, }, { name: "rule", input: Rule{ SID: 1337, Description: "foo", References: []*Reference{ { Type: "url", Value: "www.google.com", }, }, }, want: `1337 || foo || url,www.google.com`, }, { name: "rule", input: Rule{ SID: 1337, Description: "foo", References: []*Reference{ { Type: "url", Value: "www.google.com", }, { Type: "md5", Value: "2aee1c40199c7754da766e61452612cc", }, }, }, want: `1337 || foo || url,www.google.com || md5,2aee1c40199c7754da766e61452612cc`, }, } { got := tt.input.GetSidMsg() if got != tt.want { t.Fatalf("%s: got %v -- expected %v", tt.name, got, tt.want) } } }
explode_data.jsonl/59700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 548 }
[ 2830, 3393, 11337, 1949, 67653, 6611, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 22427, 18100, 198, 197, 50780, 220, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNetworkMemberString(t *testing.T) { tests := []struct { input NetworkMember expected string }{ { input: NetworkMember{Endpoint: "endpoint", InternalEndpoint: "internal-endpoint", PKIid: common.PKIidType{0, 1, 2, 3}, Metadata: nil}, expected: "Endpoint: endpoint, InternalEndpoint: internal-endpoint, PKI-ID: 00010203, Metadata: ", }, { input: NetworkMember{Endpoint: "endpoint", InternalEndpoint: "internal-endpoint", PKIid: common.PKIidType{0, 1, 2, 3}, Metadata: []byte{4, 5, 6, 7}}, expected: "Endpoint: endpoint, InternalEndpoint: internal-endpoint, PKI-ID: 00010203, Metadata: 04050607", }, } for _, tt := range tests { assert.Equal(t, tt.expected, tt.input.String()) } }
explode_data.jsonl/62256
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 12320, 9366, 703, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 22427, 262, 8141, 9366, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 515, 298, 22427, 25, 262, 8141, 9366, 90, 27380, 25, 330, 32540, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSymbolizationPath(t *testing.T) { if runtime.GOOS == "windows" { t.Skip("test assumes Unix paths") } // Save environment variables to restore after test saveHome := os.Getenv(homeEnv()) savePath := os.Getenv("PPROF_BINARY_PATH") tempdir, err := ioutil.TempDir("", "home") if err != nil { t.Fatal("creating temp dir: ", err) } defer os.RemoveAll(tempdir) os.MkdirAll(filepath.Join(tempdir, "pprof", "binaries", "abcde10001"), 0700) os.Create(filepath.Join(tempdir, "pprof", "binaries", "abcde10001", "binary")) obj := testObj{tempdir} os.Setenv(homeEnv(), tempdir) for _, tc := range []struct { env, file, buildID, want string msgCount int }{ {"", "/usr/bin/binary", "", "/usr/bin/binary", 0}, {"", "/usr/bin/binary", "fedcb10000", "/usr/bin/binary", 0}, {"/usr", "/bin/binary", "", "/usr/bin/binary", 0}, {"", "/prod/path/binary", "abcde10001", filepath.Join(tempdir, "pprof/binaries/abcde10001/binary"), 0}, {"/alternate/architecture", "/usr/bin/binary", "", "/alternate/architecture/binary", 0}, {"/alternate/architecture", "/usr/bin/binary", "abcde10001", "/alternate/architecture/binary", 0}, {"/nowhere:/alternate/architecture", "/usr/bin/binary", "fedcb10000", "/usr/bin/binary", 1}, {"/nowhere:/alternate/architecture", "/usr/bin/binary", "abcde10002", "/usr/bin/binary", 1}, } { os.Setenv("PPROF_BINARY_PATH", tc.env) p := &profile.Profile{ Mapping: []*profile.Mapping{ { File: tc.file, BuildID: tc.buildID, }, }, } s := &source{} locateBinaries(p, s, obj, &proftest.TestUI{T: t, Ignore: tc.msgCount}) if file := p.Mapping[0].File; file != tc.want { t.Errorf("%s:%s:%s, want %s, got %s", tc.env, tc.file, tc.buildID, tc.want, file) } } os.Setenv(homeEnv(), saveHome) os.Setenv("PPROF_BINARY_PATH", savePath) }
explode_data.jsonl/36294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 791 }
[ 2830, 3393, 15090, 2022, 1820, 1155, 353, 8840, 836, 8, 341, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 341, 197, 3244, 57776, 445, 1944, 21484, 46995, 12716, 1138, 197, 630, 197, 322, 10255, 4573, 7332, 311, 14952, 1283, 1273, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSGF(t *testing.T) { k := NewKifu() r := k.AddMove(NewMove(xy2posi(3, 3), Black), nil) k.AddMove(NewMove(xy2posi(15, 15), White), r) s := k.ToSGF() k1 := NewKifu() k1.LoadSGF(s) m1 := k1.GetMove(0, 1) m2 := k1.NextMove(m1) if m1.MoveNum != 1 || m1.Posi != xy2posi(3, 3) || m1.Color != Black || m2.MoveNum != 2 || m2.Posi != xy2posi(15, 15) || m2.Color != White { t.Errorf("simple sgf save&load failed") } }
explode_data.jsonl/67494
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 7783, 37, 1155, 353, 8840, 836, 8, 341, 16463, 1669, 1532, 42, 20850, 741, 7000, 1669, 595, 1904, 9860, 35063, 9860, 93219, 17, 966, 72, 7, 18, 11, 220, 18, 701, 5235, 701, 2092, 340, 16463, 1904, 9860, 35063, 9860, 9321...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetStringFromEnv(t *testing.T) { t.Parallel() b := NewConfigReaderBuilder() reader := b.AttachEnvPrefix("simple").WithConfigFile("testdata/config.yaml").Build() os.Setenv("SIMPLE_GENCODE_DOWNSTREAM_FOO_SERVICEURL", "https://env.foo.example.com") fooURL, err := reader.GetString("genCode.downstream.foo.serviceURL") require.Nil(t, err) assert.Equal(t, "https://env.foo.example.com", fooURL) }
explode_data.jsonl/53788
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 48905, 3830, 14359, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2233, 1669, 1532, 2648, 5062, 3297, 741, 61477, 1669, 293, 88284, 14359, 14335, 445, 22944, 1827, 2354, 2648, 1703, 445, 92425, 14730, 33406, 1827, 110...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMonitorNodeHealthMarkPodsNotReady(t *testing.T) { fakeNow := metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC) table := []struct { fakeNodeHandler *testutil.FakeNodeHandler timeToPass time.Duration newNodeStatus v1.NodeStatus expectedPodStatusUpdate bool }{ // Node created recently, without status. // Expect no action from node controller (within startup grace period). { fakeNodeHandler: &testutil.FakeNodeHandler{ Existing: []*v1.Node{ { ObjectMeta: metav1.ObjectMeta{ Name: "node0", CreationTimestamp: fakeNow, }, }, }, Clientset: fake.NewSimpleClientset(&v1.PodList{Items: []v1.Pod{*testutil.NewPod("pod0", "node0")}}), }, expectedPodStatusUpdate: false, }, // Node created long time ago, with status updated recently. // Expect no action from node controller (within monitor grace period). { fakeNodeHandler: &testutil.FakeNodeHandler{ Existing: []*v1.Node{ { ObjectMeta: metav1.ObjectMeta{ Name: "node0", CreationTimestamp: metav1.Date(2012, 1, 1, 0, 0, 0, 0, time.UTC), }, Status: v1.NodeStatus{ Conditions: []v1.NodeCondition{ { Type: v1.NodeReady, Status: v1.ConditionTrue, // Node status has just been updated. LastHeartbeatTime: fakeNow, LastTransitionTime: fakeNow, }, }, Capacity: v1.ResourceList{ v1.ResourceName(v1.ResourceCPU): resource.MustParse("10"), v1.ResourceName(v1.ResourceMemory): resource.MustParse("10G"), }, }, }, }, Clientset: fake.NewSimpleClientset(&v1.PodList{Items: []v1.Pod{*testutil.NewPod("pod0", "node0")}}), }, expectedPodStatusUpdate: false, }, // Node created long time ago, with status updated by kubelet exceeds grace period. // Expect pods status updated and Unknown node status posted from node controller { fakeNodeHandler: &testutil.FakeNodeHandler{ Existing: []*v1.Node{ { ObjectMeta: metav1.ObjectMeta{ Name: "node0", CreationTimestamp: metav1.Date(2012, 1, 1, 0, 0, 0, 0, time.UTC), }, Status: v1.NodeStatus{ Conditions: []v1.NodeCondition{ { Type: v1.NodeReady, Status: v1.ConditionTrue, // Node status hasn't been updated for 1hr. 
LastHeartbeatTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), LastTransitionTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), }, }, Capacity: v1.ResourceList{ v1.ResourceName(v1.ResourceCPU): resource.MustParse("10"), v1.ResourceName(v1.ResourceMemory): resource.MustParse("10G"), }, }, }, }, Clientset: fake.NewSimpleClientset(&v1.PodList{Items: []v1.Pod{*testutil.NewPod("pod0", "node0")}}), }, timeToPass: 1 * time.Minute, newNodeStatus: v1.NodeStatus{ Conditions: []v1.NodeCondition{ { Type: v1.NodeReady, Status: v1.ConditionTrue, // Node status hasn't been updated for 1hr. LastHeartbeatTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), LastTransitionTime: metav1.Date(2015, 1, 1, 12, 0, 0, 0, time.UTC), }, }, Capacity: v1.ResourceList{ v1.ResourceName(v1.ResourceCPU): resource.MustParse("10"), v1.ResourceName(v1.ResourceMemory): resource.MustParse("10G"), }, }, expectedPodStatusUpdate: true, }, } for i, item := range table { nodeController, _ := newNodeLifecycleControllerFromClient( nil, item.fakeNodeHandler, 5*time.Minute, testRateLimiterQPS, testRateLimiterQPS, testLargeClusterThreshold, testUnhealthyThreshold, testNodeMonitorGracePeriod, testNodeStartupGracePeriod, testNodeMonitorPeriod, false) nodeController.now = func() metav1.Time { return fakeNow } nodeController.recorder = testutil.NewFakeRecorder() if err := nodeController.syncNodeStore(item.fakeNodeHandler); err != nil { t.Errorf("unexpected error: %v", err) } if err := nodeController.monitorNodeHealth(); err != nil { t.Errorf("Case[%d] unexpected error: %v", i, err) } if item.timeToPass > 0 { nodeController.now = func() metav1.Time { return metav1.Time{Time: fakeNow.Add(item.timeToPass)} } item.fakeNodeHandler.Existing[0].Status = item.newNodeStatus if err := nodeController.syncNodeStore(item.fakeNodeHandler); err != nil { t.Errorf("unexpected error: %v", err) } if err := nodeController.monitorNodeHealth(); err != nil { t.Errorf("Case[%d] unexpected error: %v", i, err) } } podStatusUpdated := 
false for _, action := range item.fakeNodeHandler.Actions() { if action.GetVerb() == "update" && action.GetResource().Resource == "pods" && action.GetSubresource() == "status" { podStatusUpdated = true } } if podStatusUpdated != item.expectedPodStatusUpdate { t.Errorf("Case[%d] expect pod status updated to be %v, but got %v", i, item.expectedPodStatusUpdate, podStatusUpdated) } } }
explode_data.jsonl/9615
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2241 }
[ 2830, 3393, 30098, 1955, 14542, 8949, 23527, 82, 2623, 19202, 1155, 353, 8840, 836, 8, 341, 1166, 726, 7039, 1669, 77520, 16, 8518, 7, 17, 15, 16, 20, 11, 220, 16, 11, 220, 16, 11, 220, 16, 17, 11, 220, 15, 11, 220, 15, 11, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIdleTimeoutAndUpdate(t *testing.T) { setup() defer tearDown() defer func() { buffer.ConnReadTimeout = types.DefaultConnReadTimeout defaultIdleTimeout = types.DefaultIdleTimeout }() log.DefaultLogger.SetLogLevel(log.DEBUG) buffer.ConnReadTimeout = time.Second defaultIdleTimeout = 3 * time.Second addrStr := "127.0.0.1:8082" name := "listener3" // bas listener config have no idle timeout config, set the default value listenerConfig := baseListenerConfig(addrStr, name) if err := GetListenerAdapterInstance().AddOrUpdateListener(testServerName, listenerConfig); err != nil { t.Fatalf("add a new listener failed %v", err) } time.Sleep(time.Second) // wait listener start // 0. test default idle timeout func() { n := time.Now() conn, err := tls.Dial("tcp", addrStr, &tls.Config{ InsecureSkipVerify: true, }) if err != nil { t.Fatalf("dial failed, %v", err) } readChan := make(chan error) // try read go func() { buf := make([]byte, 10) _, err := conn.Read(buf) readChan <- err }() select { case err := <-readChan: // connection should be closed by server if err != io.EOF { t.Fatalf("connection read returns error: %v", err) } if time.Now().Sub(n) < defaultIdleTimeout { t.Fatal("connection closed too quickly") } case <-time.After(5 * time.Second): conn.Close() t.Fatal("connection should be closed, but not") } }() // Update idle timeout // 1. 
update as no idle timeout noIdle := baseListenerConfig(addrStr, name) noIdle.ConnectionIdleTimeout = &api.DurationConfig{ Duration: 0, } if err := GetListenerAdapterInstance().AddOrUpdateListener(testServerName, noIdle); err != nil { t.Fatalf("update listener failed, %v", err) } func() { conn, err := tls.Dial("tcp", addrStr, &tls.Config{ InsecureSkipVerify: true, }) if err != nil { t.Fatalf("dial failed, %v", err) } readChan := make(chan error) // try read go func() { buf := make([]byte, 10) _, err := conn.Read(buf) readChan <- err }() select { case err := <-readChan: t.Fatalf("receive an error: %v", err) case <-time.After(5 * time.Second): conn.Close() } }() // 2. update idle timeout with config cfgIdle := baseListenerConfig(addrStr, name) cfgIdle.ConnectionIdleTimeout = &api.DurationConfig{ Duration: 5 * time.Second, } if err := GetListenerAdapterInstance().AddOrUpdateListener(testServerName, cfgIdle); err != nil { t.Fatalf("update listener failed, %v", err) } func() { n := time.Now() conn, err := tls.Dial("tcp", addrStr, &tls.Config{ InsecureSkipVerify: true, }) if err != nil { t.Fatalf("dial failed, %v", err) } readChan := make(chan error) // try read go func() { buf := make([]byte, 10) _, err := conn.Read(buf) readChan <- err }() select { case err := <-readChan: // connection should be closed by server if err != io.EOF { t.Fatalf("connection read returns error: %v", err) } if time.Now().Sub(n) < 5*time.Second { t.Fatal("connection closed too quickly") } case <-time.After(8 * time.Second): conn.Close() t.Fatal("connection should be closed, but not") } }() }
explode_data.jsonl/9408
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1242 }
[ 2830, 3393, 41370, 7636, 56365, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 32825, 2822, 16867, 2915, 368, 341, 197, 31122, 50422, 4418, 7636, 284, 4494, 13275, 9701, 4418, 7636, 198, 197, 11940, 41370, 7636, 284, 4494, 13275, 41370,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConnectionUpsert(t *testing.T) { assert := assert.New(t) tx, err := Default().Begin() assert.Nil(err) defer tx.Rollback() err = createUpserObjectTable(tx) assert.Nil(err) obj := &upsertObj{ UUID: uuid.V4().String(), Timestamp: time.Now().UTC(), Category: uuid.V4().String(), } err = Default().UpsertInTx(obj, tx) assert.Nil(err) var verify upsertObj err = Default().GetInTx(&verify, tx, obj.UUID) assert.Nil(err) assert.Equal(obj.Category, verify.Category) obj.Category = "test" err = Default().UpsertInTx(obj, tx) assert.Nil(err) err = Default().GetInTx(&verify, tx, obj.UUID) assert.Nil(err) assert.Equal(obj.Category, verify.Category) }
explode_data.jsonl/33843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 4526, 98778, 529, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 46237, 11, 1848, 1669, 7899, 1005, 11135, 741, 6948, 59678, 3964, 340, 16867, 9854, 88918, 2822, 9859, 284, 1855, 2324, 799, 1190, 2556, 2730...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAccAWSS3BucketObject_updatesWithVersioning(t *testing.T) { var originalObj, modifiedObj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" rInt := acctest.RandInt() sourceInitial := testAccAWSS3BucketObjectCreateTempFile(t, "initial versioned object state") defer os.Remove(sourceInitial) sourceModified := testAccAWSS3BucketObjectCreateTempFile(t, "modified versioned object") defer os.Remove(sourceInitial) resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { Config: testAccAWSS3BucketObjectConfig_updateable(rInt, true, sourceInitial), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &originalObj), testAccCheckAWSS3BucketObjectBody(&originalObj, "initial versioned object state"), resource.TestCheckResourceAttr(resourceName, "etag", "cee4407fa91906284e2a5e5e03e86b1b"), ), }, { Config: testAccAWSS3BucketObjectConfig_updateable(rInt, true, sourceModified), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &modifiedObj), testAccCheckAWSS3BucketObjectBody(&modifiedObj, "modified versioned object"), resource.TestCheckResourceAttr(resourceName, "etag", "00b8c73b1b50e7cc932362c7225b8e29"), testAccCheckAWSS3BucketObjectVersionIdDiffers(&modifiedObj, &originalObj), ), }, }, }) }
explode_data.jsonl/64962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 570 }
[ 2830, 3393, 14603, 14419, 1220, 18, 36018, 1190, 57829, 2354, 5637, 287, 1155, 353, 8840, 836, 8, 341, 2405, 4024, 5261, 11, 10807, 5261, 274, 18, 25618, 5097, 198, 50346, 675, 1669, 330, 8635, 643, 18, 38749, 5314, 6035, 698, 7000, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewMCP23017DriverOdr(t *testing.T) { b := NewMCP23017Driver(newI2cTestAdaptor(), WithMCP23017Odr(1)) gobottest.Assert(t, b.MCPConf.Odr, uint8(1)) }
explode_data.jsonl/42309
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 3564, 44, 7123, 17, 18, 15, 16, 22, 11349, 46, 3612, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 1532, 44, 7123, 17, 18, 15, 16, 22, 11349, 1755, 40, 17, 66, 2271, 2589, 32657, 1507, 3085, 44, 7123, 17, 18, 15, 16, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMountPropagation(t *testing.T) { sharedLookupMountFn := func(string) (mount.Info, error) { return mount.Info{ Mountpoint: "host-path", Optional: "shared:", }, nil } slaveLookupMountFn := func(string) (mount.Info, error) { return mount.Info{ Mountpoint: "host-path", Optional: "master:", }, nil } othersLookupMountFn := func(string) (mount.Info, error) { return mount.Info{ Mountpoint: "host-path", Optional: "others", }, nil } for desc, test := range map[string]struct { criMount *runtime.Mount fakeLookupMountFn func(string) (mount.Info, error) optionsCheck []string expectErr bool }{ "HostPath should mount as 'rprivate' if propagation is MountPropagation_PROPAGATION_PRIVATE": { criMount: &runtime.Mount{ ContainerPath: "container-path", HostPath: "host-path", Propagation: runtime.MountPropagation_PROPAGATION_PRIVATE, }, fakeLookupMountFn: nil, optionsCheck: []string{"rbind", "rprivate"}, expectErr: false, }, "HostPath should mount as 'rslave' if propagation is MountPropagation_PROPAGATION_HOST_TO_CONTAINER": { criMount: &runtime.Mount{ ContainerPath: "container-path", HostPath: "host-path", Propagation: runtime.MountPropagation_PROPAGATION_HOST_TO_CONTAINER, }, fakeLookupMountFn: slaveLookupMountFn, optionsCheck: []string{"rbind", "rslave"}, expectErr: false, }, "HostPath should mount as 'rshared' if propagation is MountPropagation_PROPAGATION_BIDIRECTIONAL": { criMount: &runtime.Mount{ ContainerPath: "container-path", HostPath: "host-path", Propagation: runtime.MountPropagation_PROPAGATION_BIDIRECTIONAL, }, fakeLookupMountFn: sharedLookupMountFn, optionsCheck: []string{"rbind", "rshared"}, expectErr: false, }, "HostPath should mount as 'rprivate' if propagation is illegal": { criMount: &runtime.Mount{ ContainerPath: "container-path", HostPath: "host-path", Propagation: runtime.MountPropagation(42), }, fakeLookupMountFn: nil, optionsCheck: []string{"rbind", "rprivate"}, expectErr: false, }, "Expect an error if HostPath isn't shared and mount propagation is 
MountPropagation_PROPAGATION_BIDIRECTIONAL": { criMount: &runtime.Mount{ ContainerPath: "container-path", HostPath: "host-path", Propagation: runtime.MountPropagation_PROPAGATION_BIDIRECTIONAL, }, fakeLookupMountFn: slaveLookupMountFn, expectErr: true, }, "Expect an error if HostPath isn't slave or shared and mount propagation is MountPropagation_PROPAGATION_HOST_TO_CONTAINER": { criMount: &runtime.Mount{ ContainerPath: "container-path", HostPath: "host-path", Propagation: runtime.MountPropagation_PROPAGATION_HOST_TO_CONTAINER, }, fakeLookupMountFn: othersLookupMountFn, expectErr: true, }, } { t.Logf("TestCase %q", desc) c := newTestCRIService() c.os.(*ostesting.FakeOS).LookupMountFn = test.fakeLookupMountFn config, _, _, _ := getCreateContainerTestData() var spec runtimespec.Spec spec.Linux = &runtimespec.Linux{} err := opts.WithMounts(c.os, config, []*runtime.Mount{test.criMount}, "")(context.Background(), nil, nil, &spec) if test.expectErr { require.Error(t, err) } else { require.NoError(t, err) checkMount(t, spec.Mounts, test.criMount.HostPath, test.criMount.ContainerPath, "bind", test.optionsCheck, nil) } } }
explode_data.jsonl/6412
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1508 }
[ 2830, 3393, 16284, 35172, 1155, 353, 8840, 836, 8, 1476, 197, 6100, 34247, 16284, 24911, 1669, 2915, 3609, 8, 320, 16557, 20132, 11, 1465, 8, 341, 197, 853, 6470, 20132, 515, 298, 9209, 629, 2768, 25, 330, 3790, 33095, 756, 298, 65364...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLargeNumberOfKeyspaces(t *testing.T) { dockerImages := []string{vttestserverMysql57image, vttestserverMysql80image} for _, image := range dockerImages { t.Run(image, func(t *testing.T) { var keyspaces []string var numShards []int for i := 0; i < 100; i++ { keyspaces = append(keyspaces, fmt.Sprintf("unsharded_ks%d", i)) numShards = append(numShards, 1) } vtest := newVttestserver(image, keyspaces, numShards, 100000, 33577) err := vtest.startDockerImage() require.NoError(t, err) defer vtest.teardown() // wait for the docker to be setup err = vtest.waitUntilDockerHealthy(15) require.NoError(t, err) ctx := context.Background() vttestParams := mysql.ConnParams{ Host: "localhost", Port: vtest.port, } conn, err := mysql.Connect(ctx, &vttestParams) require.NoError(t, err) defer conn.Close() // assert that all the keyspaces are correctly setup for _, keyspace := range keyspaces { _, err = execute(t, conn, "create table "+keyspace+".t1(id int)") require.NoError(t, err) _, err = execute(t, conn, "insert into "+keyspace+".t1(id) values (10),(20),(30)") require.NoError(t, err) assertMatches(t, conn, "select * from "+keyspace+".t1", `[[INT32(10)] [INT32(20)] [INT32(30)]]`) } }) } }
explode_data.jsonl/54437
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 551 }
[ 2830, 3393, 34253, 40619, 8850, 27338, 1155, 353, 8840, 836, 8, 341, 2698, 13659, 14228, 1669, 3056, 917, 90, 9708, 1944, 4030, 44, 14869, 20, 22, 1805, 11, 39105, 1944, 4030, 44, 14869, 23, 15, 1805, 532, 2023, 8358, 2168, 1669, 2088...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3