text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestExtractSelectors(t *testing.T) { for _, tc := range [...]struct { input string expected []string }{ { "foo", []string{`{__name__="foo"}`}, }, { `foo{bar="baz"}`, []string{`{bar="baz", __name__="foo"}`}, }, { `foo{bar="baz"} / flip{flop="flap"}`, []string{`{bar="baz", __name__="foo"}`, `{flop="flap", __name__="flip"}`}, }, { `rate(foo[5m])`, []string{`{__name__="foo"}`}, }, { `vector(1)`, []string{}, }, } { expr, err := ParseExpr(tc.input) require.NoError(t, err) var expected [][]*labels.Matcher for _, s := range tc.expected { selector, err := ParseMetricSelector(s) require.NoError(t, err) expected = append(expected, selector) } require.Equal(t, expected, ExtractSelectors(expr)) } }
explode_data.jsonl/3397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 28959, 96995, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 48179, 1235, 341, 197, 22427, 262, 914, 198, 197, 42400, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 197, 1, 7975, 756, 298, 197, 1294, 917, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGenerateBaseKubernetesApiUrl(t *testing.T) { t.Run("Generates correct URL when all elements are present", func(t *testing.T) { schemeHostAndPort := "gopher://some-server.example.com:661" url, err := generateBaseKubernetesApiUrl(schemeHostAndPort) if err != nil { t.Fatalf("Unexpected error starting proxy: %v", err) } expectedUrlString := "gopher://some-server.example.com:661/api/v1/" if url.String() != expectedUrlString { t.Fatalf("Expected generated URl to be [%s], but got [%s]", expectedUrlString, url.String()) } }) t.Run("Return error if invalid host and port", func(t *testing.T) { schemeHostAndPort := "ftp://some-server.exampl e.com:666" _, err := generateBaseKubernetesApiUrl(schemeHostAndPort) if err == nil { t.Fatalf("Expected error when tryiong to generate URL with extra path without leading slash, got nothing") } }) }
explode_data.jsonl/18880
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 31115, 3978, 42, 29827, 6563, 2864, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5531, 973, 4396, 5548, 979, 678, 5424, 525, 3042, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1903, 8058, 9296, 3036, 7084, 1669, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPostUnmarshalWindowsCanonicalPaths(t *testing.T) { // Testing type conversions, bleh. At least the type conversion itself // doesn't look this messy. taskFromAcs := ecsacs.Task{ Arn: strptr("myArn"), DesiredStatus: strptr("RUNNING"), Family: strptr("myFamily"), Version: strptr("1"), Containers: []*ecsacs.Container{ { Name: strptr("myName"), MountPoints: []*ecsacs.MountPoint{ { ContainerPath: strptr(`C:/Container/Path`), SourceVolume: strptr("sourceVolume"), }, }, }, }, Volumes: []*ecsacs.Volume{ { Name: strptr("sourceVolume"), Host: &ecsacs.HostVolumeProperties{ SourcePath: strptr(`C:/Host/path`), }, }, }, } expectedTask := &Task{ Arn: "myArn", DesiredStatusUnsafe: TaskRunning, Family: "myFamily", Version: "1", Containers: []*Container{ { Name: "myName", MountPoints: []MountPoint{ { ContainerPath: `c:\container\path`, SourceVolume: "sourceVolume", }, }, }, }, Volumes: []TaskVolume{ { Name: "sourceVolume", Volume: &FSHostVolume{ FSSourcePath: `c:\host\path`, }, }, }, StartSequenceNumber: 42, } seqNum := int64(42) task, err := TaskFromACS(&taskFromAcs, &ecsacs.PayloadMessage{SeqNum: &seqNum}) assert.Nil(t, err, "Should be able to handle acs task") task.PostUnmarshalTask(nil) assert.Equal(t, expectedTask.Containers, task.Containers, "Containers should be equal") assert.Equal(t, expectedTask.Volumes, task.Volumes, "Volumes should be equal") }
explode_data.jsonl/80468
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 739 }
[ 2830, 3393, 4133, 1806, 27121, 13164, 70914, 26901, 1155, 353, 8840, 836, 8, 341, 197, 322, 26768, 943, 48722, 11, 12422, 71, 13, 2411, 3245, 279, 943, 14409, 5086, 198, 197, 322, 3171, 944, 1401, 419, 45846, 624, 49115, 3830, 32, 483...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParse_VoidPaymentNotification(t *testing.T) { result := MustParseFile("testdata/void_payment_notification.xml") if n, ok := result.(*webhooks.PaymentNotification); !ok { t.Fatalf("unexpected type: %T, result", n) } else if diff := cmp.Diff(n, &webhooks.PaymentNotification{ Type: webhooks.VoidPayment, Account: webhooks.Account{ XMLName: xml.Name{Local: "account"}, Code: "1", Username: "verena", Email: "verena@example.com", FirstName: "Verena", LastName: "Example", CompanyName: "Company, Inc.", }, Transaction: webhooks.Transaction{ XMLName: xml.Name{Local: "transaction"}, UUID: "a5143c1d3a6f4a8287d0e2cc1d4c0427", InvoiceNumber: 2059, SubscriptionUUID: "1974a098jhlkjasdfljkha898326881c", Action: "purchase", AmountInCents: 1000, Status: "void", Message: "Test Gateway: Successful test transaction", Reference: "reference", Source: "subscription", Test: recurly.NewBool(true), Voidable: recurly.NewBool(true), Refundable: recurly.NewBool(true), }, }); diff != "" { t.Fatal(diff) } }
explode_data.jsonl/76116
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 591 }
[ 2830, 3393, 14463, 2334, 588, 20188, 11196, 1155, 353, 8840, 836, 8, 341, 9559, 1669, 15465, 14463, 1703, 445, 92425, 14, 1004, 26696, 34296, 9028, 1138, 743, 308, 11, 5394, 1669, 1102, 41399, 2911, 38560, 70123, 11196, 1215, 753, 562, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCase5(t *testing.T) { golden := `v=0 o=- 1001 1 IN IP4 192.168.0.221 s=VCP IPC Realtime stream m=video 0 RTP/AVP 105 c=IN IP4 192.168.0.221 a=control:rtsp://192.168.0.221/media/video1/video a=rtpmap:105 H264/90000 a=fmtp:105 profile-level-id=64002a; packetization-mode=1; sprop-parameter-sets=Z2QAKq2EAQwgCGEAQwgCGEAQwgCEO1A8ARPyzcBAQFAAAD6AAAnECEA=,aO4xshs= a=recvonly m=application 0 RTP/AVP 107 c=IN IP4 192.168.0.221 a=control:rtsp://192.168.0.221/media/video1/metadata a=rtpmap:107 vnd.onvif.metadata/90000 a=fmtp:107 DecoderTag=h3c-v3 RTCP=0 a=recvonly` golden = strings.ReplaceAll(golden, "\n", "\r\n") ctx, err := ParseSdp2LogicContext([]byte(golden)) assert.Equal(t, nil, err) assert.Equal(t, false, ctx.hasAudio) assert.Equal(t, true, ctx.hasVideo) assert.Equal(t, 90000, ctx.VideoClockRate) assert.Equal(t, true, ctx.IsVideoPayloadTypeOrigin(105)) assert.Equal(t, base.AvPacketPtAvc, ctx.GetVideoPayloadTypeBase()) assert.Equal(t, "rtsp://192.168.0.221/media/video1/video", ctx.videoAControl) assert.Equal(t, nil, ctx.Vps) assert.IsNotNil(t, ctx.Sps) assert.IsNotNil(t, ctx.Pps) nazalog.Debugf("%+v", ctx) }
explode_data.jsonl/55601
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 30573, 20, 1155, 353, 8840, 836, 8, 341, 3174, 813, 268, 1669, 1565, 85, 28, 15, 198, 78, 10829, 220, 16, 15, 15, 16, 220, 16, 1964, 6790, 19, 220, 16, 24, 17, 13, 16, 21, 23, 13, 15, 13, 17, 17, 16, 198, 82, 28, 53,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSketchSeriesMarshalSplitCompressItemTooBigIsDropped(t *testing.T) { oldSetting := config.Datadog.Get("serializer_max_uncompressed_payload_size") defer config.Datadog.Set("serializer_max_uncompressed_payload_size", oldSetting) config.Datadog.Set("serializer_max_uncompressed_payload_size", 100) sl := make(SketchSeriesList, 2) // A big item (to be dropped) sl[0] = Makeseries(0) // A small item (no dropped) sl[1] = SketchSeries{ Name: "small", Tags: []string{}, Host: "", Interval: 0, } payloads, err := sl.MarshalSplitCompress(marshaler.DefaultBufferContext()) assert.Nil(t, err) reader := bytes.NewReader(*payloads[0]) r, _ := zlib.NewReader(reader) decompressed, _ := ioutil.ReadAll(r) r.Close() pl := new(gogen.SketchPayload) if err := pl.Unmarshal(decompressed); err != nil { t.Fatal(err) } // Should only have 1 sketch because the the larger one was dropped. require.Len(t, pl.Sketches, 1) }
explode_data.jsonl/34962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 3393, 75288, 25544, 55438, 20193, 1092, 1873, 1234, 31246, 15636, 3872, 35, 41716, 1155, 353, 8840, 836, 8, 1476, 61828, 15400, 1669, 2193, 909, 266, 329, 538, 2234, 445, 52718, 6345, 4907, 45703, 32813, 2368, 1138, 16867, 2193, 909...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTeamsService_IsTeamRepoByID_true(t *testing.T) { client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/organizations/1/team/1/repos/owner/repo", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") wantAcceptHeaders := []string{mediaTypeOrgPermissionRepo} testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `{"id":1}`) }) ctx := context.Background() repo, _, err := client.Teams.IsTeamRepoByID(ctx, 1, 1, "owner", "repo") if err != nil { t.Errorf("Teams.IsTeamRepoByID returned error: %v", err) } want := &Repository{ID: Int64(1)} if !cmp.Equal(repo, want) { t.Errorf("Teams.IsTeamRepoByID returned %+v, want %+v", repo, want) } const methodName = "IsTeamRepoByID" testBadOptions(t, methodName, func() (err error) { _, _, err = client.Teams.IsTeamRepoByID(ctx, -1, -1, "\n", "\n") return err }) testNewRequestAndDoFailure(t, methodName, client, func() (*Response, error) { got, resp, err := client.Teams.IsTeamRepoByID(ctx, 1, 1, "owner", "repo") if got != nil { t.Errorf("testNewRequestAndDoFailure %v = %#v, want nil", methodName, got) } return resp, err }) }
explode_data.jsonl/4526
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 494 }
[ 2830, 3393, 60669, 1860, 31879, 14597, 25243, 60572, 16082, 1155, 353, 8840, 836, 8, 341, 25291, 11, 59807, 11, 8358, 49304, 1669, 6505, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 69253, 14, 16, 78015, 14, 16, 49505, 14, 8118, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScaleBounds(t *testing.T) { cases := []struct { name string min string max string reachability ReachabilityType wantMin int32 wantMax int32 }{{ name: "present", min: "1", max: "100", wantMin: 1, wantMax: 100, }, { name: "absent", wantMin: 0, wantMax: 0, }, { name: "only min", min: "1", wantMin: 1, wantMax: 0, }, { name: "only max", max: "1", wantMin: 0, wantMax: 1, }, { name: "reachable", min: "1", max: "100", reachability: ReachabilityReachable, wantMin: 1, wantMax: 100, }, { name: "unreachable", min: "1", max: "100", reachability: ReachabilityUnreachable, wantMin: 0, wantMax: 100, }, { name: "malformed", min: "ham", max: "sandwich", wantMin: 0, wantMax: 0, }} for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { pa := pa(map[string]string{}) if tc.min != "" { pa.Annotations[autoscaling.MinScaleAnnotationKey] = tc.min } if tc.max != "" { pa.Annotations[autoscaling.MaxScaleAnnotationKey] = tc.max } pa.Spec.Reachability = tc.reachability min, max := pa.ScaleBounds() if min != tc.wantMin { t.Errorf("got min: %v wanted: %v", min, tc.wantMin) } if max != tc.wantMax { t.Errorf("got max: %v wanted: %v", max, tc.wantMax) } }) } }
explode_data.jsonl/27228
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 766 }
[ 2830, 3393, 6947, 11394, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 25320, 688, 914, 198, 197, 22543, 688, 914, 198, 197, 17200, 610, 2897, 49842, 2897, 929, 198, 197, 50780, 6217,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestApkMetadata_pURL(t *testing.T) { tests := []struct { metadata ApkMetadata expected string }{ { metadata: ApkMetadata{ Package: "p", Version: "v", Architecture: "a", }, expected: "pkg:alpine/p@v?arch=a", }, // verify #351 { metadata: ApkMetadata{ Package: "g++", Version: "v84", Architecture: "am86", }, expected: "pkg:alpine/g++@v84?arch=am86", }, { metadata: ApkMetadata{ Package: "g plus plus", Version: "v84", Architecture: "am86", }, expected: "pkg:alpine/g%20plus%20plus@v84?arch=am86", }, } for _, test := range tests { t.Run(test.expected, func(t *testing.T) { actual := test.metadata.PackageURL() if actual != test.expected { dmp := diffmatchpatch.New() diffs := dmp.DiffMain(test.expected, actual, true) t.Errorf("diff: %s", dmp.DiffPrettyText(diffs)) } // verify packageurl can parse purl, err := packageurl.FromString(actual) if err != nil { t.Errorf("cannot re-parse purl: %s", actual) } if purl.Name != test.metadata.Package { dmp := diffmatchpatch.New() diffs := dmp.DiffMain(test.metadata.Package, purl.Name, true) t.Errorf("invalid purl name: %s", dmp.DiffPrettyText(diffs)) } if purl.Version != test.metadata.Version { dmp := diffmatchpatch.New() diffs := dmp.DiffMain(test.metadata.Version, purl.Version, true) t.Errorf("invalid purl version: %s", dmp.DiffPrettyText(diffs)) } if purl.Qualifiers.Map()["arch"] != test.metadata.Architecture { dmp := diffmatchpatch.New() diffs := dmp.DiffMain(test.metadata.Architecture, purl.Qualifiers.Map()["arch"], true) t.Errorf("invalid purl architecture: %s", dmp.DiffPrettyText(diffs)) } }) } }
explode_data.jsonl/41179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 809 }
[ 2830, 3393, 10611, 74, 14610, 620, 3144, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 2109, 7603, 5232, 74, 14610, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 515, 298, 2109, 7603, 25, 5232, 74, 14610, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDoHashQueuesHash(t *testing.T) { state := makeState() index := state.doHash("foo", 0) testutil.AssertStringEqual(t, "", state.getHash(index), "Hash") testutil.AssertIntsEqual(t, 1, state.getPendingHashCt(), "Pending Count") }
explode_data.jsonl/67658
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 5404, 6370, 25776, 1137, 6370, 1155, 353, 8840, 836, 8, 341, 24291, 1669, 1281, 1397, 741, 26327, 1669, 1584, 16521, 6370, 445, 7975, 497, 220, 15, 340, 18185, 1314, 11711, 703, 2993, 1155, 11, 7342, 1584, 670, 6370, 7195, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPiecewiseCubic(t *testing.T) { t.Parallel() const ( h = 1e-8 valueTol = 1e-13 derivTol = 1e-6 nPts = 100 ) for i, test := range []struct { xs []float64 f func(float64) float64 df func(float64) float64 }{ { xs: []float64{-1.001, 0.2, 2}, f: func(x float64) float64 { return x * x }, df: func(x float64) float64 { return 2 * x }, }, { xs: []float64{-1.2, -1.001, 0, 0.2, 2.01, 2.1}, f: func(x float64) float64 { return 4*math.Pow(x, 3) - 2*x*x + 10*x - 7 }, df: func(x float64) float64 { return 12*x*x - 4*x + 10 }, }, { xs: []float64{-1.001, 0.2, 10}, f: func(x float64) float64 { return 1.5*x - 1 }, df: func(x float64) float64 { return 1.5 }, }, { xs: []float64{-1.001, 0.2, 10}, f: func(x float64) float64 { return -1 }, df: func(x float64) float64 { return 0 }, }, } { ys := applyFunc(test.xs, test.f) dydxs := applyFunc(test.xs, test.df) var pc PiecewiseCubic pc.FitWithDerivatives(test.xs, ys, dydxs) n := len(test.xs) m := n - 1 x0 := test.xs[0] x1 := test.xs[m] x := x0 - 0.1 got := pc.Predict(x) want := ys[0] if got != want { t.Errorf("Mismatch in value extrapolated to the left for test case %d: got %v, want %g", i, got, want) } got = pc.PredictDerivative(x) want = dydxs[0] if got != want { t.Errorf("Mismatch in derivative extrapolated to the left for test case %d: got %v, want %g", i, got, want) } x = x1 + 0.1 got = pc.Predict(x) want = ys[m] if got != want { t.Errorf("Mismatch in value extrapolated to the right for test case %d: got %v, want %g", i, got, want) } got = pc.PredictDerivative(x) want = dydxs[m] if got != want { t.Errorf("Mismatch in derivative extrapolated to the right for test case %d: got %v, want %g", i, got, want) } for j := 0; j < n; j++ { x := test.xs[j] got := pc.Predict(x) want := test.f(x) if math.Abs(got-want) > valueTol { t.Errorf("Mismatch in interpolated value at x == %g for test case %d: got %v, want %g", x, i, got, want) } if j < m { got = pc.coeffs.At(j, 0) if math.Abs(got-want) > valueTol { t.Errorf("Mismatch in 0-th order 
interpolation coefficient in %d-th node for test case %d: got %v, want %g", j, i, got, want) } dx := (test.xs[j+1] - x) / nPts for k := 1; k < nPts; k++ { xk := x + float64(k)*dx got := pc.Predict(xk) want := test.f(xk) if math.Abs(got-want) > valueTol { t.Errorf("Mismatch in interpolated value at x == %g for test case %d: got %v, want %g", x, i, got, want) } got = pc.PredictDerivative(xk) want = discrDerivPredict(&pc, x0, x1, xk, h) if math.Abs(got-want) > derivTol { t.Errorf("Mismatch in interpolated derivative at x == %g for test case %d: got %v, want %g", x, i, got, want) } } } else { got = pc.lastY if math.Abs(got-want) > valueTol { t.Errorf("Mismatch in lastY for test case %d: got %v, want %g", i, got, want) } } if j > 0 { dx := test.xs[j] - test.xs[j-1] got = ((pc.coeffs.At(j-1, 3)*dx+pc.coeffs.At(j-1, 2))*dx+pc.coeffs.At(j-1, 1))*dx + pc.coeffs.At(j-1, 0) if math.Abs(got-want) > valueTol { t.Errorf("Interpolation coefficients in %d-th node produce mismatch in interpolated value at %g for test case %d: got %v, want %g", j-1, x, i, got, want) } } got = discrDerivPredict(&pc, x0, x1, x, h) want = test.df(x) if math.Abs(got-want) > derivTol { t.Errorf("Mismatch in numerical derivative of interpolated function at x == %g for test case %d: got %v, want %g", x, i, got, want) } got = pc.PredictDerivative(x) if math.Abs(got-want) > valueTol { t.Errorf("Mismatch in interpolated derivative value at x == %g for test case %d: got %v, want %g", x, i, got, want) } } } }
explode_data.jsonl/44079
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1867 }
[ 2830, 3393, 31209, 4482, 34, 41181, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 4777, 2399, 197, 9598, 286, 284, 220, 16, 68, 12, 23, 198, 197, 16309, 51, 337, 284, 220, 16, 68, 12, 16, 18, 198, 197, 197, 82495, 51, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddFeedShouldShowErrorMessageWhenFeedIsMissingName(t *testing.T) { expected := command.MissingFeedInformation conf := config.Config{"redis", test.ValidRedisConnection} test.InitializeRedis(conf) actual := Add(conf, common.Feed{"", "https://www.google.com"}) test.AssertFailure(t, expected, actual) }
explode_data.jsonl/75097
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 2212, 28916, 14996, 7812, 21349, 4498, 28916, 3872, 25080, 675, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 3210, 1321, 13577, 28916, 14873, 198, 67850, 1669, 2193, 10753, 4913, 21748, 497, 1273, 47156, 48137, 4526, 532, 18185, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSendAlertFailureToSendSqsMessage(t *testing.T) { sqsMock := &mockSqs{} sqsClient = sqsMock mockRoundTripper := &mockRoundTripper{} httpClient = &http.Client{Transport: mockRoundTripper} mockRoundTripper.On("RoundTrip", mock.Anything).Return(generateResponse(testRuleResponse, http.StatusOK), nil).Once() sqsMock.On("SendMessage", mock.Anything).Return(&sqs.SendMessageOutput{}, errors.New("error")) assert.Error(t, SendAlert(testAlertDedupEvent)) sqsMock.AssertExpectations(t) mockRoundTripper.AssertExpectations(t) }
explode_data.jsonl/39039
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 11505, 9676, 17507, 80576, 50, 26358, 2052, 1155, 353, 8840, 836, 8, 341, 1903, 26358, 11571, 1669, 609, 16712, 50, 26358, 16094, 1903, 26358, 2959, 284, 18031, 82, 11571, 271, 77333, 27497, 21884, 6922, 1669, 609, 16712, 2749...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsHexAddress(t *testing.T) { tests := []struct { str string exp bool }{ {"0x5aaeb6053f3e94c9b9a09f33669435e7ef1beaed", true}, {"5aaeb6053f3e94c9b9a09f33669435e7ef1beaed", true}, {"0X5aaeb6053f3e94c9b9a09f33669435e7ef1beaed", true}, {"0XAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", true}, {"0xAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA", true}, {"0x5aaeb6053f3e94c9b9a09f33669435e7ef1beaed1", false}, {"0x5aaeb6053f3e94c9b9a09f33669435e7ef1beae", false}, {"5aaeb6053f3e94c9b9a09f33669435e7ef1beaed11", false}, {"0xxaaeb6053f3e94c9b9a09f33669435e7ef1beaed", false}, } for _, test := range tests { if result := IsHexAddress(test.str); result != test.exp { t.Errorf("IsHexAddress(%s) == %v; expected %v", test.str, result, test.exp) } } }
explode_data.jsonl/35494
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 387 }
[ 2830, 3393, 3872, 20335, 4286, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11355, 914, 198, 197, 48558, 1807, 198, 197, 59403, 197, 197, 4913, 15, 87, 20, 5305, 3065, 21, 15, 20, 18, 69, 18, 68, 24, 19, 66, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCollectorWithIngesterNoOptionsStorageType(t *testing.T) { jaeger := &v1.Jaeger{ ObjectMeta: metav1.ObjectMeta{ Name: "my-instance", }, Spec: v1.JaegerSpec{ Strategy: v1.DeploymentStrategyStreaming, Storage: v1.JaegerStorageSpec{ Type: "elasticsearch", Options: v1.NewOptions(map[string]interface{}{ "kafka.brokers": "http://brokers", "es.server-urls": "http://somewhere", }), }, }, } collector := NewCollector(jaeger) dep := collector.Get() envvars := []corev1.EnvVar{ { Name: "SPAN_STORAGE_TYPE", Value: "kafka", }, { Name: "COLLECTOR_ZIPKIN_HOST_PORT", Value: ":9411", }, } assert.Equal(t, envvars, dep.Spec.Template.Spec.Containers[0].Env) assert.Len(t, dep.Spec.Template.Spec.Containers[0].Args, 2) assert.Equal(t, "--kafka.brokers=http://brokers", dep.Spec.Template.Spec.Containers[0].Args[0]) }
explode_data.jsonl/59532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 53694, 2354, 25416, 5191, 2753, 3798, 5793, 929, 1155, 353, 8840, 836, 8, 341, 197, 5580, 1878, 1669, 609, 85, 16, 3503, 64, 1878, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 330, 2408, 73655, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPasswordStoreWithEnvvar(t *testing.T) { myurl, err := url.Parse("https://docker.io") require.NoError(t, err) ps := passwordStore{} creds := base64.StdEncoding.EncodeToString([]byte("me:mypassword")) os.Setenv("NOTARY_AUTH", creds) username, passwd := ps.Basic(myurl) require.Equal(t, "me", username) require.Equal(t, "mypassword", passwd) creds = base64.StdEncoding.EncodeToString([]byte(":mypassword")) os.Setenv("NOTARY_AUTH", creds) username, passwd = ps.Basic(myurl) require.Equal(t, "", username) require.Equal(t, "", passwd) os.Setenv("NOTARY_AUTH", "not-base64-encoded") username, passwd = ps.Basic(myurl) require.Equal(t, "", username) require.Equal(t, "", passwd) }
explode_data.jsonl/77499
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 4876, 6093, 2354, 14359, 947, 1155, 353, 8840, 836, 8, 341, 13624, 1085, 11, 1848, 1669, 2515, 8937, 445, 2428, 1110, 28648, 4245, 1138, 17957, 35699, 1155, 11, 1848, 692, 35009, 1669, 3552, 6093, 31483, 197, 85734, 1669, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEngineProcessor_Submit(t *testing.T) { sendCommandPoolMock := new(task.MockedPool) ctx := context.NewMockDefault() executerMock := executermocks.NewMockExecuter() creator := func(ctx context.T) executer.Executer { return executerMock } sendCommandPoolMock.On("Submit", ctx.Log(), "messageID", mock.Anything).Return(nil) sendCommandPoolMock.On("BufferTokensIssued").Return(0) docMock := new(DocumentMgrMock) processor := EngineProcessor{ executerCreator: creator, sendCommandPool: sendCommandPoolMock, context: ctx, documentMgr: docMock, startWorker: NewWorkerProcessorSpec(ctx, 1, contracts.StartSession, 0), } docState := contracts.DocumentState{} docState.DocumentInformation.MessageID = "messageID" docState.DocumentType = contracts.StartSession docMock.On("PersistDocumentState", mock.Anything, appconfig.DefaultLocationOfPending, docState) errorCode := processor.Submit(docState) assert.Equal(t, errorCode, ErrorCode("")) sendCommandPoolMock.AssertExpectations(t) }
explode_data.jsonl/524
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 4571, 22946, 36359, 1763, 1155, 353, 8840, 836, 8, 341, 32817, 4062, 10551, 11571, 1669, 501, 17483, 24664, 291, 10551, 340, 20985, 1669, 2266, 7121, 11571, 3675, 741, 67328, 27951, 11571, 1669, 23494, 4195, 25183, 7121, 11571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_noCancelCtx_Done(t *testing.T) { type fields struct { parent context.Context } tests := []struct { name string fields fields want <-chan struct{} }{ { name: "pass", fields: fields{ parent: context.Background(), }, want: nil, }, { name: "pass with context canceled", fields: fields{ parent: canceledContext(), }, want: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ctx := ContextWithoutCancel(tt.fields.parent) assert.Equalf(t, tt.want, ctx.Done(), "Done()") }) } }
explode_data.jsonl/19436
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 6536, 9269, 23684, 1557, 603, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 24804, 2266, 9328, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 55276, 5043, 198, 197, 50780, 256, 91...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuccess(t *testing.T) { data := []string{"test one", "test two", "test three", "test four"} response := Success(data) if response.Code != 200 { t.Error("Expected status code to be 200") } if response.Status != StatusSuccess { t.Error("Expected the status to be success") } if response.Message != "" { t.Error("Expected the message to be empty") } if reflect.DeepEqual(response.Data, data) != true { t.Error("Expeted the data to contain our data") } }
explode_data.jsonl/71746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 7188, 1155, 353, 8840, 836, 8, 1476, 8924, 1669, 3056, 917, 4913, 1944, 825, 497, 330, 1944, 1378, 497, 330, 1944, 2326, 497, 330, 1944, 3040, 63159, 21735, 1669, 13047, 2592, 692, 743, 2033, 20274, 961, 220, 17, 15, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetDaemonEndpointsFromStringInvalid6(t *testing.T) { udpAddr := "127.0.0.2:2001" dAddr := "udp:" + udpAddr // no tcp address present dEndpt, err := GetDaemonEndpointsFromString(dAddr) assert.NotNil(t, err) assert.True(t, strings.Contains(fmt.Sprint(err), addrErr)) assert.Nil(t, dEndpt) }
explode_data.jsonl/49941
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 1949, 89177, 80786, 44491, 7928, 21, 1155, 353, 8840, 836, 8, 341, 197, 31101, 13986, 1669, 330, 16, 17, 22, 13, 15, 13, 15, 13, 17, 25, 17, 15, 15, 16, 698, 2698, 13986, 1669, 330, 31101, 2974, 488, 49219, 13986, 442,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMsgCommands(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() Client := th.Client team := th.BasicTeam user1 := th.BasicUser user2 := th.BasicUser2 user3 := th.CreateUser() th.LinkUserToTeam(user3, team) Client.Must(Client.CreateDirectChannel(th.BasicUser.Id, user2.Id)) Client.Must(Client.CreateDirectChannel(th.BasicUser.Id, user3.Id)) rs1 := Client.Must(Client.ExecuteCommand(th.BasicChannel.Id, "/msg "+user2.Username)).(*model.CommandResponse) require.Condition(t, func() bool { return strings.HasSuffix(rs1.GotoLocation, "/"+team.Name+"/channels/"+user1.Id+"__"+user2.Id) || strings.HasSuffix(rs1.GotoLocation, "/"+team.Name+"/channels/"+user2.Id+"__"+user1.Id) }, "failed to create direct channel") rs2 := Client.Must(Client.ExecuteCommand(th.BasicChannel.Id, "/msg "+user3.Username+" foobar")).(*model.CommandResponse) require.Condition(t, func() bool { return strings.HasSuffix(rs2.GotoLocation, "/"+team.Name+"/channels/"+user1.Id+"__"+user3.Id) || strings.HasSuffix(rs2.GotoLocation, "/"+team.Name+"/channels/"+user3.Id+"__"+user1.Id) }, "failed to create second direct channel") result := Client.Must(Client.SearchPosts(th.BasicTeam.Id, "foobar", false)).(*model.PostList) require.NotEqual(t, 0, len(result.Order), "post did not get sent to direct message") rs3 := Client.Must(Client.ExecuteCommand(th.BasicChannel.Id, "/msg "+user2.Username)).(*model.CommandResponse) require.Condition(t, func() bool { return strings.HasSuffix(rs3.GotoLocation, "/"+team.Name+"/channels/"+user1.Id+"__"+user2.Id) || strings.HasSuffix(rs3.GotoLocation, "/"+team.Name+"/channels/"+user2.Id+"__"+user1.Id) }, "failed to go back to existing direct channel") Client.Must(Client.ExecuteCommand(th.BasicChannel.Id, "/msg "+th.BasicUser.Username+" foobar")) Client.Must(Client.ExecuteCommand(th.BasicChannel.Id, "/msg junk foobar")) }
explode_data.jsonl/26345
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 720 }
[ 2830, 3393, 6611, 30479, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 71724, 1669, 270, 11716, 198, 197, 9196, 1669, 270, 48868, 14597, 198, 19060, 16, 1669, 270, 48868...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCTEWithDML(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test;") tk.MustExec("drop table if exists t1;") tk.MustExec("create table t1(a int);") tk.MustExec("insert into t1 values(2),(3);") tk.MustQuery("with t1 as (select 36 as col from t1 where a=3) select * from t1;").Check(testkit.Rows("36")) tk.MustExec("insert into t1 with t1 as (select 36 as col from t1) select * from t1;") tk.MustQuery("select * from t1").Check(testkit.Rows("2", "3", "36", "36")) tk.MustExec("with cte1(a) as (select 36) update t1 set a = 1 where a in (select a from cte1);") tk.MustQuery("select * from t1").Check(testkit.Rows("2", "3", "1", "1")) tk.MustExec("with recursive cte(a) as (select 1 union select a + 1 from cte where a < 10) update cte, t1 set t1.a=1") tk.MustQuery("select * from t1").Check(testkit.Rows("1", "1", "1", "1")) tk.MustGetErrCode("with recursive cte(a) as (select 1 union select a + 1 from cte where a < 10) update cte set a=1", mysql.ErrNonUpdatableTable) tk.MustGetErrCode("with recursive cte(a) as (select 1 union select a + 1 from cte where a < 10) delete from cte", mysql.ErrNonUpdatableTable) tk.MustGetErrCode("with cte(a) as (select a from t1) delete from cte", mysql.ErrNonUpdatableTable) tk.MustGetErrCode("with cte(a) as (select a from t1) update cte set a=1", mysql.ErrNonUpdatableTable) tk.MustExec("drop table if exists t1;") tk.MustExec("create table t1(a int, b int, primary key(a));") tk.MustExec("insert into t1 values (1, 1),(2,1),(3,1);") tk.MustExec("replace into t1 with recursive cte(a,b) as (select 1, 1 union select a + 1,b+1 from cte where a < 5) select * from cte;") tk.MustQuery("select * from t1").Check(testkit.Rows("1 1", "2 2", "3 3", "4 4", "5 5")) }
explode_data.jsonl/65526
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 705 }
[ 2830, 3393, 1162, 36, 2354, 35, 2668, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50463...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMetric(t *testing.T) { var testcases = []struct { name string resources []common.Resource metricType string expectErr error expectMetric common.Metric }{ { name: "ranch has no resource", resources: []common.Resource{}, metricType: "t", expectErr: &ResourceNotFound{"t"}, }, { name: "no matching resource", resources: []common.Resource{ { Name: "res", Type: "t", State: "s", Owner: "merlin", }, }, metricType: "foo", expectErr: &ResourceNotFound{"foo"}, }, { name: "one resource", resources: []common.Resource{ { Name: "res", Type: "t", State: "s", Owner: "merlin", }, }, metricType: "t", expectMetric: common.Metric{ Type: "t", Current: map[string]int{ "s": 1, }, Owners: map[string]int{ "merlin": 1, }, }, }, { name: "multiple resources", resources: []common.Resource{ { Name: "res-1", Type: "t", State: "s", Owner: "merlin", }, { Name: "res-2", Type: "t", State: "p", Owner: "pony", }, { Name: "res-2", Type: "t", State: "s", Owner: "pony", }, { Name: "res-3", Type: "foo", State: "s", Owner: "pony", }, { Name: "res-4", Type: "t", State: "d", Owner: "merlin", }, }, metricType: "t", expectMetric: common.Metric{ Type: "t", Current: map[string]int{ "s": 2, "d": 1, "p": 1, }, Owners: map[string]int{ "merlin": 2, "pony": 2, }, }, }, } for _, tc := range testcases { c := MakeTestRanch(tc.resources) metric, err := c.Metric(tc.metricType) if !AreErrorsEqual(err, tc.expectErr) { t.Errorf("%s - Got error %v, expect error %v", tc.name, err, tc.expectErr) continue } if err == nil { if !reflect.DeepEqual(metric, tc.expectMetric) { t.Errorf("%s - wrong metric, got %v, want %v", tc.name, metric, tc.expectMetric) } } } }
explode_data.jsonl/12950
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1134 }
[ 2830, 3393, 54310, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 23910, 284, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 10202, 2360, 262, 3056, 5464, 20766, 198, 197, 2109, 16340, 929, 256, 914, 198, 197, 24952, 7747, 262, 1465, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestParse_NewAccountNotification(t *testing.T) { result := MustParseFile("testdata/new_account_notification.xml") if n, ok := result.(*webhooks.AccountNotification); !ok { t.Fatalf("unexpected type: %T, result", n) } else if diff := cmp.Diff(n, &webhooks.AccountNotification{ Type: webhooks.NewAccount, Account: webhooks.Account{ XMLName: xml.Name{Local: "account"}, Code: "1", Email: "verena@example.com", FirstName: "Verena", LastName: "Example", }, }); diff != "" { t.Fatal(diff) } }
explode_data.jsonl/76096
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 14463, 39582, 7365, 11196, 1155, 353, 8840, 836, 8, 341, 9559, 1669, 15465, 14463, 1703, 445, 92425, 25376, 13500, 34296, 9028, 1138, 743, 308, 11, 5394, 1669, 1102, 41399, 2911, 38560, 30877, 11196, 1215, 753, 562, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewIncludedWorkflowFromFile_UsesResourcesFromParent(t *testing.T) { parent := New() parent.workflowDir = "./test_data" included, err := parent.NewIncludedWorkflowFromFile("TestNewIncludedWorkflowFromFile_UsesResourcesFromParent.wf.json") if err != nil { t.Fatal(err) } assertEqual(t, parent.Cancel, included.Cancel, "Cancel") assertEqual(t, parent, included.parent, "parent") assertEqual(t, parent.disks, included.disks, "disks") assertEqual(t, parent.forwardingRules, included.forwardingRules, "forwardingRules") assertEqual(t, parent.images, included.images, "images") assertEqual(t, parent.machineImages, included.machineImages, "machineImages") assertEqual(t, parent.instances, included.instances, "instances") assertEqual(t, parent.networks, included.networks, "networks") assertEqual(t, parent.subnetworks, included.subnetworks, "subnetworks") assertEqual(t, parent.targetInstances, included.targetInstances, "targetInstances") assertEqual(t, parent.snapshots, included.snapshots, "snapshots") assertEqual(t, parent.objects, included.objects, "objects") }
explode_data.jsonl/3870
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 3564, 84610, 62768, 43633, 62, 68965, 11277, 3830, 8387, 1155, 353, 8840, 836, 8, 341, 24804, 1669, 1532, 741, 24804, 72774, 6184, 284, 5924, 1944, 1769, 698, 17430, 10181, 11, 1848, 1669, 2681, 7121, 84610, 62768, 43633, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFloat32(t *testing.T) { testCases := []struct { name string broker CeleryBroker backend CeleryBackend taskName string taskFunc interface{} inA float32 inB float32 expected float32 }{ { name: "float32 addition with redis broker/backend", broker: redisBroker, backend: redisBackend, taskName: uuid.Must(uuid.NewV4()).String(), taskFunc: addFloat32, inA: 3.4580, inB: 5.3688, expected: float32(8.8268), }, { name: "float32 addition with redis broker/backend with connection", broker: redisBrokerWithConn, backend: redisBackendWithConn, taskName: uuid.Must(uuid.NewV4()).String(), taskFunc: addFloat32, inA: 3.4580, inB: 5.3688, expected: float32(8.8268), }, { name: "float32 addition with amqp broker/backend", broker: amqpBroker, backend: amqpBackend, taskName: uuid.Must(uuid.NewV4()).String(), taskFunc: addFloat32, inA: 3.4580, inB: 5.3688, expected: 8.8268, }, } for _, tc := range testCases { cli, _ := NewCeleryClient(tc.broker, tc.backend, 1) cli.Register(tc.taskName, tc.taskFunc) cli.StartWorker() asyncResult, err := cli.Delay(tc.taskName, tc.inA, tc.inB) if err != nil { t.Errorf("test '%s': failed to get result for task %s: %+v", tc.name, tc.taskName, err) cli.StopWorker() continue } res, err := asyncResult.Get(TIMEOUT) if err != nil { t.Errorf("test '%s': failed to get result for task %s: %+v", tc.name, tc.taskName, err) cli.StopWorker() continue } if tc.expected != float32(res.(float64)) { t.Errorf("test '%s': returned result %+v is different from expected result %+v", tc.name, res, tc.expected) } cli.StopWorker() } }
explode_data.jsonl/77852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 837 }
[ 2830, 3393, 5442, 18, 17, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 2233, 45985, 256, 46543, 722, 65545, 198, 197, 197, 20942, 220, 46543, 722, 29699, 198, 197, 49115, 675, 914,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestHistogramBucketSlice_Resize(t *testing.T) { es := generateTestHistogramBucketSlice() emptyVal := NewHistogramBucket() emptyVal.InitEmpty() // Test Resize less elements. const resizeSmallLen = 4 expectedEs := make(map[*otlpmetrics.HistogramDataPoint_Bucket]bool, resizeSmallLen) for i := 0; i < resizeSmallLen; i++ { expectedEs[*(es.At(i).orig)] = true } assert.EqualValues(t, resizeSmallLen, len(expectedEs)) es.Resize(resizeSmallLen) assert.EqualValues(t, resizeSmallLen, es.Len()) foundEs := make(map[*otlpmetrics.HistogramDataPoint_Bucket]bool, resizeSmallLen) for i := 0; i < es.Len(); i++ { foundEs[*(es.At(i).orig)] = true } assert.EqualValues(t, expectedEs, foundEs) // Test Resize more elements. const resizeLargeLen = 7 oldLen := es.Len() expectedEs = make(map[*otlpmetrics.HistogramDataPoint_Bucket]bool, oldLen) for i := 0; i < oldLen; i++ { expectedEs[*(es.At(i).orig)] = true } assert.EqualValues(t, oldLen, len(expectedEs)) es.Resize(resizeLargeLen) assert.EqualValues(t, resizeLargeLen, es.Len()) foundEs = make(map[*otlpmetrics.HistogramDataPoint_Bucket]bool, oldLen) for i := 0; i < oldLen; i++ { foundEs[*(es.At(i).orig)] = true } assert.EqualValues(t, expectedEs, foundEs) for i := oldLen; i < resizeLargeLen; i++ { assert.EqualValues(t, emptyVal, es.At(i)) } // Test Resize 0 elements. es.Resize(0) assert.EqualValues(t, NewHistogramBucketSlice(), es) }
explode_data.jsonl/19559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 563 }
[ 2830, 3393, 77210, 36018, 33236, 62, 30561, 1155, 353, 8840, 836, 8, 341, 78966, 1669, 6923, 2271, 77210, 36018, 33236, 741, 197, 3194, 2208, 1669, 1532, 77210, 36018, 741, 197, 3194, 2208, 26849, 3522, 741, 197, 322, 3393, 63343, 2686, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestNotificationSuccess(t *testing.T) { toolchainConfig := commonconfig.NewToolchainConfigObjWithReset(t, testconfig.Notifications().DurationBeforeNotificationDeletion("10s")) // given t.Run("will not do anything and return requeue with shorter duration that 10s", func(t *testing.T) { // given ds, _ := mockDeliveryService(defaultTemplateLoader()) controller, cl := newController(t, ds, toolchainConfig) notification, err := NewNotificationBuilder(cl, test.HostOperatorNs).Create("jane@acme.com") require.NoError(t, err) notification.Status.Conditions = []toolchainv1alpha1.Condition{sentCond()} require.NoError(t, cl.Update(context.TODO(), notification)) // when result, err := reconcileNotification(controller, notification) // then require.NoError(t, err) assert.True(t, result.Requeue) assert.True(t, result.RequeueAfter < cast.ToDuration("10s")) assert.True(t, result.RequeueAfter > cast.ToDuration("1s")) ntest.AssertThatNotification(t, notification.Name, cl). HasConditions(sentCond()) }) t.Run("sent notification deleted when deletion timeout passed", func(t *testing.T) { // given ds, _ := mockDeliveryService(defaultTemplateLoader()) controller, cl := newController(t, ds, toolchainConfig) notification, err := NewNotificationBuilder(cl, test.HostOperatorNs).Create("jane@acme.com") require.NoError(t, err) notification.Status.Conditions = []toolchainv1alpha1.Condition{sentCond()} notification.Status.Conditions[0].LastTransitionTime = v1.Time{Time: time.Now().Add(-cast.ToDuration("10s"))} require.NoError(t, cl.Update(context.TODO(), notification)) // when result, err := reconcileNotification(controller, notification) // then require.NoError(t, err) assert.False(t, result.Requeue) AssertThatNotificationIsDeleted(t, cl, notification.Name) }) }
explode_data.jsonl/33133
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 615 }
[ 2830, 3393, 11196, 7188, 1155, 353, 8840, 836, 8, 341, 197, 14172, 8819, 2648, 1669, 4185, 1676, 7121, 7740, 8819, 2648, 5261, 2354, 14828, 1155, 11, 1273, 1676, 15000, 7029, 1005, 12945, 10227, 11196, 1912, 52625, 445, 16, 15, 82, 2807...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntDataPointSlice_CopyTo(t *testing.T) { dest := NewIntDataPointSlice() // Test CopyTo to empty NewIntDataPointSlice().CopyTo(dest) assert.EqualValues(t, NewIntDataPointSlice(), dest) // Test CopyTo larger slice generateTestIntDataPointSlice().CopyTo(dest) assert.EqualValues(t, generateTestIntDataPointSlice(), dest) // Test CopyTo same size slice generateTestIntDataPointSlice().CopyTo(dest) assert.EqualValues(t, generateTestIntDataPointSlice(), dest) }
explode_data.jsonl/32707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 1072, 1043, 2609, 33236, 77637, 1249, 1155, 353, 8840, 836, 8, 341, 49616, 1669, 1532, 1072, 1043, 2609, 33236, 741, 197, 322, 3393, 14540, 1249, 311, 4287, 198, 197, 3564, 1072, 1043, 2609, 33236, 1005, 12106, 1249, 27010, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoAttemptsAllowedError(t *testing.T) { t.Parallel() err := NoAttemptsAllowedError{ MaxAttempts: 0, } assert.Equal(t, "no attempts are allowed with max attempts set to 0", err.Error()) }
explode_data.jsonl/82070
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 2753, 81517, 35382, 1454, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 9859, 1669, 2308, 81517, 35382, 1454, 515, 197, 197, 5974, 81517, 25, 220, 15, 345, 197, 532, 6948, 12808, 1155, 11, 330, 2152, 13553, 525, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestParserDefaultPlaceholderValues(t *testing.T) { assertMarkerValue(t, "errorContext: `${1:err}`, error: $1", &text{}, &placeholder{}, &text{}, &placeholder{}) parsed := newSnippetParser().parse("errorContext: `${1:err}`, error:$1", false, false) assertMarkerTypes(t, (*parsed)[1], &placeholder{}) assertMarkerTypes(t, (*parsed)[3], &placeholder{}) p1, p2 := (*parsed)[1].(*placeholder), (*parsed)[3].(*placeholder) assertEqual(t, p1.index, 1) assertEqual(t, len(*p1.children()), 1) assertEqual(t, (*p1.children())[0].(*text).String(), "err") assertEqual(t, p2.index, 1) assertEqual(t, len(*p2.children()), 1) assertEqual(t, (*p2.children())[0].(*text).String(), "err") }
explode_data.jsonl/60281
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 6570, 3675, 48305, 6227, 1155, 353, 8840, 836, 8, 341, 6948, 20613, 1130, 1155, 11, 330, 841, 1972, 25, 11518, 16, 25, 615, 28350, 1465, 25, 400, 16, 497, 609, 1318, 22655, 609, 12384, 22655, 609, 1318, 22655, 609, 12384, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateMemory(t *testing.T) { tcs := []struct { name string code []byte err error }{ { name: "memory.grow", code: []byte{ operators.I32Const, 3, operators.GrowMemory, 0, operators.Drop, }, err: nil, }, { name: "memory.grow invalid index", code: []byte{ operators.I32Const, 1, operators.GrowMemory, 1, operators.Drop, }, err: InvalidTableIndexError{"memory", 1}, }, { name: "memory.size", code: []byte{ operators.CurrentMemory, 0, operators.Drop, }, err: nil, }, { name: "memory.size invalid index", code: []byte{ operators.I32Const, 1, operators.CurrentMemory, 1, operators.Drop, }, err: InvalidTableIndexError{"memory", 1}, }, } for i := range tcs { tc := tcs[i] t.Run(tc.name, func(t *testing.T) { t.Parallel() mod := wasm.Module{} sig := wasm.FunctionSig{Form: 0x60 /* Must always be 0x60 */} fn := wasm.FunctionBody{Module: &mod, Code: tc.code} _, err := verifyBody(&sig, &fn, &mod) if err != tc.err { t.Fatalf("verify returned '%v', want '%v'", err, tc.err) } }) } }
explode_data.jsonl/16563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 547 }
[ 2830, 3393, 17926, 10642, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 43343, 3056, 3782, 198, 197, 9859, 220, 1465, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 17269, 1302, 651,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestChannelPullAccessControl(t *testing.T) { t.Parallel() // Scenario: We have 2 organizations in the channel: ORG1, ORG2 // The "acting peer" is from ORG1 and peers "1", "2", "3" are from // the following organizations: // ORG1: "1" // ORG2: "2", "3" // We test 2 cases: // 1) We don't respond for Hello messages from peers in foreign organizations // 2) We don't select peers from foreign organizations when doing pull cs := &cryptoService{} adapter := new(gossipAdapterMock) cs.Mock = mock.Mock{} cs.On("VerifyBlock", mock.Anything).Return(nil) pkiID1 := common.PKIidType("1") pkiID2 := common.PKIidType("2") pkiID3 := common.PKIidType("3") peer1 := discovery.NetworkMember{PKIid: pkiID1, InternalEndpoint: "1", Endpoint: "1"} peer2 := discovery.NetworkMember{PKIid: pkiID2, InternalEndpoint: "2", Endpoint: "2"} peer3 := discovery.NetworkMember{PKIid: pkiID3, InternalEndpoint: "3", Endpoint: "3"} adapter.On("GetOrgOfPeer", pkiIDInOrg1).Return(api.OrgIdentityType("ORG1")) adapter.On("GetOrgOfPeer", pkiID1).Return(api.OrgIdentityType("ORG1")) adapter.On("GetOrgOfPeer", pkiID2).Return(api.OrgIdentityType("ORG2")) adapter.On("GetOrgOfPeer", pkiID3).Return(api.OrgIdentityType("ORG2")) adapter.On("GetMembership").Return([]discovery.NetworkMember{peer1, peer2, peer3}) adapter.On("DeMultiplex", mock.Anything) adapter.On("Gossip", mock.Anything) adapter.On("Forward", mock.Anything) adapter.On("GetConf").Return(conf) sentHello := int32(0) adapter.On("Send", mock.Anything, mock.Anything).Run(func(arg mock.Arguments) { msg := arg.Get(0).(*proto.SignedGossipMessage) if !msg.IsHelloMsg() { return } atomic.StoreInt32(&sentHello, int32(1)) peerID := string(arg.Get(1).([]*comm.RemotePeer)[0].PKIID) assert.Equal(t, "1", peerID) assert.NotEqual(t, "2", peerID, "Sent hello to peer 2 but it's in a different org") assert.NotEqual(t, "3", peerID, "Sent hello to peer 3 but it's in a different org") }) jcm := &joinChanMsg{ members2AnchorPeers: map[string][]api.AnchorPeer{ "ORG1": {}, "ORG2": 
{}, }, } gc := NewGossipChannel(pkiIDInOrg1, orgInChannelA, cs, channelA, adapter, jcm, disabledMetrics) gc.HandleMessage(&receivedMsg{PKIID: pkiIDInOrg1, msg: createStateInfoMsg(100, pkiIDInOrg1, channelA)}) gc.HandleMessage(&receivedMsg{PKIID: pkiID1, msg: createStateInfoMsg(100, pkiID1, channelA)}) gc.HandleMessage(&receivedMsg{PKIID: pkiID2, msg: createStateInfoMsg(100, pkiID2, channelA)}) gc.HandleMessage(&receivedMsg{PKIID: pkiID3, msg: createStateInfoMsg(100, pkiID3, channelA)}) respondedChan := make(chan *proto.GossipMessage, 1) messageRelayer := func(arg mock.Arguments) { msg := arg.Get(0).(*proto.GossipMessage) respondedChan <- msg } gc.HandleMessage(&receivedMsg{msg: dataMsgOfChannel(5, channelA), PKIID: pkiIDInOrg1}) helloMsg := createHelloMsg(pkiID1) helloMsg.On("Respond", mock.Anything).Run(messageRelayer) go gc.HandleMessage(helloMsg) select { case <-respondedChan: case <-time.After(time.Second): assert.Fail(t, "Didn't reply to a hello within a timely manner") } helloMsg = createHelloMsg(pkiID2) helloMsg.On("Respond", mock.Anything).Run(messageRelayer) go gc.HandleMessage(helloMsg) select { case <-respondedChan: assert.Fail(t, "Shouldn't have replied to a hello, because the peer is from a foreign org") case <-time.After(time.Second): } // Sleep a bit to let the gossip channel send out its hello messages time.Sleep(time.Second * 3) // Make sure we sent at least 1 hello message, otherwise the test passed vacuously assert.Equal(t, int32(1), atomic.LoadInt32(&sentHello)) }
explode_data.jsonl/66317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1370 }
[ 2830, 3393, 9629, 36068, 6054, 3273, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 322, 58663, 25, 1205, 614, 220, 17, 11104, 304, 279, 5496, 25, 2726, 38, 16, 11, 2726, 38, 17, 198, 197, 322, 576, 330, 34922, 14397, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPluginEndorserErrors(t *testing.T) { pluginMapper := &mocks.PluginMapper{} pluginFactory := &mocks.PluginFactory{} plugin := &mocks.Plugin{} plugin.On("Endorse", mock.Anything, mock.Anything) pluginMapper.On("PluginFactoryByName", endorser.PluginName("plugin")).Return(pluginFactory) pluginFactory.On("New").Return(plugin) sif := &mocks.SigningIdentityFetcher{} cs := &mocks.ChannelStateRetriever{} queryCreator := &mocks.QueryCreator{} cs.On("NewQueryCreator", "mychannel").Return(queryCreator, nil) pluginEndorser := endorser.NewPluginEndorser(&endorser.PluginSupport{ ChannelStateRetriever: cs, SigningIdentityFetcher: sif, PluginMapper: pluginMapper, TransientStoreRetriever: mockTransientStoreRetriever, }) // Failed initializing plugin t.Run("PluginInitializationFailure", func(t *testing.T) { plugin.On("Init", mock.Anything, mock.Anything).Return(errors.New("plugin initialization failed")).Once() endorsement, prpBytes, err := pluginEndorser.EndorseWithPlugin("plugin", "mychannel", nil, nil) assert.Nil(t, endorsement) assert.Nil(t, prpBytes) assert.Contains(t, err.Error(), "plugin initialization failed") }) }
explode_data.jsonl/1349
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 11546, 3727, 269, 799, 13877, 1155, 353, 8840, 836, 8, 341, 197, 9138, 10989, 1669, 609, 16712, 82, 64378, 10989, 16094, 197, 9138, 4153, 1669, 609, 16712, 82, 64378, 4153, 16094, 197, 9138, 1669, 609, 16712, 82, 64378, 1609...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInMemory_Debug(t *testing.T) { m := containable.NewInMemory() for i := 0; i < 5; i++ { id := []byte(`eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9`) hashed := sha256.Sum256(strconv.AppendInt(id, int64(i), 10)) assert.NoError(t, m.Set(hashed[:], time.Second*time.Duration(i))) } buf := new(bytes.Buffer) m.Debug(buf) assert.Contains(t, buf.String(), `9addefe77982f9641233b4e5f59f3cc07111f96c753e3faf5d7c338116197050 => 20`) }
explode_data.jsonl/42547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 641, 10642, 77938, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 6644, 480, 7121, 641, 10642, 741, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 20, 26, 600, 1027, 341, 197, 15710, 1669, 3056, 3782, 5809, 84609, 49039, 38, 58...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDockerCopyToVolume(t *testing.T) { assert := asrt.New(t) err := RemoveVolume(t.Name()) assert.NoError(err) pwd, _ := os.Getwd() err = CopyToVolume(filepath.Join(pwd, "testdata", t.Name()), t.Name(), "", "0") assert.NoError(err) mainContainerID, out, err := RunSimpleContainer("busybox:latest", "", []string{"sh", "-c", "cd /mnt/" + t.Name() + " && ls -R"}, nil, nil, []string{t.Name() + ":/mnt/" + t.Name()}, "25", true, false, nil) assert.NoError(err) assert.Equal(`.: root.txt subdir1 ./subdir1: subdir1.txt `, out) err = CopyToVolume(filepath.Join(pwd, "testdata", t.Name()), t.Name(), "somesubdir", "501") assert.NoError(err) subdirContainerID, out, err := RunSimpleContainer("busybox:latest", "", []string{"sh", "-c", "cd /mnt/" + t.Name() + "/somesubdir && pwd && ls -R"}, nil, nil, []string{t.Name() + ":/mnt/" + t.Name()}, "0", true, false, nil) assert.NoError(err) assert.Equal(`/mnt/TestDockerCopyToVolume/somesubdir .: root.txt subdir1 ./subdir1: subdir1.txt `, out) t.Cleanup(func() { _ = RemoveContainer(mainContainerID, 0) assert.NoError(err) _ = RemoveContainer(subdirContainerID, 0) err = RemoveVolume(t.Name()) assert.NoError(err) }) }
explode_data.jsonl/41385
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 496 }
[ 2830, 3393, 35, 13659, 12106, 1249, 18902, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 438, 3342, 7121, 1155, 340, 9859, 1669, 10783, 18902, 1155, 2967, 2398, 6948, 35699, 3964, 692, 3223, 6377, 11, 716, 1669, 2643, 2234, 6377, 741, 9859,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBYORevisionPostUpgrade(t *testing.T) { t.Parallel() clients := e2e.Setup(t) names := test.ResourceNames{ Service: byoServiceName, } if _, err := v1test.UpdateServiceRouteSpec(t, clients, names, v1.RouteSpec{ Traffic: []v1.TrafficTarget{{ Tag: "example-tag", RevisionName: byoRevName, Percent: ptr.Int64(100), }}, }); err != nil { t.Fatalf("Failed to update Service: %v", err) } }
explode_data.jsonl/69489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 19912, 46, 33602, 4133, 43861, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 31869, 1669, 384, 17, 68, 39820, 1155, 340, 93940, 1669, 1273, 20766, 7980, 515, 197, 91619, 25, 553, 78, 1860, 675, 345, 197, 630, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDefaultByteBuf_WriteInt16(t *testing.T) { buf := EmptyByteBuf() buf.WriteInt16(math.MaxInt16) assert.EqualValues(t, math.MaxInt16, buf.ReadInt16()) }
explode_data.jsonl/1982
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 65 }
[ 2830, 3393, 3675, 7153, 15064, 31825, 1072, 16, 21, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 22228, 7153, 15064, 741, 26398, 4073, 1072, 16, 21, 37270, 14535, 1072, 16, 21, 340, 6948, 12808, 6227, 1155, 11, 6888, 14535, 1072, 16, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBad(t *testing.T) { r := Newc.Reader(bytes.NewReader(badCPIO)) if _, err := r.ReadRecord(); err != io.EOF { t.Errorf("ReadRecord(badCPIO) got %v, want %v", err, io.EOF) } r = Newc.Reader(bytes.NewReader(badMagicCPIO)) if _, err := r.ReadRecord(); err == nil { t.Errorf("Wanted bad magic err, got nil") } }
explode_data.jsonl/25089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 17082, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 66, 47431, 23158, 68587, 1883, 329, 34, 6338, 1171, 743, 8358, 1848, 1669, 435, 6503, 6471, 2129, 1848, 961, 6399, 86492, 341, 197, 3244, 13080, 445, 4418, 6471, 1883, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuildsImportError(t *testing.T) { testClient(t, func(e *cli.Engine, i *mocksdk.Interface) { i.On("SystemGet").Return(fxSystem(), nil) i.On("BuildImport", "app1", mock.Anything).Return(nil, fmt.Errorf("err1")) res, err := testExecute(e, "builds import -a app1 -f testdata/build.tgz", nil) require.NoError(t, err) require.Equal(t, 1, res.Code) res.RequireStderr(t, []string{"ERROR: err1"}) res.RequireStdout(t, []string{"Importing build... "}) }) }
explode_data.jsonl/65795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 11066, 82, 11511, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 2959, 1155, 11, 2915, 2026, 353, 19521, 54424, 11, 600, 353, 16712, 51295, 41065, 8, 341, 197, 8230, 8071, 445, 2320, 1949, 1827, 5598, 955, 87, 2320, 1507, 2092...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRemoveQuitPlayers(t *testing.T) { t.Parallel() g := blackjack.Game{ Players: []*blackjack.Player{ {Action: blackjack.ActionStand}, {Action: blackjack.ActionQuit}, {Action: blackjack.ActionQuit}, }, } g.Players = g.RemoveQuitPlayers() want := 1 got := len(g.Players) if want != got { t.Fatalf("wanted: %d, got: %d", want, got) } }
explode_data.jsonl/5924
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 13021, 42856, 24257, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3174, 1669, 57262, 20940, 515, 197, 197, 24257, 25, 29838, 11453, 27134, 23756, 515, 298, 197, 90, 2512, 25, 57262, 11360, 48733, 1583, 298, 197, 90...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTaint(t *testing.T) { tests := []struct { description string oldTaints []api.Taint newTaints []api.Taint args []string expectFatal bool expectTaint bool }{ // success cases { description: "taints a node with effect NoSchedule", newTaints: []api.Taint{{ Key: "foo", Value: "bar", Effect: "NoSchedule", }}, args: []string{"node", "node-name", "foo=bar:NoSchedule"}, expectFatal: false, expectTaint: true, }, { description: "taints a node with effect PreferNoSchedule", newTaints: []api.Taint{{ Key: "foo", Value: "bar", Effect: "PreferNoSchedule", }}, args: []string{"node", "node-name", "foo=bar:PreferNoSchedule"}, expectFatal: false, expectTaint: true, }, { description: "update an existing taint on the node, change the value from bar to barz", oldTaints: []api.Taint{{ Key: "foo", Value: "bar", Effect: "NoSchedule", }}, newTaints: []api.Taint{{ Key: "foo", Value: "barz", Effect: "NoSchedule", }}, args: []string{"node", "node-name", "foo=barz:NoSchedule", "--overwrite"}, expectFatal: false, expectTaint: true, }, { description: "taints a node with two taints", newTaints: []api.Taint{{ Key: "dedicated", Value: "namespaceA", Effect: "NoSchedule", }, { Key: "foo", Value: "bar", Effect: "PreferNoSchedule", }}, args: []string{"node", "node-name", "dedicated=namespaceA:NoSchedule", "foo=bar:PreferNoSchedule"}, expectFatal: false, expectTaint: true, }, { description: "node has two taints with the same key but different effect, remove one of them by indicating exact key and effect", oldTaints: []api.Taint{{ Key: "dedicated", Value: "namespaceA", Effect: "NoSchedule", }, { Key: "dedicated", Value: "namespaceA", Effect: "PreferNoSchedule", }}, newTaints: []api.Taint{{ Key: "dedicated", Value: "namespaceA", Effect: "PreferNoSchedule", }}, args: []string{"node", "node-name", "dedicated:NoSchedule-"}, expectFatal: false, expectTaint: true, }, { description: "node has two taints with the same key but different effect, remove all of them with wildcard", oldTaints: []api.Taint{{ 
Key: "dedicated", Value: "namespaceA", Effect: "NoSchedule", }, { Key: "dedicated", Value: "namespaceA", Effect: "PreferNoSchedule", }}, newTaints: []api.Taint{}, args: []string{"node", "node-name", "dedicated-"}, expectFatal: false, expectTaint: true, }, { description: "node has two taints, update one of them and remove the other", oldTaints: []api.Taint{{ Key: "dedicated", Value: "namespaceA", Effect: "NoSchedule", }, { Key: "foo", Value: "bar", Effect: "PreferNoSchedule", }}, newTaints: []api.Taint{{ Key: "foo", Value: "barz", Effect: "PreferNoSchedule", }}, args: []string{"node", "node-name", "dedicated:NoSchedule-", "foo=barz:PreferNoSchedule", "--overwrite"}, expectFatal: false, expectTaint: true, }, // error cases { description: "invalid taint key", args: []string{"node", "node-name", "nospecialchars^@=banana:NoSchedule"}, expectFatal: true, expectTaint: false, }, { description: "invalid taint effect", args: []string{"node", "node-name", "foo=bar:NoExcute"}, expectFatal: true, expectTaint: false, }, { description: "duplicated taints with the same key and effect should be rejected", args: []string{"node", "node-name", "foo=bar:NoExcute", "foo=barz:NoExcute"}, expectFatal: true, expectTaint: false, }, { description: "can't update existing taint on the node, since 'overwrite' flag is not set", oldTaints: []api.Taint{{ Key: "foo", Value: "bar", Effect: "NoSchedule", }}, newTaints: []api.Taint{{ Key: "foo", Value: "bar", Effect: "NoSchedule", }}, args: []string{"node", "node-name", "foo=bar:NoSchedule"}, expectFatal: true, expectTaint: false, }, } for _, test := range tests { oldNode, expectNewNode := generateNodeAndTaintedNode(test.oldTaints, test.newTaints) new_node := &api.Node{} tainted := false f, tf, codec, ns := cmdtesting.NewAPIFactory() tf.Client = &fake.RESTClient{ NegotiatedSerializer: ns, Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) { m := &MyReq{req} switch { case m.isFor("GET", "/version"): resp, err := 
genResponseWithJsonEncodedBody(serverVersion_1_5_0) if err != nil { t.Fatalf("error: failed to generate server version response: %#v\n", serverVersion_1_5_0) } return resp, nil case m.isFor("GET", "/nodes/node-name"): return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, oldNode)}, nil case m.isFor("PATCH", "/nodes/node-name"), m.isFor("PUT", "/nodes/node-name"): tainted = true data, err := ioutil.ReadAll(req.Body) if err != nil { t.Fatalf("%s: unexpected error: %v", test.description, err) } defer req.Body.Close() if err := runtime.DecodeInto(codec, data, new_node); err != nil { t.Fatalf("%s: unexpected error: %v", test.description, err) } if !AnnotationsHaveEqualTaints(expectNewNode.Annotations, new_node.Annotations) { t.Fatalf("%s: expected:\n%v\nsaw:\n%v\n", test.description, expectNewNode.Annotations, new_node.Annotations) } return &http.Response{StatusCode: 200, Header: defaultHeader(), Body: objBody(codec, new_node)}, nil default: t.Fatalf("%s: unexpected request: %v %#v\n%#v", test.description, req.Method, req.URL, req) return nil, nil } }), } tf.ClientConfig = defaultClientConfig() buf := bytes.NewBuffer([]byte{}) cmd := NewCmdTaint(f, buf) saw_fatal := false func() { defer func() { // Recover from the panic below. _ = recover() // Restore cmdutil behavior cmdutil.DefaultBehaviorOnFatal() }() cmdutil.BehaviorOnFatal(func(e string, code int) { saw_fatal = true; panic(e) }) cmd.SetArgs(test.args) cmd.Execute() }() if test.expectFatal { if !saw_fatal { t.Fatalf("%s: unexpected non-error", test.description) } } if test.expectTaint { if !tainted { t.Fatalf("%s: node not tainted", test.description) } } if !test.expectTaint { if tainted { t.Fatalf("%s: unexpected taint", test.description) } } } }
explode_data.jsonl/23743
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3000 }
[ 2830, 3393, 51, 1641, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 61828, 51, 1641, 82, 256, 3056, 2068, 836, 1641, 198, 197, 8638, 51, 1641, 82, 256, 3056, 2068, 836, 1641, 198, 197, 31215,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestUrlParse(t *testing.T) { assert := assert.New(t) // make metric name as receiver metric := escape.Path("instance:cpu_utilization:ratio_avg") + "?" + escape.Query("dc") + "=" + escape.Query("qwe") + "&" + escape.Query("fqdn") + "=" + escape.Query("asd") + "&" + escape.Query("instance") + "=" + escape.Query("10.33.10.10:9100") + "&" + escape.Query("job") + "=" + escape.Query("node") assert.Equal("instance:cpu_utilization:ratio_avg?dc=qwe&fqdn=asd&instance=10.33.10.10%3A9100&job=node", metric) // original url.Parse m, err := url.Parse(metric) assert.NotNil(m) assert.NoError(err) assert.Equal("", m.Path) // from tagged uploader m, err = urlParse(metric) assert.NotNil(m) assert.NoError(err) assert.Equal("instance:cpu_utilization:ratio_avg", m.Path) }
explode_data.jsonl/3478
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 332 }
[ 2830, 3393, 2864, 14463, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 197, 322, 1281, 18266, 829, 438, 13964, 198, 2109, 16340, 1669, 12449, 17474, 445, 4851, 25, 16475, 18974, 2022, 25, 45358, 26631, 899, 3610, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCustomBuildSkipsLocalDocker(t *testing.T) { f := newIBDFixture(t, k8s.EnvKIND6) defer f.TearDown() sha := digest.Digest("sha256:11cd0eb38bc3ceb958ffb2f9bd70be3fb317ce7d255c8a4c3f4af30e298aa1aab") f.docker.Images["gcr.io/some-project-162817/sancho:tilt-build"] = types.ImageInspect{ID: string(sha)} cb := model.CustomBuild{ Command: model.ToHostCmd("exit 0"), Deps: []string{f.JoinPath("app")}, SkipsLocalDocker: true, Tag: "tilt-build", } manifest := manifestbuilder.New(f, "sancho"). WithK8sYAML(SanchoYAML). WithImageTarget(model.MustNewImageTarget(SanchoRef).WithBuildDetails(cb)). Build() _, err := f.ibd.BuildAndDeploy(f.ctx, f.st, buildTargets(manifest), store.BuildStateSet{}) assert.NoError(t, err) // We didn't try to build, tag, or push an image assert.Equal(t, 0, f.docker.BuildCount) assert.Equal(t, 0, f.docker.TagCount) assert.Equal(t, 0, f.kl.loadCount) assert.Equal(t, 0, f.docker.PushCount) }
explode_data.jsonl/38267
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 449 }
[ 2830, 3393, 10268, 11066, 19290, 3077, 7319, 35, 13659, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 3256, 5262, 12735, 1155, 11, 595, 23, 82, 81214, 42, 5245, 21, 340, 16867, 282, 836, 682, 4454, 741, 197, 15247, 1669, 20882, 909, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTests(t *testing.T) { tmpDir, err := ioutil.TempDir("", t.Name()) if err != nil { t.Fatalf("Failed to create directory: %v", err) } if os.Getenv("PRESERVE") == "" { defer os.RemoveAll(tmpDir) } log := base.StderrLog() ts := httptest.NewServer(GitHandler( tmpDir, NewGitProtocol(authorize, nil, true, OverallWallTimeHardLimit, fakeInteractiveSettingsCompiler, log), &base.NoOpMetrics{}, log, )) defer ts.Close() problemAlias := "sumas" repo, err := InitRepository(path.Join(tmpDir, problemAlias)) if err != nil { t.Fatalf("Failed to initialize git repository: %v", err) } defer repo.Free() for idx, testcase := range []struct { name string extraContents map[string]io.Reader status string }{ { "tests is not a directory", map[string]io.Reader{ "tests": strings.NewReader(""), }, "ng refs/heads/master tests-bad-layout: tests/ directory is not a tree\n", }, { "Missing tests/tests.json", map[string]io.Reader{ "tests/foo": strings.NewReader(""), }, "ng refs/heads/master tests-bad-layout: tests/tests.json is missing\n", }, { "Corrupt settings.json", map[string]io.Reader{ "tests/tests.json": strings.NewReader(""), }, "ng refs/heads/master json-parse-error: tests/tests.json: EOF\n", }, { "Unknown fields", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "foo": "bar" }`), }, "ng refs/heads/master json-parse-error: tests/tests.json: json: unknown field \"foo\"\n", }, { "Missing validator", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "foo.py" } ] }`), }, "ng refs/heads/master tests-bad-layout: tests/foo.py is missing: the path 'foo.py' does not exist in the given tree\n", }, { "Relative paths", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "../solutions/foo.py" } ] }`), }, "ng refs/heads/master tests-bad-layout: tests/../solutions/foo.py is missing: the path '..' 
does not exist in the given tree\n", }, { "Missing score_range and verdict", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "foo.py" } ] }`), "tests/foo.py": strings.NewReader("print 1"), }, "ng refs/heads/master tests-bad-layout: score_range or validator for foo.py in tests/tests.json should be set\n", }, { "Missing score_range", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "foo.py", "score_range": [1] } ] }`), "tests/foo.py": strings.NewReader("print 1"), }, "ng refs/heads/master json-parse-error: tests/tests.json: score_range should be an array with two numbers\n", }, { "Bad score_range", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "foo.py", "score_range": [-1, 10] } ] }`), "tests/foo.py": strings.NewReader("print 1"), }, "ng refs/heads/master json-parse-error: tests/tests.json: values for score_range should be in the interval [0, 1]\n", }, { "Bad verdict", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "foo.py", "score_range": [0, 1], "verdict": "COOL VERDICT, BRO." 
} ] }`), "tests/foo.py": strings.NewReader("print 1"), }, "ng refs/heads/master tests-bad-layout: verdict for foo.py in tests/tests.json is not valid\n", }, { "Bad validator", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "solutions/foo.py", "verdict": "AC" } ], "inputs": { "filename": "test-validator.py" } }`), "tests/solutions/foo.py": strings.NewReader("print 1"), }, "ng refs/heads/master tests-bad-layout: tests/test-validator.py is missing: the path 'test-validator.py' does not exist in the given tree\n", }, { "Valid", map[string]io.Reader{ "tests/tests.json": strings.NewReader(`{ "solutions": [ { "filename": "solutions/foo.py", "score_range": [1, 1], "verdict": "AC" } ] }`), "tests/solutions/foo.py": strings.NewReader("print 1"), }, "ok refs/heads/master\n", }, } { t.Run(fmt.Sprintf("%d %s", idx, testcase.name), func(t *testing.T) { contents := map[string]io.Reader{ "settings.json": strings.NewReader(gitservertest.DefaultSettingsJSON), "cases/0.in": strings.NewReader("1 2"), "cases/0.out": strings.NewReader("3"), "statements/es.markdown": strings.NewReader("Sumas"), } for name, r := range testcase.extraContents { contents[name] = r } newOid, packContents := createCommit( t, tmpDir, problemAlias, &git.Oid{}, contents, "Initial commit", log, ) push( t, tmpDir, adminAuthorization, problemAlias, "refs/heads/master", &git.Oid{}, newOid, packContents, []githttp.PktLineResponse{ {Line: "unpack ok\n", Err: nil}, {Line: testcase.status, Err: nil}, }, ts, ) }) } }
explode_data.jsonl/63171
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2586 }
[ 2830, 3393, 18200, 1155, 353, 8840, 836, 8, 341, 20082, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 259, 2967, 2398, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 311, 1855, 6220, 25, 1018, 85, 497, 1848, 340, 197, 532,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConditions(t *testing.T) { txTime, err := time.Parse(time.RFC3339, "2013-05-03T14:45:00Z") require.NoError(t, err) testCases := []struct { s string conditions []query.Condition }{ {s: "tm.events.type='NewBlock'", conditions: []query.Condition{{Tag: "tm.events.type", Op: query.OpEqual, Operand: "NewBlock"}}}, {s: "tx.gas > 7 AND tx.gas < 9", conditions: []query.Condition{{Tag: "tx.gas", Op: query.OpGreater, Operand: int64(7)}, {Tag: "tx.gas", Op: query.OpLess, Operand: int64(9)}}}, {s: "tx.time >= TIME 2013-05-03T14:45:00Z", conditions: []query.Condition{{Tag: "tx.time", Op: query.OpGreaterEqual, Operand: txTime}}}, } for _, tc := range testCases { q, err := query.New(tc.s) require.Nil(t, err) assert.Equal(t, tc.conditions, q.Conditions()) } }
explode_data.jsonl/80926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 338 }
[ 2830, 3393, 35435, 1155, 353, 8840, 836, 8, 341, 46237, 1462, 11, 1848, 1669, 882, 8937, 9730, 2013, 6754, 18, 18, 18, 24, 11, 330, 17, 15, 16, 18, 12, 15, 20, 12, 15, 18, 51, 16, 19, 25, 19, 20, 25, 15, 15, 57, 1138, 17957,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetKubeJWTAuthenticator(t *testing.T) { type want struct { k8sAPIServerURL string caCertPath string jwtPath string } tests := []struct { name string failFirst bool config *rest.Config configErr error want want wantErr bool jwtCalled int clusterCalled int }{ { name: "default method works", want: want{ k8sAPIServerURL: "https://kubernetes.default.svc/apis/authentication.k8s.io/v1/tokenreviews", caCertPath: caCertPath, jwtPath: jwtPath, }, jwtCalled: 1, clusterCalled: 0, }, { name: "default method works with valid clusterconfig", config: &rest.Config{ Host: "a", TLSClientConfig: rest.TLSClientConfig{ CAFile: "b", }, BearerTokenFile: "c", }, want: want{ k8sAPIServerURL: "https://kubernetes.default.svc/apis/authentication.k8s.io/v1/tokenreviews", caCertPath: caCertPath, jwtPath: jwtPath, }, jwtCalled: 1, clusterCalled: 0, }, { name: "default method works with invalid clusterconfig", configErr: fmt.Errorf("clusterconfig err"), want: want{ k8sAPIServerURL: "https://kubernetes.default.svc/apis/authentication.k8s.io/v1/tokenreviews", caCertPath: caCertPath, jwtPath: jwtPath, }, jwtCalled: 1, clusterCalled: 0, }, { name: "fallback on clusterconfig", failFirst: true, config: &rest.Config{ Host: "a", TLSClientConfig: rest.TLSClientConfig{ CAFile: "b", }, BearerTokenFile: "c", }, want: want{ k8sAPIServerURL: "a/apis/authentication.k8s.io/v1/tokenreviews", caCertPath: "b", jwtPath: "c", }, jwtCalled: 2, clusterCalled: 1, }, { name: "clusterconfig fails", failFirst: true, configErr: fmt.Errorf("foo"), wantErr: true, jwtCalled: 1, clusterCalled: 1, }, } defer func() { newKubeJWTAuthenticator = authenticate.NewKubeJWTAuthenticator inClusterConfig = rest.InClusterConfig }() for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { first := true newKubeJWTAuthenticator = func(mc *kubecontroller.Multicluster, k8sAPIServerURL, caCertPath, jwtPath, _, _, _ string) (*authenticate.KubeJWTAuthenticator, error) { tt.jwtCalled-- if tt.failFirst && first { first = false return 
nil, fmt.Errorf("jwt err") } assert.Equal(t, tt.want.k8sAPIServerURL, k8sAPIServerURL) assert.Equal(t, tt.want.caCertPath, caCertPath) assert.Equal(t, tt.want.jwtPath, jwtPath) assert.Equal(t, tt.want.k8sAPIServerURL, k8sAPIServerURL) return new(authenticate.KubeJWTAuthenticator), nil } inClusterConfig = func() (*rest.Config, error) { tt.clusterCalled-- return tt.config, tt.configErr } _, err := getKubeJWTAuthenticator(nil, "", "", "") if tt.wantErr { assert.Error(t, err) } else { assert.NoError(t, err) } assert.Equal(t, 0, tt.clusterCalled) assert.Equal(t, 0, tt.jwtCalled) }) } }
explode_data.jsonl/53140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1572 }
[ 2830, 3393, 1949, 42, 3760, 55172, 5087, 61393, 1155, 353, 8840, 836, 8, 341, 13158, 1366, 2036, 341, 197, 16463, 23, 82, 2537, 1637, 2836, 3144, 914, 198, 197, 197, 924, 36934, 1820, 414, 914, 198, 197, 12428, 9306, 1820, 260, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateProject(t *testing.T) { testCases := []struct { label string inp Project expectedError string mockdb *db.MockDB expected Project }{ { label: "Create Project", inp: Project{ MetaData: ProjectMetaData{ Name: "testProject", Description: "A sample Project used for unit testing", UserData1: "data1", UserData2: "data2", }, }, expected: Project{ MetaData: ProjectMetaData{ Name: "testProject", Description: "A sample Project used for unit testing", UserData1: "data1", UserData2: "data2", }, }, expectedError: "", mockdb: &db.MockDB{}, }, { label: "Failed Create Project", expectedError: "Error Creating Project", mockdb: &db.MockDB{ Err: pkgerrors.New("Error Creating Project"), }, }, { label: "Create Existing Project", inp: Project{ MetaData: ProjectMetaData{ Name: "testProject", Description: "A sample Project used for unit testing", UserData1: "data1", UserData2: "data2", }, }, expectedError: "Project already exists", mockdb: &db.MockDB{ Items: []map[string]map[string][]byte{ { ProjectKey{ProjectName: "testProject"}.String(): { "projectmetadata": []byte( "{" + "\"metadata\" : {" + "\"Name\":\"testProject\"," + "\"Description\":\"Test project for unit testing\"," + "\"UserData1\":\"userData1\"," + "\"UserData2\":\"userData2\"}" + "}"), }, }, }, }, }, } for _, testCase := range testCases { t.Run(testCase.label, func(t *testing.T) { db.DBconn = testCase.mockdb impl := NewProjectClient() got, err := impl.CreateProject(testCase.inp, false) if err != nil { if testCase.expectedError == "" { t.Fatalf("Create returned an unexpected error %s", err) } if strings.Contains(err.Error(), testCase.expectedError) == false { t.Fatalf("Create returned an unexpected error %s", err) } } else { if reflect.DeepEqual(testCase.expected, got) == false { t.Errorf("Create returned unexpected body: got %v;"+ " expected %v", got, testCase.expected) } } }) } }
explode_data.jsonl/20166
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1054 }
[ 2830, 3393, 4021, 7849, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 29277, 260, 914, 198, 197, 17430, 79, 1843, 5787, 198, 197, 42400, 1454, 914, 198, 197, 77333, 1999, 286, 353, 1999, 24664, 3506, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestJsonSamples(t *testing.T) { bidder, buildErr := Builder(openrtb_ext.BidderEpom, config.Adapter{ Endpoint: "https://an.epom.com/ortb"}) if buildErr != nil { t.Fatalf("Builder returned unexpected error %v", buildErr) } adapterstest.RunJSONBidderTest(t, "epomtest", bidder) }
explode_data.jsonl/57716
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 5014, 39571, 1155, 353, 8840, 836, 8, 341, 2233, 307, 1107, 11, 1936, 7747, 1669, 20626, 30981, 3342, 65, 9927, 1785, 307, 1107, 22373, 316, 11, 2193, 34190, 515, 197, 197, 27380, 25, 330, 2428, 1110, 276, 33376, 316, 905,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSuccessfulMount(t *testing.T) { ctx := context.Background() // Set up a temporary directory. dir, err := ioutil.TempDir("", "mount_test") if err != nil { t.Fatal("ioutil.TempDir: %v", err) } defer os.RemoveAll(dir) // Mount. fs := &minimalFS{} mfs, err := fuse.Mount( dir, fuseutil.NewFileSystemServer(fs), &fuse.MountConfig{}) if err != nil { t.Fatalf("fuse.Mount: %v", err) } defer func() { if err := mfs.Join(ctx); err != nil { t.Errorf("Joining: %v", err) } }() defer fuse.Unmount(mfs.Dir()) }
explode_data.jsonl/28606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 36374, 16284, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 197, 322, 2573, 705, 264, 13340, 6220, 624, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 16557, 4452, 1138, 743, 1848, 961, 2092, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSubkeyRevocation(t *testing.T) { kring, _ := ReadKeyRing(readerFromHex(revokedSubkeyHex)) // revokedSubkeyHex contains these keys: // pub 1024R/4EF7E4BECCDE97F0 2014-03-25 // sub 1024R/D63636E2B96AE423 2014-03-25 // sub 1024D/DBCE4EE19529437F 2014-03-25 // sub 1024R/677815E371C2FD23 2014-03-25 [revoked: 2014-03-25] validKeys := []uint64{0x4EF7E4BECCDE97F0, 0xD63636E2B96AE423, 0xDBCE4EE19529437F} revokedKey := uint64(0x677815E371C2FD23) for _, id := range validKeys { keys := kring.KeysById(id) if len(keys) != 1 { t.Errorf("Expected KeysById to find key %X, but got %d matches", id, len(keys)) } keys = kring.KeysByIdUsage(id, 0) if len(keys) != 1 { t.Errorf("Expected KeysByIdUsage to find key %X, but got %d matches", id, len(keys)) } } keys := kring.KeysById(revokedKey) if len(keys) != 1 { t.Errorf("Expected KeysById to find key %X, but got %d matches", revokedKey, len(keys)) } keys = kring.KeysByIdUsage(revokedKey, 0) if len(keys) != 0 { t.Errorf("Expected KeysByIdUsage to filter out revoked key %X, but got %d matches", revokedKey, len(keys)) } }
explode_data.jsonl/14739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 485 }
[ 2830, 3393, 3136, 792, 36184, 2276, 1155, 353, 8840, 836, 8, 341, 197, 9855, 287, 11, 716, 1669, 4457, 1592, 43466, 21987, 3830, 20335, 5801, 85, 10823, 3136, 792, 20335, 4390, 197, 322, 64346, 3136, 792, 20335, 5610, 1493, 6894, 510, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestFilterAPIOutput(t *testing.T) { state0 := SetupSim("LLLLLAAF", map[string]string{}, 25, 1, 1, t) RunCmd("1") RunCmd("w") RunCmd("s") apiRegex := "EOM.*5.*minute +1" // It has two spaces. SetOutputFilter(apiRegex) WaitBlocks(state0, 5) // The message-filter call we did above should have caused an election and SO, Node01 should not be a leader anymore. if GetFnodes()[1].State.Leader { t.Fatalf("Node01 should not be leader!") } CheckAuthoritySet(t) // Check Node01 Network Output logs to make sure there are no Dropped messaged besides the ones for our Regex out := SystemCall(`grep "Drop, matched filter Regex" fnode01_networkoutputs.txt | grep -Ev "` + apiRegex + `" | wc -l`) if strings.TrimSuffix(strings.Trim(string(out), " "), "\n") != string("0") { t.Fatalf("Filter missed let a message pass 1.") } // Checks Node01 Network Outputs to make sure there are no Sent broadcast including our Regex out2 := SystemCall(`grep "Send broadcast" fnode01_networkoutputs.txt | grep "` + apiRegex + `" | grep -v "EmbeddedMsg" | wc -l`) if strings.TrimSuffix(strings.Trim(string(out2), " "), "\n") != string("0") { t.Fatalf("Filter missed let a message pass 2.") } ShutDownEverything(t) }
explode_data.jsonl/390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 5632, 7082, 5097, 1155, 353, 8840, 836, 8, 1476, 24291, 15, 1669, 18626, 14027, 445, 49356, 43, 71038, 497, 2415, 14032, 30953, 22655, 220, 17, 20, 11, 220, 16, 11, 220, 16, 11, 259, 692, 85952, 15613, 445, 16, 1138, 859...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRawPrepare(t *testing.T) { var ( result sql.Result err error pre RawPreparer ) switch { case IsMysql || IsSqlite: pre, err = dORM.Raw("INSERT INTO tag (name) VALUES (?)").Prepare() assert.Nil(t, err) if pre != nil { result, err = pre.Exec("name1") assert.Nil(t, err) tid, err := result.LastInsertId() assert.Nil(t, err) assert.True(t, tid > 0) result, err = pre.Exec("name2") assert.Nil(t, err) id, err := result.LastInsertId() assert.Nil(t, err) assert.Equal(t, id, tid+1) result, err = pre.Exec("name3") assert.Nil(t, err) id, err = result.LastInsertId() assert.Nil(t, err) assert.Equal(t, id, tid+2) err = pre.Close() assert.Nil(t, err) res, err := dORM.Raw("DELETE FROM tag WHERE name IN (?, ?, ?)", []string{"name1", "name2", "name3"}).Exec() assert.Nil(t, err) num, err := res.RowsAffected() assert.Nil(t, err) assert.Equal(t, num, int64(3)) } case IsPostgres: pre, err = dORM.Raw(`INSERT INTO "tag" ("name") VALUES (?) RETURNING "id"`).Prepare() assert.Nil(t, err) if pre != nil { _, err = pre.Exec("name1") assert.Nil(t, err) _, err = pre.Exec("name2") assert.Nil(t, err) _, err = pre.Exec("name3") assert.Nil(t, err) err = pre.Close() assert.Nil(t, err) res, err := dORM.Raw(`DELETE FROM "tag" WHERE "name" IN (?, ?, ?)`, []string{"name1", "name2", "name3"}).Exec() assert.Nil(t, err) if err == nil { num, err := res.RowsAffected() assert.Nil(t, err) assert.Equal(t, num, int64(3)) } } } }
explode_data.jsonl/18151
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 762 }
[ 2830, 3393, 20015, 50590, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 9559, 5704, 18456, 198, 197, 9859, 262, 1465, 198, 197, 40346, 262, 23022, 4703, 61119, 198, 197, 340, 8961, 341, 2722, 2160, 44, 14869, 1369, 2160, 8269, 632, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestResource_Retrieve(t *testing.T) { type fields struct { session session.ServiceFormatter } type args struct { haltOnError bool requesters []Subrequester } tests := []struct { name string fields fields args args want Value wantErr bool }{ { name: "success", fields: fields{ session: &mockSessionFormatter{ url: "https://test.salesforce.com", client: mockHTTPClient(func(req *http.Request) *http.Response { if req.URL.String() != "https://test.salesforce.com/composite/batch" { return &http.Response{ StatusCode: 500, Status: "Invalid URL", Body: ioutil.NopCloser(strings.NewReader(req.URL.String())), Header: make(http.Header), } } resp := `{ "hasErrors" : false, "results" : [{ "statusCode" : 204, "result" : null },{ "statusCode" : 200, "result": { "attributes" : { "type" : "Account", "url" : "/services/data/v34.0/sobjects/Account/001D000000K0fXOIAZ" }, "Name" : "NewName", "BillingPostalCode" : "94105", "Id" : "001D000000K0fXOIAZ" } }] }` return &http.Response{ StatusCode: http.StatusOK, Status: "Good", Body: ioutil.NopCloser(strings.NewReader(resp)), Header: make(http.Header), } }), }, }, args: args{ haltOnError: false, requesters: []Subrequester{ &mockSubrequester{ url: "www.something.com", method: http.MethodGet, }, }, }, want: Value{ HasErrors: false, Results: []Subvalue{ { StatusCode: 204, }, { Result: map[string]interface{}{ "attributes": map[string]interface{}{ "type": "Account", "url": "/services/data/v34.0/sobjects/Account/001D000000K0fXOIAZ", }, "Name": "NewName", "BillingPostalCode": "94105", "Id": "001D000000K0fXOIAZ", }, StatusCode: 200, }, }, }, wantErr: false, }, { name: "Errors", fields: fields{ session: &mockSessionFormatter{ url: "https://test.salesforce.com", client: mockHTTPClient(func(req *http.Request) *http.Response { if req.URL.String() != "https://test.salesforce.com/composite/batch" { return &http.Response{ StatusCode: 500, Status: "Invalid URL", Body: ioutil.NopCloser(strings.NewReader(req.URL.String())), Header: make(http.Header), } } 
resp := `[ { "fields" : [ "Id" ], "message" : "Account ID: id value of incorrect type: 001900K0001pPuOAAU", "errorCode" : "MALFORMED_ID" } ]` return &http.Response{ StatusCode: http.StatusBadRequest, Status: "Bad", Body: ioutil.NopCloser(strings.NewReader(resp)), Header: make(http.Header), } }), }, }, args: args{ haltOnError: false, requesters: []Subrequester{ &mockSubrequester{ url: "www.something.com", method: http.MethodGet, }, }, }, want: Value{}, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { r := &Resource{ session: tt.fields.session, } got, err := r.Retrieve(tt.args.haltOnError, tt.args.requesters) if (err != nil) != tt.wantErr { t.Errorf("Resource.Retrieve() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("Resource.Retrieve() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/19628
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1984 }
[ 2830, 3393, 4783, 2568, 295, 45004, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 25054, 3797, 13860, 14183, 198, 197, 532, 13158, 2827, 2036, 341, 197, 9598, 3145, 74945, 1807, 198, 197, 23555, 388, 220, 3056, 3136, 2035, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCertificateResolver_Configuration(t *testing.T) { t.Run("should return configuration", func(t *testing.T) { // given authenticator := &authenticationMocks.Authenticator{} authenticator.On("Authenticate", context.Background()).Return(clientId, nil) tokenService := &tokensMocks.Service{} tokenService.On("GetToken", mock.Anything, subject.CommonName).Return(token, nil) revokedCertsRepository := &revocationMocks.RevokedCertificatesRepository{} certificateResolver := NewCertificateResolver(authenticator, tokenService, nil, subject.CSRSubjectConsts, directorURL, certSecuredConnectorURL, revokedCertsRepository) // when configurationResult, err := certificateResolver.Configuration(context.Background()) // then require.NoError(t, err) assert.Equal(t, token, configurationResult.Token.Token) assert.Equal(t, &directorURL, configurationResult.ManagementPlaneInfo.DirectorURL) assert.Equal(t, &certSecuredConnectorURL, configurationResult.ManagementPlaneInfo.CertificateSecuredConnectorURL) assert.Equal(t, expectedSubject(subject.CSRSubjectConsts, subject.CommonName), configurationResult.CertificateSigningRequestInfo.Subject) assert.Equal(t, "rsa2048", configurationResult.CertificateSigningRequestInfo.KeyAlgorithm) mock.AssertExpectationsForObjects(t, tokenService, authenticator) }) t.Run("should return error when failed to generate token", func(t *testing.T) { // given authenticator := &authenticationMocks.Authenticator{} authenticator.On("Authenticate", context.Background()).Return(clientId, nil) tokenService := &tokensMocks.Service{} tokenService.On("GetToken", mock.Anything, subject.CommonName).Return("", apperrors.Internal("error")) revokedCertsRepository := &revocationMocks.RevokedCertificatesRepository{} certificateResolver := NewCertificateResolver(authenticator, tokenService, nil, subject.CSRSubjectConsts, directorURL, certSecuredConnectorURL, revokedCertsRepository) // when configurationResult, err := certificateResolver.Configuration(context.Background()) // 
then require.Error(t, err) require.Nil(t, configurationResult) mock.AssertExpectationsForObjects(t, tokenService, authenticator) }) t.Run("should return error when failed to authenticate", func(t *testing.T) { // given authenticator := &authenticationMocks.Authenticator{} authenticator.On("Authenticate", context.Background()).Return("", apperrors.Forbidden("Error")) tokenService := &tokensMocks.Service{} revokedCertsRepository := &revocationMocks.RevokedCertificatesRepository{} certificateResolver := NewCertificateResolver(authenticator, tokenService, nil, subject.CSRSubjectConsts, directorURL, certSecuredConnectorURL, revokedCertsRepository) // when configurationResult, err := certificateResolver.Configuration(context.Background()) // then require.Error(t, err) require.Nil(t, configurationResult) mock.AssertExpectationsForObjects(t, tokenService, authenticator) }) }
explode_data.jsonl/39523
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 909 }
[ 2830, 3393, 33202, 18190, 35412, 2017, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 5445, 470, 6546, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 2661, 198, 197, 78011, 61393, 1669, 609, 45890, 72577, 25233, 61393, 16094,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCatalog_RegisterService_SkipNodeUpdate(t *testing.T) { t.Parallel() dir1, s1 := testServer(t) defer os.RemoveAll(dir1) defer s1.Shutdown() codec := rpcClient(t, s1) defer codec.Close() // Register a node arg := structs.RegisterRequest{ Datacenter: "dc1", Node: "foo", Address: "127.0.0.1", } var out struct{} err := msgpackrpc.CallWithCodec(codec, "Catalog.Register", &arg, &out) if err != nil { t.Fatal(err) } // Update it with a blank address, should fail. arg.Address = "" arg.Service = &structs.NodeService{ Service: "db", Port: 8000, } err = msgpackrpc.CallWithCodec(codec, "Catalog.Register", &arg, &out) if err == nil || err.Error() != "Must provide address if SkipNodeUpdate is not set" { t.Fatalf("got error %v want 'Must provide address...'", err) } // Set SkipNodeUpdate, should succeed arg.SkipNodeUpdate = true err = msgpackrpc.CallWithCodec(codec, "Catalog.Register", &arg, &out) if err != nil { t.Fatal(err) } }
explode_data.jsonl/49207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 399 }
[ 2830, 3393, 41606, 73124, 1860, 1098, 13389, 1955, 4289, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 48532, 16, 11, 274, 16, 1669, 1273, 5475, 1155, 340, 16867, 2643, 84427, 14161, 16, 340, 16867, 274, 16, 10849, 18452, 741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestNewService(t *testing.T) { g := NewGomegaWithT(t) scheme := runtime.NewScheme() _ = clusterv1.AddToScheme(scheme) _ = infrav1.AddToScheme(scheme) _ = infrav1exp.AddToScheme(scheme) cluster := &clusterv1.Cluster{ ObjectMeta: metav1.ObjectMeta{Name: "test-cluster"}, } client := fake.NewClientBuilder().WithScheme(scheme).WithObjects(cluster).Build() s, err := scope.NewClusterScope(context.Background(), scope.ClusterScopeParams{ AzureClients: scope.AzureClients{ Authorizer: autorest.NullAuthorizer{}, }, Client: client, Cluster: cluster, AzureCluster: &infrav1.AzureCluster{ Spec: infrav1.AzureClusterSpec{ Location: "test-location", ResourceGroup: "my-rg", SubscriptionID: "123", NetworkSpec: infrav1.NetworkSpec{ Vnet: infrav1.VnetSpec{Name: "my-vnet", ResourceGroup: "my-rg"}, }, }, }, }) g.Expect(err).ToNot(HaveOccurred()) mps, err := scope.NewMachinePoolScope(scope.MachinePoolScopeParams{ Client: client, Logger: s.Logger, MachinePool: new(clusterv1exp.MachinePool), AzureMachinePool: new(infrav1exp.AzureMachinePool), ClusterScope: s, }) g.Expect(err).ToNot(HaveOccurred()) actual := NewService(mps, resourceskus.NewStaticCache(nil, "")) g.Expect(actual).ToNot(BeNil()) }
explode_data.jsonl/78240
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 552 }
[ 2830, 3393, 3564, 1860, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 340, 1903, 8058, 1669, 15592, 7121, 28906, 741, 197, 62, 284, 1185, 590, 648, 16, 1904, 1249, 28906, 1141, 8058, 340, 197, 62, 284, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseWithFormattingDisabled(t *testing.T) { DisableFormatting() defer EnableFormatting() output, err := Parse("plain <red>red <bold>bold red</bold></red> plain <green>green</green> plain") require.Nil(t, err) assert.Equal(t, "plain red bold red plain green plain", output) }
explode_data.jsonl/27688
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 14463, 2354, 82135, 25907, 1155, 353, 8840, 836, 8, 341, 197, 25479, 82135, 741, 16867, 18567, 82135, 741, 21170, 11, 1848, 1669, 14775, 445, 20772, 366, 1151, 29, 1151, 366, 14824, 29, 14824, 2518, 522, 14824, 1472, 1151, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient(t *testing.T) { server := &conn.PlainServer{ Host: "127.0.0.1", Port: 8899, } conn.InitServerSettings(server, 1000, time.Second*5) cache := list.New() // TODO bug fix error: connection pool is full for i := 0; i < 10000000; i++ { if i > 0 && i%500 == 0 { gox.WalkList(cache, func(item interface{}) bool { conn.ReturnConnection(server, item.(*net.Conn), "01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd01092391231231231023sdkasdasdaksdkasjdajsdjasdjalsjdlasjdljalsd", false) return false }) cache = list.New() time.Sleep(time.Second * 7) } c, _, err := conn.GetConnection(server) if err != nil { logger.Error("error: ", err) } cache.PushBack(c) } }
explode_data.jsonl/60006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 577 }
[ 2830, 3393, 2959, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 609, 5148, 21368, 466, 5475, 515, 197, 197, 9296, 25, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 756, 197, 98459, 25, 220, 23, 23, 24, 24, 345, 197, 532, 32917, 26849, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContextFormFile(t *testing.T) { e := New() buf := new(bytes.Buffer) mr := multipart.NewWriter(buf) w, err := mr.CreateFormFile("file", "test") if assert.NoError(t, err) { w.Write([]byte("test")) } mr.Close() req := test.NewRequest(POST, "/", buf) req.Header().Set(HeaderContentType, mr.FormDataContentType()) rec := test.NewResponseRecorder() c := e.NewContext(req, rec) f, err := c.FormFile("file") if assert.NoError(t, err) { assert.Equal(t, "test", f.Filename) } }
explode_data.jsonl/35875
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 1972, 1838, 1703, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1532, 741, 26398, 1669, 501, 23158, 22622, 340, 2109, 81, 1669, 68058, 7121, 6492, 10731, 340, 6692, 11, 1848, 1669, 17317, 7251, 1838, 1703, 445, 1192, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestExample(t *testing.T) { bd := ` { "servers": [ {"id": "s1", "name": "app", "protocols": ["https", "ssh"], "ports": ["p1", "p2", "p3"]}, {"id": "s2", "name": "db", "protocols": ["mysql"], "ports": ["p3"]}, {"id": "s3", "name": "cache", "protocols": ["memcache", "http"], "ports": ["p3"]}, {"id": "s4", "name": "dev", "protocols": ["http"], "ports": ["p1", "p2"]} ], "networks": [ {"id": "n1", "public": false}, {"id": "n2", "public": false}, {"id": "n3", "public": true} ], "ports": [ {"id": "p1", "networks": ["n1"]}, {"id": "p2", "networks": ["n3"]}, {"id": "p3", "networks": ["n2"]} ] } ` vd := `package opa.example import data.servers import data.networks import data.ports public_servers[server] { server = servers[_]; server.ports[_] = ports[i].id; ports[i].networks[_] = networks[j].id; networks[j].public = true } violations[server] { server = servers[_]; server.protocols[_] = "http"; public_servers[server] }` var doc map[string]interface{} if err := util.UnmarshalJSON([]byte(bd), &doc); err != nil { panic(err) } compiler := compileModules([]string{vd}) store := inmem.NewFromObject(doc) assertTopDownWithPath(t, compiler, store, "public servers", []string{"opa", "example", "public_servers"}, "{}", ` [ {"id": "s1", "name": "app", "protocols": ["https", "ssh"], "ports": ["p1", "p2", "p3"]}, {"id": "s4", "name": "dev", "protocols": ["http"], "ports": ["p1", "p2"]} ] `) assertTopDownWithPath(t, compiler, store, "violations", []string{"opa", "example", "violations"}, "{}", ` [ {"id": "s4", "name": "dev", "protocols": ["http"], "ports": ["p1", "p2"]} ] `) assertTopDownWithPath(t, compiler, store, "both", []string{"opa", "example"}, "{}", ` { "public_servers": [ {"id": "s1", "name": "app", "protocols": ["https", "ssh"], "ports": ["p1", "p2", "p3"]}, {"id": "s4", "name": "dev", "protocols": ["http"], "ports": ["p1", "p2"]} ], "violations": [ {"id": "s4", "name": "dev", "protocols": ["http"], "ports": ["p1", "p2"]} ] } `) }
explode_data.jsonl/25227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1134 }
[ 2830, 3393, 13314, 1155, 353, 8840, 836, 8, 1476, 2233, 67, 1669, 22074, 286, 341, 310, 330, 67696, 788, 2278, 394, 5212, 307, 788, 330, 82, 16, 497, 330, 606, 788, 330, 676, 497, 330, 85412, 788, 4383, 2428, 497, 330, 25537, 7914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMultiDefineBool(t *testing.T) { for _, ctName := range []string{ "charB1", "charB1Null", "charC1", "charC1Null", } { gen := _T_boolGen[ctName+"_true"] if gen == nil { continue } t.Run(ctName, func(t *testing.T) { t.Parallel() testMultiDefine( gen(), _T_colType[ctName], t, ) }) } }
explode_data.jsonl/48072
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 20358, 35338, 11233, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 20251, 675, 1669, 2088, 3056, 917, 515, 197, 197, 1, 1762, 33, 16, 497, 330, 1762, 33, 16, 3280, 756, 197, 197, 1, 1762, 34, 16, 497, 330, 1762, 34, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMemRespDisclosurePol(t *testing.T) { pol := func(remotePeer *NetworkMember) (Sieve, EnvelopeFilter) { return func(_ *protoext.SignedGossipMessage) bool { return remotePeer.Endpoint != "localhost:7879" }, func(m *protoext.SignedGossipMessage) *proto.Envelope { return m.Envelope } } wasMembershipResponseReceived := func(msg *protoext.SignedGossipMessage) { assert.Nil(t, msg.GetMemRes()) } d1 := createDiscoveryInstanceThatGossips(7878, "d1", []string{}, true, pol, defaultTestConfig) defer d1.Stop() d2 := createDiscoveryInstanceThatGossipsWithInterceptors(7879, "d2", []string{"localhost:7878"}, true, noopPolicy, wasMembershipResponseReceived, defaultTestConfig) defer d2.Stop() d3 := createDiscoveryInstanceThatGossips(7880, "d3", []string{"localhost:7878"}, true, pol, defaultTestConfig) defer d3.Stop() // all peers know each other assertMembership(t, []*gossipInstance{d1, d2, d3}, 2) // d2 received some messages, but we asserted that none of them are membership responses. assert.NotZero(t, d2.receivedMsgCount()) assert.NotZero(t, d2.sentMsgCount()) }
explode_data.jsonl/62275
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 399 }
[ 2830, 3393, 18816, 36555, 91065, 14658, 1155, 353, 8840, 836, 8, 341, 3223, 337, 1669, 2915, 61381, 30888, 353, 12320, 9366, 8, 320, 50, 19155, 11, 2925, 18853, 5632, 8, 341, 197, 853, 2915, 2490, 353, 15110, 427, 808, 1542, 38, 41473...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateConfigForDCAAPICandles(t *testing.T) { cfg := Config{ Nickname: "ExampleStrategyDCAAPICandles", Goal: "To demonstrate DCA strategy using API candles", StrategySettings: StrategySettings{ Name: dca, }, CurrencySettings: []CurrencySettings{ { ExchangeName: testExchange, Asset: asset.Spot.String(), Base: currency.BTC.String(), Quote: currency.USDT.String(), InitialQuoteFunds: initialQuoteFunds2, BuySide: minMax, SellSide: minMax, Leverage: Leverage{ CanUseLeverage: false, }, MakerFee: makerFee, TakerFee: takerFee, }, }, DataSettings: DataSettings{ Interval: kline.OneDay.Duration(), DataType: common.CandleStr, APIData: &APIData{ StartDate: startDate, EndDate: endDate, InclusiveEndDate: false, }, }, PortfolioSettings: PortfolioSettings{ BuySide: minMax, SellSide: minMax, Leverage: Leverage{ CanUseLeverage: false, }, }, StatisticSettings: StatisticSettings{ RiskFreeRate: decimal.NewFromFloat(0.03), }, } if saveConfig { result, err := json.MarshalIndent(cfg, "", " ") if err != nil { t.Fatal(err) } p, err := os.Getwd() if err != nil { t.Fatal(err) } err = ioutil.WriteFile(filepath.Join(p, "examples", "dca-api-candles.strat"), result, 0770) if err != nil { t.Error(err) } } }
explode_data.jsonl/58405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 688 }
[ 2830, 3393, 31115, 2648, 2461, 35, 5049, 2537, 1317, 20125, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 5532, 515, 197, 18317, 41052, 25, 330, 13314, 19816, 35, 5049, 2537, 1317, 20125, 756, 197, 9600, 78, 278, 25, 257, 330, 1249, 1986...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestChargesBad(t *testing.T) { _, err := client.Charge(&Charge{ Amount: "123000000", Description: "a test invoice", InternalID: "testb", }) if err == nil { t.Errorf(".Charge() should have returned an error") } if err.Error() != "The maximum Charge amount supported is 45,000 satoshis." { t.Errorf(".Charge() returned the wrong error") } _, err = client.Charge(&Charge{ Amount: "-120", Description: "a test invoice", InternalID: "testb", }) if err == nil { t.Errorf(".Charge() should have returned an error") } }
explode_data.jsonl/42387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 1143, 58240, 17082, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 2943, 6353, 2744, 2099, 55363, 515, 197, 197, 10093, 25, 414, 330, 16, 17, 18, 15, 15, 15, 15, 15, 15, 756, 197, 47414, 25, 330, 64, 1273, 24615,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTooManyChannelsOnCreateSub(t *testing.T) { sOpts := GetDefaultOptions() sOpts.ID = clusterName sOpts.MaxChannels = 1 s := runServerWithOpts(t, sOpts, nil) defer s.Shutdown() sc := NewDefaultConnection(t) defer sc.Close() // That should create channel foo if _, err := sc.Subscribe("foo", func(_ *stan.Msg) {}); err != nil { t.Fatalf("Unexpected error on subscribe: %v", err) } // This should fail because we reached the limit if _, err := sc.Subscribe("bar", func(_ *stan.Msg) {}); err == nil { t.Fatalf("Expected error due to too many channels, got none") } }
explode_data.jsonl/23089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 31246, 8441, 35925, 1925, 4021, 3136, 1155, 353, 8840, 836, 8, 341, 1903, 43451, 1669, 2126, 3675, 3798, 741, 1903, 43451, 9910, 284, 10652, 675, 198, 1903, 43451, 14535, 35925, 284, 220, 16, 198, 1903, 1669, 1598, 5475, 235...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceManagerOverridesStepSuccess(t *testing.T) { ts := SMOverrideTestSuite{} tests := map[string]struct { requestParams internal.ProvisioningParameters overrideParams servicemanager.Config expCredentialsValues []*gqlschema.ConfigEntryInput }{ "always apply override for Service Manager credentials": { requestParams: ts.SMRequestParameters("req-url", "req-user", "req-pass"), overrideParams: ts.SMOverrideConfig(servicemanager.SMOverrideModeAlways, "over-url", "over-user", "over-pass"), expCredentialsValues: []*gqlschema.ConfigEntryInput{ {Key: "config.sm.url", Value: "over-url"}, {Key: "sm.user", Value: "over-user"}, {Key: "sm.password", Value: "over-pass", Secret: ptr.Bool(true)}, }, }, "never apply override for Service Manager credentials": { requestParams: ts.SMRequestParameters("req-url", "req-user", "req-pass"), overrideParams: ts.SMOverrideConfig(servicemanager.SMOverrideModeNever, "over-url", "over-user", "over-pass"), expCredentialsValues: []*gqlschema.ConfigEntryInput{ {Key: "config.sm.url", Value: "req-url"}, {Key: "sm.user", Value: "req-user"}, {Key: "sm.password", Value: "req-pass", Secret: ptr.Bool(true)}, }, }, "apply override for Service Manager credentials because they are not present in request": { requestParams: internal.ProvisioningParameters{}, overrideParams: ts.SMOverrideConfig(servicemanager.SMOverrideModeWhenNotSentInRequest, "over-url", "over-user", "over-pass"), expCredentialsValues: []*gqlschema.ConfigEntryInput{ {Key: "config.sm.url", Value: "over-url"}, {Key: "sm.user", Value: "over-user"}, {Key: "sm.password", Value: "over-pass", Secret: ptr.Bool(true)}, }, }, "do not apply override for Service Manager credentials because they are present in request": { requestParams: ts.SMRequestParameters("req-url", "req-user", "req-pass"), overrideParams: ts.SMOverrideConfig(servicemanager.SMOverrideModeWhenNotSentInRequest, "over-url", "over-user", "over-pass"), expCredentialsValues: []*gqlschema.ConfigEntryInput{ {Key: "config.sm.url", 
Value: "req-url"}, {Key: "sm.user", Value: "req-user"}, {Key: "sm.password", Value: "req-pass", Secret: ptr.Bool(true)}, }, }, } for tN, tC := range tests { t.Run(tN, func(t *testing.T) { // given inputCreatorMock := &automock.ProvisionerInputCreator{} inputCreatorMock.On("AppendOverrides", ServiceManagerComponentName, tC.expCredentialsValues). Return(nil).Once() inputCreatorMock.On("EnableOptionalComponent", mock.Anything). Return(nil) inputCreatorMock.On("DisableOptionalComponent", mock.Anything). Return(nil) factory := servicemanager.NewClientFactory(tC.overrideParams) operation := internal.UpgradeKymaOperation{ Operation: internal.Operation{ ProvisioningParameters: tC.requestParams, }, InputCreator: inputCreatorMock, SMClientFactory: factory, } memoryStorage := storage.NewMemoryStorage() smStep := NewServiceManagerOverridesStep(memoryStorage.Operations()) // when gotOperation, retryTime, err := smStep.Run(operation, NewLogDummy()) // then require.NoError(t, err) assert.Zero(t, retryTime) assert.Equal(t, operation, gotOperation) inputCreatorMock.AssertExpectations(t) }) } }
explode_data.jsonl/12552
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1251 }
[ 2830, 3393, 1860, 2043, 80010, 8304, 7188, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 13716, 2177, 2271, 28000, 31483, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 23555, 4870, 286, 5306, 7763, 13013, 287, 9706, 198, 197, 50284, 4870, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMapProxy_AddEntryListenerToKeyWithPredicate(t *testing.T) { var wg *sync.WaitGroup = new(sync.WaitGroup) entryAdded := &AddEntry{wg: wg} registrationId, err := mp.AddEntryListenerToKeyWithPredicate(entryAdded, Equal("this", "value1"), "key1", true) AssertEqual(t, err, nil, nil) wg.Add(1) mp.Put("key1", "value1") timeout := WaitTimeout(wg, Timeout) AssertEqualf(t, nil, false, timeout, "AddEntryListenerToKeyWithPredicate failed") wg.Add(1) mp.Put("key1", "value2") timeout = WaitTimeout(wg, Timeout/20) AssertEqualf(t, nil, true, timeout, "AddEntryListenerToKeyWithPredicate failed") mp.RemoveEntryListener(registrationId) mp.Clear() }
explode_data.jsonl/57035
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 2227, 16219, 21346, 5874, 2743, 1249, 1592, 2354, 36329, 1155, 353, 8840, 836, 8, 341, 2405, 63581, 353, 12996, 28384, 2808, 284, 501, 97233, 28384, 2808, 340, 48344, 19337, 1669, 609, 2212, 5874, 90, 45540, 25, 63581, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAdmitSuccess(t *testing.T) { // create the annotated namespace and add it to the fake client namespace := admissiontesting.CreateNamespaceForTest() serviceAccount := admissiontesting.CreateSAForTest() serviceAccount.Namespace = namespace.Name tc := clientsetfake.NewSimpleClientset(namespace, serviceAccount) // used for cases where things are preallocated defaultGroup := int64(2) // create scc that requires allocation retrieval saSCC := saSCC() // create scc that has specific requirements that shouldn't match but is permissioned to // service accounts to test that even though this has matching priorities (0) and a // lower point value score (which will cause it to be sorted in front of scc-sa) it should not // validate the requests so we should try scc-sa. saExactSCC := saExactSCC() lister := createSCCLister(t, []*securityapi.SecurityContextConstraints{ saExactSCC, saSCC, }) testAuthorizer := &sccTestAuthorizer{t: t} // create the admission plugin p := newTestAdmission(lister, tc, testAuthorizer) // specifies a UID in the range of the preallocated UID annotation specifyUIDInRange := goodPod() var goodUID int64 = 3 specifyUIDInRange.Spec.Containers[0].SecurityContext.RunAsUser = &goodUID // specifies an mcs label that matches the preallocated mcs annotation specifyLabels := goodPod() specifyLabels.Spec.Containers[0].SecurityContext.SELinuxOptions = &kapi.SELinuxOptions{ Level: "s0:c1,c0", } // specifies an FSGroup in the range of preallocated sup group annotation specifyFSGroupInRange := goodPod() // group in the range of a preallocated fs group which, by default is a single digit range // based on the first value of the ns annotation. 
goodFSGroup := int64(2) specifyFSGroupInRange.Spec.SecurityContext.FSGroup = &goodFSGroup // specifies a sup group in the range of preallocated sup group annotation specifySupGroup := goodPod() // group is not the default but still in the range specifySupGroup.Spec.SecurityContext.SupplementalGroups = []int64{3} specifyPodLevelSELinux := goodPod() specifyPodLevelSELinux.Spec.SecurityContext.SELinuxOptions = &kapi.SELinuxOptions{ Level: "s0:c1,c0", } seLinuxLevelFromNamespace := namespace.Annotations[allocator.MCSAnnotation] testCases := map[string]struct { pod *kapi.Pod expectedPodSC *kapi.PodSecurityContext expectedContainerSC *kapi.SecurityContext }{ "specifyUIDInRange": { pod: specifyUIDInRange, expectedPodSC: podSC(seLinuxLevelFromNamespace, defaultGroup, defaultGroup), expectedContainerSC: containerSC(nil, goodUID), }, "specifyLabels": { pod: specifyLabels, expectedPodSC: podSC(seLinuxLevelFromNamespace, defaultGroup, defaultGroup), expectedContainerSC: containerSC(&seLinuxLevelFromNamespace, 1), }, "specifyFSGroup": { pod: specifyFSGroupInRange, expectedPodSC: podSC(seLinuxLevelFromNamespace, goodFSGroup, defaultGroup), expectedContainerSC: containerSC(nil, 1), }, "specifySupGroup": { pod: specifySupGroup, expectedPodSC: podSC(seLinuxLevelFromNamespace, defaultGroup, 3), expectedContainerSC: containerSC(nil, 1), }, "specifyPodLevelSELinuxLevel": { pod: specifyPodLevelSELinux, expectedPodSC: podSC(seLinuxLevelFromNamespace, defaultGroup, defaultGroup), expectedContainerSC: containerSC(nil, 1), }, } for i := 0; i < 2; i++ { for k, v := range testCases { v.pod.Spec.Containers, v.pod.Spec.InitContainers = v.pod.Spec.InitContainers, v.pod.Spec.Containers hasErrors := testSCCAdmission(v.pod, p, saSCC.Name, k, t) if hasErrors { continue } containers := v.pod.Spec.Containers if i == 0 { containers = v.pod.Spec.InitContainers } if !reflect.DeepEqual(v.expectedPodSC, v.pod.Spec.SecurityContext) { t.Errorf("%s unexpected pod SecurityContext diff:\n%s", k, 
diff.ObjectGoPrintSideBySide(v.expectedPodSC, v.pod.Spec.SecurityContext)) } if !reflect.DeepEqual(v.expectedContainerSC, containers[0].SecurityContext) { t.Errorf("%s unexpected container SecurityContext diff:\n%s", k, diff.ObjectGoPrintSideBySide(v.expectedContainerSC, containers[0].SecurityContext)) } } } }
explode_data.jsonl/25926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1540 }
[ 2830, 3393, 2589, 1763, 7188, 1155, 353, 8840, 836, 8, 341, 197, 322, 1855, 279, 62851, 4473, 323, 912, 432, 311, 279, 12418, 2943, 198, 56623, 1669, 25293, 8840, 7251, 22699, 2461, 2271, 2822, 52934, 7365, 1669, 25293, 8840, 7251, 7778...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestUsesInfo(t *testing.T) { var tests = []struct { src string obj string want string }{ {`package p0; func _() { _ = x }; const x = 42`, `x`, `const p0.x untyped int`}, {`package p1; func _() { _ = x }; const x int = 42`, `x`, `const p1.x int`}, {`package p2; func _() { _ = x }; var x int`, `x`, `var p2.x int`}, {`package p3; func _() { type _ x }; type x int`, `x`, `type p3.x int`}, {`package p4; func _() { _ = f }; func f()`, `f`, `func p4.f()`}, } for _, test := range tests { info := Info{ Uses: make(map[*ast.Ident]Object), } name := mustTypecheck(t, "UsesInfo", test.src, &info) // find object var use Object for id, obj := range info.Uses { if id.Name == test.obj { use = obj break } } if use == nil { t.Errorf("package %s: %s not found", name, test.obj) continue } if got := use.String(); got != test.want { t.Errorf("package %s: got %s; want %s", name, got, test.want) } } }
explode_data.jsonl/55539
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 434 }
[ 2830, 3393, 68965, 1731, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 41144, 220, 914, 198, 197, 22671, 220, 914, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 90, 63, 1722, 281, 15, 26, 2915, 716, 368, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestBlockNumber(t *testing.T) { c := GetClient(t) // cannot use big.NewInt to construct json request // TODO: analysis the ethereum's big.NewInt bn, err := c.GetBlockNumber(context.Background()) if err != nil { t.Fatalf("block number not found: %v", err) } t.Logf("latest block number: \n%s", bn) }
explode_data.jsonl/46904
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 4713, 2833, 1155, 353, 8840, 836, 8, 341, 262, 272, 1669, 2126, 2959, 1155, 340, 197, 322, 4157, 990, 2409, 7121, 1072, 311, 9245, 2951, 1681, 198, 197, 322, 5343, 25, 6358, 279, 84522, 594, 2409, 7121, 1072, 198, 2233, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParsePointIntsFloats(t *testing.T) { pts, err := models.ParsePoints([]byte(`cpu,host=serverA,region=us-east int=10i,float=11.0,float2=12.1 1000000000`)) if err != nil { t.Fatalf(`ParsePoints() failed. got %s`, err) } if exp := 1; len(pts) != exp { t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp) } pt := pts[0] fields, err := pt.Fields() if err != nil { t.Fatal(err) } if _, ok := fields["int"].(int64); !ok { t.Errorf("ParsePoint() int field mismatch: got %T, exp %T", fields["int"], int64(10)) } if _, ok := fields["float"].(float64); !ok { t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(11.0)) } if _, ok := fields["float2"].(float64); !ok { t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(12.1)) } }
explode_data.jsonl/16955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 359 }
[ 2830, 3393, 14463, 2609, 1072, 82, 5442, 82, 1155, 353, 8840, 836, 8, 341, 197, 12754, 11, 1848, 1669, 4119, 8937, 11411, 10556, 3782, 5809, 16475, 11, 3790, 28, 4030, 32, 11, 3943, 28, 355, 39507, 526, 28, 16, 15, 72, 45509, 28, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAll(t *testing.T) { if !checkDevHost() { return } cases := []struct { name string in map[string]interface{} expectLoadConfigError error expectWriteError error }{ { name: "required", in: map[string]interface{}{ "id": "influxdb_1", "addr": "http://10.200.7.21:8086", "precision": "ns", "database": "db0", }, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { si := &SinkInfluxDB{} err := si.LoadConfig(tc.in) assert.Equal(t, tc.expectLoadConfigError, err) pts := getTestPoints(t, 1000, 42) var newPts []sinkcommon.ISinkPoint for _, v := range pts { newPts = append(newPts, sinkcommon.ISinkPoint(v)) } err = si.Write(newPts) assert.Equal(t, tc.expectWriteError, err) }) } }
explode_data.jsonl/42671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 420 }
[ 2830, 3393, 2403, 1155, 353, 8840, 836, 8, 341, 743, 753, 2028, 14592, 9296, 368, 341, 197, 853, 198, 197, 630, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 1698, 914, 198, 197, 17430, 503, 2415, 14032, 31344, 16094, 197, 24952, 587...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetEC2InstanceIDIIDError(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() ec2MetadataClient := mock_ec2.NewMockEC2MetadataClient(ctrl) agent := &ecsAgent{ec2MetadataClient: ec2MetadataClient} ec2MetadataClient.EXPECT().InstanceID().Return("", errors.New("error")) ec2MetadataClient.EXPECT().InstanceID().Return("", errors.New("error")) ec2MetadataClient.EXPECT().InstanceID().Return("", errors.New("error")) assert.Equal(t, "", agent.getEC2InstanceID()) }
explode_data.jsonl/41592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 1949, 7498, 17, 2523, 32272, 915, 1454, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 757, 17, 14610, 2959, 1669, 7860, 36844, 17, 7121, 11571, 7498, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRender(t *testing.T) { mod, err := utilities.ReadModule(".") assert.NoError(t, err) mod.PackagePathFilters = append(mod.PackagePathFilters, "_test") b, err := Render(mod) assert.NoError(t, err) assert.Equal(t, "\n## Install\n\n```bash\ngo get -u github.com/heindl/goethe\n```\n", string(b)) }
explode_data.jsonl/17767
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 6750, 1155, 353, 8840, 836, 8, 1476, 42228, 11, 1848, 1669, 29419, 6503, 3332, 5680, 1138, 6948, 35699, 1155, 11, 1848, 692, 42228, 49834, 1820, 28351, 284, 8737, 23351, 49834, 1820, 28351, 11, 9000, 1944, 5130, 2233, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheck(t *testing.T) { counter := &metricsfakes.Counter{} counter.WithReturns(counter) m := metrics.Observer{CheckedCounter: counter} entry := zapcore.Entry{Level: zapcore.DebugLevel} checkedEntry := &zapcore.CheckedEntry{} m.Check(entry, checkedEntry) assert.Equal(t, 1, counter.WithCallCount()) assert.Equal(t, []string{"level", "debug"}, counter.WithArgsForCall(0)) assert.Equal(t, 1, counter.AddCallCount()) assert.Equal(t, float64(1), counter.AddArgsForCall(0)) }
explode_data.jsonl/1359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 3973, 1155, 353, 8840, 836, 8, 341, 58261, 1669, 609, 43262, 69, 2050, 86291, 16094, 58261, 26124, 16446, 41320, 692, 2109, 1669, 16734, 85721, 90, 12666, 14099, 25, 5546, 532, 48344, 1669, 32978, 2153, 22330, 90, 4449, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultipleRDN(t *testing.T) { block, _ := pem.Decode([]byte(certMultipleRDN)) cert, err := ParseCertificate(block.Bytes) if err != nil { t.Fatalf("certificate with two elements in an RDN failed to parse: %v", err) } if want := "eportal.mss.edus.si"; cert.Subject.CommonName != want { t.Errorf("got common name of %q, but want %q", cert.Subject.CommonName, want) } if want := "1236484010010"; cert.Subject.SerialNumber != want { t.Errorf("got serial number of %q, but want %q", cert.Subject.SerialNumber, want) } }
explode_data.jsonl/68013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 32089, 49, 31264, 1155, 353, 8840, 836, 8, 341, 47996, 11, 716, 1669, 54184, 56372, 10556, 3782, 87793, 32089, 49, 31264, 1171, 1444, 529, 11, 1848, 1669, 14775, 33202, 18682, 36868, 340, 743, 1848, 961, 2092, 341, 197, 3244...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAnonymityPrincipal(t *testing.T) { msp1, err := setup("testdata/idemix/MSP1OU1", "MSP1OU1") assert.NoError(t, err) id1, err := getDefaultSigner(msp1) assert.NoError(t, err) principalBytes, err := proto.Marshal(&msp.MSPIdentityAnonymity{AnonymityType: msp.MSPIdentityAnonymity_ANONYMOUS}) assert.NoError(t, err) principal := &msp.MSPPrincipal{ PrincipalClassification: msp.MSPPrincipal_ANONYMITY, Principal: principalBytes} err = id1.SatisfiesPrincipal(principal) assert.NoError(t, err) }
explode_data.jsonl/46038
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 2082, 7831, 487, 31771, 1155, 353, 8840, 836, 8, 341, 47691, 79, 16, 11, 1848, 1669, 6505, 445, 92425, 38146, 336, 941, 10270, 4592, 16, 11922, 16, 497, 330, 44, 4592, 16, 11922, 16, 1138, 6948, 35699, 1155, 11, 1848, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQueryCancellation(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) json.NewEncoder(w).Encode(&stmtResponse{ Error: stmtError{ ErrorName: "USER_CANCELLED", }, }) })) defer ts.Close() db, err := sql.Open("presto", ts.URL) if err != nil { t.Fatal(err) } defer db.Close() _, err = db.Query("SELECT 1") if err != ErrQueryCancelled { t.Fatal("unexpected error:", err) } }
explode_data.jsonl/62440
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 2859, 82298, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6692, 69794, 19886, 52989, 340, 197, 30847, 7121, 19921, 3622, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateDSNSupportsOldPasswords(t *testing.T) { os.Args = []string{ "cmd", "-hostname=dbhost", "-username=dbuser", "-password=dbpwd", "-port=1234", "-old_passwords", } _, err := integration.New(integrationName, integrationVersion, integration.Args(&args)) fatalIfErr(err) assert.Equal(t, "dbuser:dbpwd@tcp(dbhost:1234)/?allowOldPasswords=true", generateDSN(args)) flag.CommandLine = flag.NewFlagSet("cmd", flag.ContinueOnError) }
explode_data.jsonl/13421
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 31115, 5936, 2448, 2800, 82, 18284, 84546, 1155, 353, 8840, 836, 8, 341, 25078, 51015, 284, 3056, 917, 515, 197, 197, 1, 8710, 756, 197, 197, 34294, 27806, 57752, 3790, 756, 197, 197, 34294, 5113, 57752, 872, 756, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshaller(t *testing.T) { u := unmarshaller{&testdataClient{}} list, err := u.jobsList(25, 0) require.NoError(t, err) assert.Equal(t, 2, len(list.Jobs)) activeRuns, err := u.activeJobRuns(25, 0) require.NoError(t, err) assert.Equal(t, 2, len(activeRuns.Runs)) completedRuns, err := u.completedJobRuns(288, 25, 0) require.NoError(t, err) assert.Equal(t, "SUCCESS", completedRuns.Runs[0].State.ResultState) }
explode_data.jsonl/22270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 87690, 1155, 353, 8840, 836, 8, 341, 10676, 1669, 650, 52541, 90, 5, 92425, 2959, 6257, 532, 14440, 11, 1848, 1669, 575, 73436, 852, 7, 17, 20, 11, 220, 15, 340, 17957, 35699, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCallingQuitMultipleTimesShouldNotBlock(t *testing.T) { worker := work.NewWorker(1, func(p work.Payload) interface{} { return fmt.Sprintf("%s.", p.Data) }, true) worker.Quit() worker.Quit() }
explode_data.jsonl/11916
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 48853, 42856, 32089, 18889, 14996, 2623, 4713, 1155, 353, 8840, 836, 8, 341, 197, 21462, 1669, 975, 7121, 21936, 7, 16, 11, 2915, 1295, 975, 86432, 8, 3749, 6257, 341, 197, 853, 8879, 17305, 4430, 82, 10465, 281, 3336, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestParseAlterStatementRenameTable(t *testing.T) { { parser := NewAlterTableParser() statement := "drop column b" err := parser.ParseAlterStatement(statement) test.S(t).ExpectNil(err) test.S(t).ExpectFalse(parser.isRenameTable) } { parser := NewAlterTableParser() statement := "rename as something_else" err := parser.ParseAlterStatement(statement) test.S(t).ExpectNil(err) test.S(t).ExpectTrue(parser.isRenameTable) } { parser := NewAlterTableParser() statement := "drop column b, rename as something_else" err := parser.ParseAlterStatement(statement) test.S(t).ExpectNil(err) test.S(t).ExpectEquals(parser.alterStatementOptions, statement) test.S(t).ExpectTrue(parser.isRenameTable) } { parser := NewAlterTableParser() statement := "engine=innodb rename as something_else" err := parser.ParseAlterStatement(statement) test.S(t).ExpectNil(err) test.S(t).ExpectTrue(parser.isRenameTable) } { parser := NewAlterTableParser() statement := "rename as something_else, engine=innodb" err := parser.ParseAlterStatement(statement) test.S(t).ExpectNil(err) test.S(t).ExpectTrue(parser.isRenameTable) } }
explode_data.jsonl/71266
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 457 }
[ 2830, 3393, 14463, 74290, 8636, 88757, 2556, 1155, 353, 8840, 836, 8, 1476, 197, 515, 197, 55804, 1669, 1532, 74290, 2556, 6570, 741, 197, 89566, 1669, 330, 6719, 3250, 293, 698, 197, 9859, 1669, 6729, 8937, 74290, 8636, 60971, 340, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInsertOffers(t *testing.T) { tt := test.Start(t) defer tt.Finish() test.ResetHorizonDB(t, tt.HorizonDB) q := &Q{tt.HorizonSession()} err := insertOffer(q, eurOffer) tt.Assert.NoError(err) err = insertOffer(q, twoEurOffer) tt.Assert.NoError(err) offers, err := q.GetAllOffers() tt.Assert.NoError(err) tt.Assert.Len(offers, 2) offersByID := map[int64]Offer{ offers[0].OfferID: offers[0], offers[1].OfferID: offers[1], } tt.Assert.Equal(offersByID[eurOffer.OfferID], eurOffer) tt.Assert.Equal(offersByID[twoEurOffer.OfferID], twoEurOffer) count, err := q.CountOffers() tt.Assert.NoError(err) tt.Assert.Equal(2, count) numRemoved, err := q.CompactOffers(12350) tt.Assert.NoError(err) tt.Assert.Equal(int64(0), numRemoved) seq, err := q.GetOfferCompactionSequence() tt.Assert.NoError(err) tt.Assert.Equal(uint32(12350), seq) afterCompactionCount, err := q.CountOffers() tt.Assert.NoError(err) tt.Assert.Equal(2, afterCompactionCount) afterCompactionOffers, err := q.GetAllOffers() tt.Assert.NoError(err) tt.Assert.Len(afterCompactionOffers, 2) }
explode_data.jsonl/69696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 492 }
[ 2830, 3393, 13780, 95509, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 1273, 12101, 1155, 340, 16867, 17853, 991, 18176, 741, 18185, 36660, 39601, 16973, 3506, 1155, 11, 17853, 3839, 269, 16973, 3506, 340, 18534, 1669, 609, 48, 90, 556...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExpandUserCurrentUser(t *testing.T) { path, err := ExpandUser("~/rest/of/path") expected := os.Getenv("HOME") + "/rest/of/path" assert.NilError(t, err) assert.Equal(t, expected, path) }
explode_data.jsonl/34412
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 38946, 1474, 78257, 1155, 353, 8840, 836, 8, 341, 26781, 11, 1848, 1669, 50141, 1474, 50919, 3927, 96745, 50976, 1138, 42400, 1669, 2643, 64883, 445, 27546, 899, 488, 3521, 3927, 96745, 50976, 1837, 6948, 59678, 1454, 1155, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNewClient(t *testing.T) { const ( testAccessKey = "access granted" testSecretKey = "this is very secret" testHost = "certainly.a.unique.host" testUserAgent = "certainly.a.unique.userAgent" ) client := NewClient(ClientConfiguration{ AccessKey: testAccessKey, SecretKey: testSecretKey, Host: testHost, UserAgent: testUserAgent, }) expectSame(t, client.AccessKey, testAccessKey, "client.AccessKey") expectSame(t, client.SecretKey, testSecretKey, "client.SecretKey") expectSame(t, client.Host, testHost, "client.Host") expectSame(t, client.UserAgent, testUserAgent, "client.UserAgent") }
explode_data.jsonl/74264
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 3564, 2959, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 18185, 6054, 1592, 284, 330, 5211, 11676, 698, 197, 18185, 19773, 1592, 284, 330, 574, 374, 1602, 6234, 698, 197, 18185, 9296, 414, 284, 330, 66, 7615, 398, 5849, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReplaceRegexp_ReplacesMatchesWithSpecifiedText(t *testing.T) { t.Parallel() input := "hello world" tcs := []struct { regex, replace, want string }{ { regex: "hel+o", replace: "bye", want: "bye world\n", }, { regex: "Does not .* in input", replace: "Will not appear in output", want: "hello world\n", }, { regex: "^([a-z]+) ([a-z]+)", replace: "$1 cruel $2", want: "hello cruel world\n", }, { regex: "hello{1}", replace: "Ж9", want: "Ж9 world\n", }, } for _, tc := range tcs { got, err := script.Echo(input).ReplaceRegexp(regexp.MustCompile(tc.regex), tc.replace).String() if err != nil { t.Fatal(err) } if tc.want != got { t.Error(cmp.Diff(tc.want, got)) } } }
explode_data.jsonl/51497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 23107, 3477, 4580, 50693, 26078, 42470, 2354, 8327, 1870, 1178, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 22427, 1669, 330, 14990, 1879, 698, 3244, 4837, 1669, 3056, 1235, 341, 197, 197, 26387, 11, 8290, 11, 1366...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFacetRegex(t *testing.T) { // This is a possible solution for auto-complete out, _ := Search("github").Size("0").Facet( Facet().Regex("repository.name", "no.*").Size("8"), ).Result() if out == nil || &out.Hits == nil { t.Fail() return } //Debug(string(out.Facets)) fh := gou.NewJsonHelper([]byte(out.Facets)) facets := fh.Helpers("/repository.name/terms") assert.T(t, len(facets) == 8, fmt.Sprintf("Should have 8? but was %v", len(facets))) // for _, f := range facets { // Debug(f) // } }
explode_data.jsonl/73256
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 28702, 295, 32464, 1155, 353, 8840, 836, 8, 1476, 197, 322, 1096, 374, 264, 3204, 6291, 369, 3233, 74414, 198, 13967, 11, 716, 1669, 7542, 445, 5204, 1827, 1695, 445, 15, 1827, 28702, 295, 1006, 197, 12727, 67223, 1005, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInterpretStringAccess(t *testing.T) { t.Parallel() inter := parseCheckAndInterpret(t, ` fun test(): Type { let c: Character = "x"[0] return c.getType() } `) result, err := inter.Invoke("test") require.NoError(t, err) require.Equal(t, interpreter.TypeValue{Type: interpreter.PrimitiveStaticTypeCharacter}, result, ) }
explode_data.jsonl/73420
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 3306, 8043, 703, 6054, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 58915, 1669, 4715, 3973, 3036, 3306, 8043, 1155, 11, 22074, 262, 2464, 1273, 4555, 3990, 341, 286, 1077, 272, 25, 15633, 284, 330, 87, 36864, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDate(t *testing.T) { t.Parallel() ctx := NewIsolate().NewContext() res, err := ctx.Eval(`new Date("2018-05-08T08:16:46.918Z")`, "date.js") if err != nil { t.Fatal(err) } tm, err := res.Date() if err != nil { t.Error(err) } else if tm.UnixNano() != 1525767406918*1e6 { t.Errorf("Wrong date: %q", tm) } }
explode_data.jsonl/81592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 1916, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 1532, 3872, 33066, 1005, 3564, 1972, 2822, 10202, 11, 1848, 1669, 5635, 5142, 831, 5809, 931, 2631, 445, 17, 15, 16, 23, 12, 15, 20, 12, 15, 23, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBadRequestBody_2(t *testing.T) { ts := httptest.NewServer(NewHandler()) r := &csr.CertificateRequest{} csrBytes, err := json.Marshal(r) if err != nil { t.Fatal(err) } data := bytes.NewReader(csrBytes) req, _ := http.NewRequest("POST", ts.URL, data) resp, _ := http.DefaultClient.Do(req) if resp.StatusCode == http.StatusOK { t.Fatal(resp.Status) } }
explode_data.jsonl/4730
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 17082, 33334, 62, 17, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 5475, 35063, 3050, 2398, 7000, 1669, 609, 77851, 727, 20962, 1900, 16094, 1444, 15094, 7078, 11, 1848, 1669, 2951, 37271, 2601, 340, 743, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSaveChainStatus(t *testing.T) { defer os.RemoveAll("temp") testDB := dbm.NewDB("testdb", "leveldb", "temp") store := NewStore(testDB) node := &state.BlockNode{Height: 100, Hash: bc.Hash{V0: 0, V1: 1, V2: 2, V3: 3}} view := &state.UtxoViewpoint{ Entries: map[bc.Hash]*storage.UtxoEntry{ bc.Hash{V0: 1, V1: 2, V2: 3, V3: 4}: &storage.UtxoEntry{IsCoinBase: false, BlockHeight: 100, Spent: false}, bc.Hash{V0: 1, V1: 2, V2: 3, V3: 4}: &storage.UtxoEntry{IsCoinBase: true, BlockHeight: 100, Spent: true}, bc.Hash{V0: 1, V1: 1, V2: 3, V3: 4}: &storage.UtxoEntry{IsCoinBase: false, BlockHeight: 100, Spent: true}, }, } if err := store.SaveChainStatus(node, view); err != nil { t.Fatal(err) } expectStatus := &protocol.BlockStoreState{Height: node.Height, Hash: &node.Hash} if !testutil.DeepEqual(store.GetStoreStatus(), expectStatus) { t.Errorf("got block status:%v, expect block status:%v", store.GetStoreStatus(), expectStatus) } for hash, utxo := range view.Entries { if utxo.Spent && !utxo.IsCoinBase { continue } gotUtxo, err := store.GetUtxo(&hash) if err != nil { t.Fatal(err) } if !testutil.DeepEqual(utxo, gotUtxo) { t.Errorf("got utxo entry:%v, expect utxo entry:%v", gotUtxo, utxo) } } }
explode_data.jsonl/49976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 560 }
[ 2830, 3393, 8784, 18837, 2522, 1155, 353, 8840, 836, 8, 341, 16867, 2643, 84427, 445, 3888, 1138, 18185, 3506, 1669, 2927, 76, 7121, 3506, 445, 1944, 1999, 497, 330, 3449, 783, 65, 497, 330, 3888, 1138, 57279, 1669, 1532, 6093, 8623, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestMatchError(t *testing.T) { named, err := ParseAnyReference("foo") if err != nil { t.Fatal(err) } _, err = FamiliarMatch("[-x]", named) if err == nil { t.Fatalf("expected an error, got nothing") } }
explode_data.jsonl/8780
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 8331, 1454, 1155, 353, 8840, 836, 8, 341, 9038, 3606, 11, 1848, 1669, 14775, 8610, 8856, 445, 7975, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 197, 6878, 1848, 284, 33601, 9538, 8331, 445, 7609...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestKeyValuesAndSizes(t *testing.T) { e, _ := NewRedisExporter( os.Getenv("TEST_REDIS_URI"), Options{Namespace: "test", CheckSingleKeys: dbNumStrFull + "=" + url.QueryEscape(keys[0])}, ) setupDBKeys(t, os.Getenv("TEST_REDIS_URI")) defer deleteKeysFromDB(t, os.Getenv("TEST_REDIS_URI")) chM := make(chan prometheus.Metric) go func() { e.Collect(chM) close(chM) }() want := map[string]bool{"test_key_size": false, "test_key_value": false} for m := range chM { for k := range want { if strings.Contains(m.Desc().String(), k) { want[k] = true } } } for k, found := range want { if !found { t.Errorf("didn't find %s", k) } } }
explode_data.jsonl/46986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 1592, 6227, 3036, 34930, 1155, 353, 8840, 836, 8, 341, 7727, 11, 716, 1669, 1532, 48137, 88025, 1006, 197, 25078, 64883, 445, 10033, 2192, 21202, 23116, 4461, 197, 197, 3798, 90, 22699, 25, 330, 1944, 497, 4248, 10888, 8850,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetLatestSpotPrice(t *testing.T) { t.Parallel() _, err := b.GetLatestSpotPrice(context.Background(), currency.NewPair(currency.BTC, currency.USDT)) if err != nil { t.Error("Binance GetLatestSpotPrice() error", err) } }
explode_data.jsonl/76651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 1949, 31992, 47049, 6972, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 6878, 1848, 1669, 293, 2234, 31992, 47049, 6972, 5378, 19047, 1507, 11413, 7121, 12443, 90475, 1785, 7749, 11, 11413, 67672, 10599, 1171, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func Test_write_float32(t *testing.T) { vals := []float32{0, 1, -1, 99, 0xff, 0xfff, 0xffff, 0xfffff, 0xffffff, 0x4ffffff, 0xfffffff, -0x4ffffff, -0xfffffff, 1.2345, 1.23456, 1.234567, 1.001} for _, val := range vals { t.Run(fmt.Sprintf("%v", val), func(t *testing.T) { stream := jsoniter.NewStream() stream.WriteFloat32(val) output, err := json.Marshal(val) if err != nil { t.Fatal() } if string(output) != string(stream.Buffer()) { t.Fatal() } }) t.Run(fmt.Sprintf("%v", val), func(t *testing.T) { stream := jsoniter.NewStream() stream.WriteInterface(val) output, err := json.Marshal(val) if err != nil { t.Fatal() } if string(output) != string(stream.Buffer()) { t.Fatal() } }) } }
explode_data.jsonl/34818
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 367 }
[ 2830, 3393, 9165, 17586, 18, 17, 1155, 353, 8840, 836, 8, 341, 19302, 82, 1669, 3056, 3649, 18, 17, 90, 15, 11, 220, 16, 11, 481, 16, 11, 220, 24, 24, 11, 220, 15, 9020, 11, 220, 15, 87812, 11, 220, 15, 20518, 11, 220, 15, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestFetchAutoMigsZonal drives fetchAutoMigs against a mocked GCE HTTP
// server and checks that both instance groups in zoneB are auto-discovered
// as MIGs with the configured min/max sizes.
//
// NOTE(review): the sequence of server.On(...).Once() registrations mirrors
// the exact order of HTTP calls fetchAutoMigs/GetMigs make (list groups,
// per-MIG manager lookups, instance templates, managed-instance listings);
// do not reorder them, and the final AssertExpectationsForObjects verifies
// every stubbed call was consumed exactly once.
func TestFetchAutoMigsZonal(t *testing.T) { server := NewHttpServerMock() defer server.Close() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroups").Return(listInstanceGroups(zoneB)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigA).Return(getInstanceGroupManagerNamed(gceMigA, zoneB)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigB).Return(getInstanceGroupManagerNamed(gceMigB, zoneB)).Once() server.On("handle", "/project1/global/instanceTemplates/"+gceMigA).Return(instanceTemplate).Once() server.On("handle", "/project1/global/instanceTemplates/"+gceMigB).Return(instanceTemplate).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigA).Return(getInstanceGroupManagerNamed(gceMigA, zoneB)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigA+"/listManagedInstances").Return(getManagedInstancesResponse1Named(gceMigA, zoneB)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigB).Return(getInstanceGroupManagerNamed(gceMigB, zoneB)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigB+"/listManagedInstances").Return(getManagedInstancesResponse2Named(gceMigB, zoneB)).Once() regional := false g := newTestGceManager(t, server.URL, regional) min, max := 0, 100 g.migAutoDiscoverySpecs = []cloudprovider.MIGAutoDiscoveryConfig{ {Re: regexp.MustCompile("UNUSED"), MinSize: min, MaxSize: max}, } assert.NoError(t, g.fetchAutoMigs()) migs := g.GetMigs() assert.Equal(t, 2, len(migs)) validateMig(t, migs[0].Config, zoneB, gceMigA, min, max) validateMig(t, migs[1].Config, zoneB, gceMigB, min, max) mock.AssertExpectationsForObjects(t, server) }
explode_data.jsonl/69004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 685 }
[ 2830, 3393, 20714, 13253, 44, 14462, 57, 24202, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 1532, 2905, 5475, 11571, 741, 16867, 3538, 10421, 2822, 41057, 8071, 445, 8192, 497, 3521, 4987, 16, 31082, 3154, 33778, 8684, 33, 27569, 4851, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoDigestIfAlarmIsClearedOutOfOrder(t *testing.T) { require.Len(t, runLocalShardTest(t, change(user1, alarm1, wire.StatusCleared, time2), change(user1, alarm1, wire.StatusCritical, time1), send(user1), ), 0) }
explode_data.jsonl/3257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 2753, 45217, 2679, 43444, 3872, 34, 92333, 31731, 4431, 1155, 353, 8840, 836, 8, 341, 17957, 65819, 1155, 11, 1598, 7319, 2016, 567, 2271, 1155, 345, 197, 68380, 4277, 16, 11, 16624, 16, 11, 9067, 10538, 34, 92333, 11, 882...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNil(t *testing.T) { var err error Handlef("foo: %w", &err) if err != nil { t.Errorf("err == %v, want nil", err) } }
explode_data.jsonl/15635
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 60 }
[ 2830, 3393, 19064, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 197, 6999, 69, 445, 7975, 25, 1018, 86, 497, 609, 615, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 615, 621, 1018, 85, 11, 1366, 2092, 497, 1848, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func Test_write_array(t *testing.T) { should := require.New(t) buf := &bytes.Buffer{} stream := jsoner.NewStream(jsoner.Config{IndentionStep: 2}.Froze(), buf, 4096) stream.WriteArrayStart() stream.WriteInt(1) stream.WriteMore() stream.WriteInt(2) stream.WriteArrayEnd() stream.Flush() should.Nil(stream.Error) should.Equal("[\n 1,\n 2\n]", buf.String()) }
explode_data.jsonl/57878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 9165, 3858, 1155, 353, 8840, 836, 8, 341, 197, 5445, 1669, 1373, 7121, 1155, 340, 26398, 1669, 609, 9651, 22622, 16094, 44440, 1669, 2951, 261, 7121, 3027, 9304, 261, 10753, 90, 1425, 2939, 8304, 25, 220, 17, 7810, 37, 299...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1