text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestMetadataParsing(t *testing.T) { data := ` { "interface": [ { "ipv4": { "ipAddress": [ { "privateIpAddress": "10.0.1.4", "publicIpAddress": "X.X.X.X" } ], "subnet": [ { "address": "10.0.1.0", "prefix": "24" } ] }, "ipv6": { "ipAddress": [ ] }, "macAddress": "002248020E1E" } ] } ` network := NetworkMetadata{} if err := json.Unmarshal([]byte(data), &network); err != nil { t.Errorf("Unexpected error: %v", err) } ip := network.Interface[0].IPV4.IPAddress[0].PrivateIP if ip != "10.0.1.4" { t.Errorf("Unexpected value: %s, expected 10.0.1.4", ip) } server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { fmt.Fprintln(w, data) })) defer server.Close() metadata := &InstanceMetadata{ baseURL: server.URL, } networkJSON := NetworkMetadata{} if err := metadata.Object("/some/path", &networkJSON); err != nil { t.Errorf("Unexpected error: %v", err) } if !reflect.DeepEqual(network, networkJSON) { t.Errorf("Unexpected inequality:\n%#v\nvs\n%#v", network, networkJSON) } }
explode_data.jsonl/50419
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 655 }
[ 2830, 3393, 14610, 68839, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 22074, 515, 262, 330, 4970, 788, 2278, 414, 341, 286, 330, 42676, 19, 788, 341, 688, 330, 573, 4286, 788, 2278, 310, 341, 1060, 330, 1996, 98567, 788, 330, 16, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChequebookAddress(t *testing.T) { address := common.HexToAddress("0xabcd") ownerAdress := common.HexToAddress("0xfff") chequebookService, err := chequebook.New( transactionmock.New(), address, ownerAdress, nil, &chequeSignerMock{}, erc20mock.New(), ) if err != nil { t.Fatal(err) } if chequebookService.Address() != address { t.Fatalf("returned wrong address. wanted %x, got %x", address, chequebookService.Address()) } }
explode_data.jsonl/41438
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 26843, 591, 2190, 4286, 1155, 353, 8840, 836, 8, 341, 63202, 1669, 4185, 91538, 1249, 4286, 445, 15, 52616, 4385, 1138, 197, 8118, 2589, 673, 1669, 4185, 91538, 1249, 4286, 445, 15, 87812, 1138, 197, 1528, 591, 2190, 1860, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRewriteReferenceFailedDuringParseNamed(t *testing.T) { for _, c := range []struct{ inputRef, prefix, location string }{ // Invalid reference format {"example.com/foo/image:latest", "example.com/foo", "example.com/path/"}, {"example.com/foo@sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "example.com/foo", "example.com"}, {"example.com:5000/image:latest", "example.com", ""}, {"example.com:5000/image:latest", "example.com", "example.com:5000"}, // Malformed prefix {"example.com/foo/image:latest", "example.com//foo", "example.com/path"}, {"example.com/image:latest", "image", "anotherimage"}, {"example.com/foo/image:latest", "example.com/foo/", "example.com"}, {"example.com/foo/image", "example.com/fo", "example.com/foo"}, {"example.com/foo:latest", "example.com/fo", "example.com/foo"}, {"example.com/foo@sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "example.com/fo", "example.com/foo"}, {"docker.io/library/image", "example.com", "example.com"}, {"docker.io/library/image", "*.com", "example.com"}, {"foo.docker.io/library/image", "*.example.com", "example.com/image"}, {"foo.docker.io/library/image", "*.docker.com", "example.com/image"}, } { ref := toNamedRef(t, c.inputRef) testEndpoint := Endpoint{Location: c.location} out, err := testEndpoint.rewriteReference(ref, c.prefix) assert.NotNil(t, err) assert.Nil(t, out) } }
explode_data.jsonl/62237
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 535 }
[ 2830, 3393, 58465, 1247, 8856, 9408, 16014, 14463, 15810, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 272, 1669, 2088, 3056, 1235, 90, 1946, 3945, 11, 9252, 11, 3728, 914, 335, 515, 197, 197, 322, 13882, 5785, 3561, 198, 197, 197, 4913,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAuditPath(t *testing.T) { data := make([][]byte, 0) for _, a := range googleTestLeaves() { data = append(data, a) for i := 0; i < len(data); i++ { assert.True(t, bytes.Equal(MerkleTreeHash(data), RootFromAuditPath(data[i], i, len(data), AuditPath(i, data))), "fail for index %d", i) } } assert.False(t, bytes.Equal(MerkleTreeHash(data), RootFromAuditPath(nil, 0, len(data), nil)), "verified an empty audit path") assert.False(t, bytes.Equal(MerkleTreeHash(data), RootFromAuditPath(nil, len(data)-1, len(data), AuditPath(0, data))), "verified audit path with wrong index") }
explode_data.jsonl/45103
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 74516, 1820, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 1281, 10556, 1294, 3782, 11, 220, 15, 340, 2023, 8358, 264, 1669, 2088, 11558, 2271, 2304, 4693, 368, 341, 197, 8924, 284, 8737, 2592, 11, 264, 340, 197, 2023, 600, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewESCRNoTolerations(t *testing.T) { expTolerations := []v1.Toleration{} cluster := &logging.ClusterLogging{ Spec: logging.ClusterLoggingSpec{ LogStore: &logging.LogStoreSpec{ Type: "elasticsearch", ElasticsearchSpec: logging.ElasticsearchSpec{}, }, }, } cr := &ClusterLoggingRequest{ Cluster: cluster, } existing := &elasticsearch.Elasticsearch{} elasticsearchCR := cr.newElasticsearchCR("test-app-name", existing) tolerations := elasticsearchCR.Spec.Spec.Tolerations if !utils.AreTolerationsSame(tolerations, expTolerations) { t.Errorf("Exp. the tolerations to be %v but was %v", expTolerations, tolerations) } }
explode_data.jsonl/72381
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 3564, 1570, 8973, 2753, 51, 22072, 804, 1155, 353, 8840, 836, 8, 341, 48558, 51, 22072, 804, 1669, 3056, 85, 16, 836, 337, 20927, 31483, 197, 18855, 1669, 609, 25263, 72883, 34575, 515, 197, 7568, 992, 25, 8392, 72883, 345...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValueBinder_Times(t *testing.T) { exampleTime, _ := time.Parse(time.RFC3339, "2020-12-23T09:45:31+02:00") exampleTime2, _ := time.Parse(time.RFC3339, "2000-01-02T09:45:31+00:00") var testCases = []struct { name string givenFailFast bool givenBindErrors []error whenURL string whenMust bool whenLayout string expectValue []time.Time expectError string }{ { name: "ok, binds value", whenURL: "/search?param=2020-12-23T09:45:31%2B02:00&param=2000-01-02T09:45:31%2B00:00", whenLayout: time.RFC3339, expectValue: []time.Time{exampleTime, exampleTime2}, }, { name: "ok, params values empty, value is not changed", whenURL: "/search?nope=1", expectValue: []time.Time(nil), }, { name: "nok, previous errors fail fast without binding value", givenFailFast: true, givenBindErrors: []error{errors.New("previous error")}, whenURL: "/search?param=1&param=100", expectValue: []time.Time(nil), expectError: "previous error", }, { name: "ok (must), binds value", whenMust: true, whenURL: "/search?param=2020-12-23T09:45:31%2B02:00&param=2000-01-02T09:45:31%2B00:00", whenLayout: time.RFC3339, expectValue: []time.Time{exampleTime, exampleTime2}, }, { name: "ok (must), params values empty, returns error, value is not changed", whenMust: true, whenURL: "/search?nope=1", expectValue: []time.Time(nil), expectError: "code=400, message=required field value is empty, field=param", }, { name: "nok (must), previous errors fail fast without binding value", givenFailFast: true, givenBindErrors: []error{errors.New("previous error")}, whenMust: true, whenURL: "/search?param=1&param=100", expectValue: []time.Time(nil), expectError: "previous error", }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { c := createTestContext(tc.whenURL, nil, nil) b := QueryParamsBinder(c).FailFast(tc.givenFailFast) b.errors = tc.givenBindErrors layout := time.RFC3339 if tc.whenLayout != "" { layout = tc.whenLayout } var dest []time.Time var err error if tc.whenMust { err = b.MustTimes("param", &dest, 
layout).BindError() } else { err = b.Times("param", &dest, layout).BindError() } assert.Equal(t, tc.expectValue, dest) if tc.expectError != "" { assert.EqualError(t, err, tc.expectError) } else { assert.NoError(t, err) } }) } }
explode_data.jsonl/82555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1198 }
[ 2830, 3393, 1130, 44055, 1139, 1733, 1155, 353, 8840, 836, 8, 341, 8122, 1516, 1462, 11, 716, 1669, 882, 8937, 9730, 2013, 6754, 18, 18, 18, 24, 11, 330, 17, 15, 17, 15, 12, 16, 17, 12, 17, 18, 51, 15, 24, 25, 19, 20, 25, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPICTtoCfg(t *testing.T) { tests := []struct { name string inputs PICTMetricInputs cfg MetricsCfg }{ { name: "none", inputs: PICTMetricInputs{ NumResourceAttrs: AttrsNone, NumPtsPerMetric: NumPtsPerMetricOne, MetricType: MetricTypeIntGauge, NumPtLabels: LabelsNone, }, cfg: MetricsCfg{ NumResourceAttrs: 0, NumPtsPerMetric: 1, MetricDescriptorType: pmetric.MetricDataTypeGauge, MetricValueType: pmetric.MetricValueTypeInt, NumPtLabels: 0, }, }, { name: "one", inputs: PICTMetricInputs{ NumResourceAttrs: AttrsOne, NumPtsPerMetric: NumPtsPerMetricOne, MetricType: MetricTypeDoubleGauge, NumPtLabels: LabelsOne, }, cfg: MetricsCfg{ NumResourceAttrs: 1, NumPtsPerMetric: 1, MetricDescriptorType: pmetric.MetricDataTypeGauge, MetricValueType: pmetric.MetricValueTypeDouble, NumPtLabels: 1, }, }, { name: "many", inputs: PICTMetricInputs{ NumResourceAttrs: AttrsTwo, NumPtsPerMetric: NumPtsPerMetricMany, MetricType: MetricTypeDoubleExemplarsHistogram, NumPtLabels: LabelsMany, }, cfg: MetricsCfg{ NumResourceAttrs: 2, NumPtsPerMetric: 16, MetricDescriptorType: pmetric.MetricDataTypeHistogram, NumPtLabels: 16, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { actual := pictToCfg(test.inputs) expected := test.cfg assert.Equal(t, expected.NumResourceAttrs, actual.NumResourceAttrs) assert.Equal(t, expected.NumPtsPerMetric, actual.NumPtsPerMetric) assert.Equal(t, expected.MetricDescriptorType, actual.MetricDescriptorType) assert.Equal(t, expected.MetricValueType, actual.MetricValueType) assert.Equal(t, expected.NumPtLabels, actual.NumPtLabels) }) } }
explode_data.jsonl/31244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 940 }
[ 2830, 3393, 1893, 1162, 983, 42467, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 22427, 82, 393, 14805, 54310, 31946, 198, 197, 50286, 262, 54190, 42467, 198, 197, 59403, 197, 197, 515, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEtcdSDGetServers(t *testing.T) { t.Parallel() for _, table := range etcdSDTablesMultipleServers { config := config.NewDefaultEtcdServiceDiscoveryConfig() c, cli := helpers.GetTestEtcd(t) defer c.Terminate(t) e := getEtcdSD(t, *config, &Server{}, cli) e.Init() for _, server := range table.servers { e.bootstrapServer(server) } serverList := e.GetServers() var checkList []*Server checkList = append(table.servers, &Server{}) assert.ElementsMatch(t, checkList, serverList) } }
explode_data.jsonl/61563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 31860, 4385, 5491, 1949, 78139, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 8358, 1965, 1669, 2088, 1842, 4385, 5491, 21670, 32089, 78139, 341, 197, 25873, 1669, 2193, 7121, 3675, 31860, 4385, 1860, 67400, 2648...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_New_Package(t *testing.T) { r := require.New(t) g, err := New(&Options{ Name: "widget", Path: "models/admin", }) r.NoError(err) run := gentest.NewRunner() run.With(g) r.NoError(run.Run()) res := run.Results() r.Len(res.Commands, 0) r.Len(res.Files, 2) f, err := res.Find("models/admin/widget.go") r.NoError(err) r.Contains(f.String(), "package admin") }
explode_data.jsonl/6879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 39582, 1088, 1434, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 692, 3174, 11, 1848, 1669, 1532, 2099, 3798, 515, 197, 21297, 25, 330, 9797, 756, 197, 69640, 25, 330, 6507, 17402, 756, 197, 3518, 7000, 35699...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcileMaxEndpointsPerSlice(t *testing.T) { namespace := "test" svc, _ := newServiceAndendpointMeta("foo", namespace) // start with 250 pods pods := []*corev1.Pod{} for i := 0; i < 250; i++ { ready := !(i%3 == 0) pods = append(pods, newPod(i, namespace, ready, 1)) } testCases := []struct { maxEndpointsPerSlice int32 expectedSliceLengths []int }{ { maxEndpointsPerSlice: int32(50), expectedSliceLengths: []int{50, 50, 50, 50, 50}, }, { maxEndpointsPerSlice: int32(80), expectedSliceLengths: []int{80, 80, 80, 10}, }, { maxEndpointsPerSlice: int32(150), expectedSliceLengths: []int{150, 100}, }, { maxEndpointsPerSlice: int32(250), expectedSliceLengths: []int{250}, }, { maxEndpointsPerSlice: int32(500), expectedSliceLengths: []int{250}, }, } for _, testCase := range testCases { client := newClientset() r := newReconciler(client, []*corev1.Node{{ObjectMeta: metav1.ObjectMeta{Name: "node-1"}}}, testCase.maxEndpointsPerSlice) reconcileHelper(t, r, &svc, pods, []*discovery.EndpointSlice{}, time.Now()) expectUnorderedSlicesWithLengths(t, fetchEndpointSlices(t, client, namespace), testCase.expectedSliceLengths) } }
explode_data.jsonl/76061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 693, 40446, 457, 5974, 80786, 3889, 33236, 1155, 353, 8840, 836, 8, 341, 56623, 1669, 330, 1944, 698, 1903, 7362, 11, 716, 1669, 501, 1860, 3036, 32540, 12175, 445, 7975, 497, 4473, 692, 197, 322, 1191, 448, 220, 17, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetPVCFromName(t *testing.T) { tests := []struct { name string pvcName string wantErr bool }{ { name: "storage 10Gi", pvcName: "postgresql", wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { fakeClient, fakeClientSet := FakeNew() fakeClientSet.Kubernetes.PrependReactor("get", "persistentvolumeclaims", func(action ktesting.Action) (bool, runtime.Object, error) { return true, nil, nil }) _, err := fakeClient.GetPVCFromName(tt.pvcName) //Checks for error in positive cases if !tt.wantErr == (err != nil) { t.Errorf(" client.GetPVCFromName(name) unexpected error %v, wantErr %v", err, tt.wantErr) } // Check for validating actions performed if (len(fakeClientSet.Kubernetes.Actions()) != 1) && (tt.wantErr != true) { t.Errorf("expected 1 action in GetPVCFromName got: %v", fakeClientSet.Kubernetes.Actions()) } // Check for value with which the function has called PVCname := fakeClientSet.Kubernetes.Actions()[0].(ktesting.GetAction).GetName() if PVCname != tt.pvcName { t.Errorf("Get action is performed with wrong pvcName, expected: %s, got %s", tt.pvcName, PVCname) } }) } }
explode_data.jsonl/65142
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 500 }
[ 2830, 3393, 1949, 47, 11287, 3830, 675, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 3223, 7362, 675, 914, 198, 197, 50780, 7747, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_MapToMaps3(t *testing.T) { type Ids struct { Id int Uid int } type Base struct { Ids Time string } type User struct { Base Name string } params := g.MapIntAny{ 100: g.Slice{ g.Map{"id": 1, "name": "john"}, g.Map{"id": 2, "name": "smith"}, }, 200: g.Slice{ g.Map{"id": 3, "name": "green"}, g.Map{"id": 4, "name": "jim"}, }, } gtest.C(t, func(t *gtest.T) { m := make(map[string][]*User) err := gconv.MapToMaps(params, &m) t.Assert(err, nil) t.Assert(len(m), 2) t.Assert(m["100"][0].Id, 1) t.Assert(m["100"][1].Id, 2) t.Assert(m["100"][0].Name, "john") t.Assert(m["100"][1].Name, "smith") t.Assert(m["200"][0].Id, 3) t.Assert(m["200"][1].Id, 4) t.Assert(m["200"][0].Name, "green") t.Assert(m["200"][1].Name, "jim") }) }
explode_data.jsonl/41430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 427 }
[ 2830, 3393, 56992, 1249, 36562, 18, 1155, 353, 8840, 836, 8, 341, 13158, 5223, 82, 2036, 341, 197, 67211, 220, 526, 198, 197, 15980, 307, 526, 198, 197, 532, 13158, 5351, 2036, 341, 197, 197, 12701, 198, 197, 67567, 914, 198, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFailingSender(t *testing.T) { world, ri, sender, _, listener, _, tlf := setupTest(t, 1) defer world.Cleanup() u := world.GetUsers()[0] trip := newConvTriple(t, tlf, u.Username) res, err := ri.NewConversationRemote2(context.TODO(), chat1.NewConversationRemote2Arg{ IdTriple: trip, TLFMessage: chat1.MessageBoxed{ ClientHeader: chat1.MessageClientHeader{ Conv: trip, TlfName: u.Username, TlfPublic: false, }, KeyGeneration: 1, }, }) require.NoError(t, err) tc := userTc(t, world, u) tc.G.MessageDeliverer.(*Deliverer).SetSender(FailingSender{}) // Send nonblock var obids []chat1.OutboxID for i := 0; i < 5; i++ { obid, _, _, err := sender.Send(context.TODO(), res.ConvID, chat1.MessagePlaintext{ ClientHeader: chat1.MessageClientHeader{ Conv: trip, Sender: u.User.GetUID().ToBytes(), TlfName: u.Username, TlfPublic: false, }, }, 0) require.NoError(t, err) obids = append(obids, obid) } for i := 0; i < deliverMaxAttempts; i++ { tc.G.MessageDeliverer.ForceDeliverLoop(context.TODO()) } var recvd []chat1.OutboxRecord for i := 0; i < 5; i++ { select { case fid := <-listener.failing: recvd = append(recvd, fid...) case <-time.After(20 * time.Second): require.Fail(t, "event not received") } } require.Equal(t, len(obids), len(recvd), "invalid length") recordCompare(t, obids, recvd) state, err := recvd[0].State.State() require.NoError(t, err) require.Equal(t, chat1.OutboxStateType_ERROR, state, "wrong state type") }
explode_data.jsonl/50756
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 678 }
[ 2830, 3393, 37, 14277, 20381, 1155, 353, 8840, 836, 8, 1476, 76508, 11, 24185, 11, 4646, 11, 8358, 11446, 11, 8358, 259, 11008, 1669, 6505, 2271, 1155, 11, 220, 16, 340, 16867, 1879, 727, 60639, 2822, 10676, 1669, 1879, 2234, 7137, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestClientServer(t *testing.T) { tstest.PanicOnLog() tstest.ResourceCheck(t) b := &FakeBackend{} var bs *BackendServer var bc *BackendClient serverToClientCh := make(chan []byte, 16) defer close(serverToClientCh) go func() { for b := range serverToClientCh { bc.GotNotifyMsg(b) } }() serverToClient := func(n Notify) { b, err := json.Marshal(n) if err != nil { panic(err.Error()) } serverToClientCh <- append([]byte{}, b...) } clientToServer := func(b []byte) { bs.GotCommandMsg(context.TODO(), b) } slogf := func(fmt string, args ...interface{}) { t.Logf("s: "+fmt, args...) } clogf := func(fmt string, args ...interface{}) { t.Logf("c: "+fmt, args...) } bs = NewBackendServer(slogf, b, serverToClient) // Verify that this doesn't break bs's callback: NewBackendServer(slogf, b, nil) bc = NewBackendClient(clogf, clientToServer) ch := make(chan Notify, 256) notify := func(n Notify) { ch <- n } h, err := NewHandle(bc, clogf, notify, Options{ Prefs: &Prefs{ ControlURL: "http://example.com/fake", }, }) if err != nil { t.Fatalf("NewHandle error: %v\n", err) } notes := Notify{} nn := []Notify{} processNote := func(n Notify) { nn = append(nn, n) if n.State != nil { t.Logf("state change: %v", *n.State) notes.State = n.State } if n.Prefs != nil { notes.Prefs = n.Prefs } if n.NetMap != nil { notes.NetMap = n.NetMap } if n.Engine != nil { notes.Engine = n.Engine } if n.BrowseToURL != nil { notes.BrowseToURL = n.BrowseToURL } } notesState := func() State { if notes.State != nil { return *notes.State } return NoState } flushUntil := func(wantFlush State) { t.Helper() timer := time.NewTimer(1 * time.Second) loop: for { select { case n := <-ch: processNote(n) if notesState() == wantFlush { break loop } case <-timer.C: t.Fatalf("timeout waiting for state %v, got %v", wantFlush, notes.State) } } timer.Stop() loop2: for { select { case n := <-ch: processNote(n) default: break loop2 } } if got, want := h.State(), notesState(); got != want { t.Errorf("h.State()=%v, notes.State=%v (on flush 
until %v)\n", got, want, wantFlush) } } flushUntil(NeedsLogin) h.StartLoginInteractive() flushUntil(Running) if notes.NetMap == nil && h.NetMap() != nil { t.Errorf("notes.NetMap == nil while h.NetMap != nil\nnotes:\n%v", nn) } h.UpdatePrefs(func(p *Prefs) { p.WantRunning = false }) flushUntil(Stopped) h.Logout() flushUntil(NeedsLogin) h.Login(&tailcfg.Oauth2Token{ AccessToken: "google_id_token", TokenType: GoogleIDTokenType, }) flushUntil(Running) }
explode_data.jsonl/64515
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1180 }
[ 2830, 3393, 2959, 5475, 1155, 353, 8840, 836, 8, 341, 3244, 267, 477, 1069, 31270, 1925, 2201, 741, 3244, 267, 477, 20766, 3973, 1155, 692, 2233, 1669, 609, 52317, 29699, 16094, 2405, 17065, 353, 29699, 5475, 198, 2405, 17916, 353, 2969...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOperateWorkflowPanicRecover(t *testing.T) { defer func() { if r := recover(); r != nil { t.Fail() } }() cancel, controller := newController() defer cancel() // intentionally set clientset to nil to induce panic controller.kubeclientset = nil wf := unmarshalWF(helloWorldWf) ctx := context.Background() _, err := controller.wfclientset.ArgoprojV1alpha1().Workflows("").Create(ctx, wf, metav1.CreateOptions{}) assert.NoError(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) }
explode_data.jsonl/70942
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 5494, 349, 62768, 47, 31270, 693, 3688, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 743, 435, 1669, 11731, 2129, 435, 961, 2092, 341, 298, 3244, 57243, 741, 197, 197, 532, 197, 69826, 84441, 11, 6461, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGarbageCollectProperties(t *testing.T) { r := newJobsTestResolver(t) defer r.drv.Close() client := r.client ctx := viewertest.NewContext(context.Background(), client) locationType := client.LocationType.Create(). SetName("LocationType"). SaveX(ctx) location1 := client.Location.Create(). SetName("Location1"). SetType(locationType). SaveX(ctx) location2 := client.Location.Create(). SetName("Location2"). SetType(locationType). SaveX(ctx) propTypeToDelete := client.PropertyType.Create(). SetName("PropToDelete"). SetLocationType(locationType). SetType(propertytype.TypeString). SetDeleted(true). SaveX(ctx) propTypeToDelete2 := client.PropertyType.Create(). SetName("PropToDelete2"). SetLocationType(locationType). SetType(propertytype.TypeBool). SetDeleted(true). SaveX(ctx) propType := client.PropertyType.Create(). SetName("Prop"). SetLocationType(locationType). SetType(propertytype.TypeInt). SaveX(ctx) _ = client.Location.Create(). SetName("Location"). SetType(locationType). SaveX(ctx) propToDelete1 := client.Property.Create(). SetType(propTypeToDelete). SetStringVal("Prop1"). SetLocation(location1). SaveX(ctx) propToDelete2 := client.Property.Create(). SetType(propTypeToDelete). SetStringVal("Prop2"). SetLocation(location2). SaveX(ctx) propToDelete3 := client.Property.Create(). SetType(propTypeToDelete2). SetBoolVal(true). SetLocation(location1). SaveX(ctx) prop := client.Property.Create(). SetType(propType). SetIntVal(28). SetLocation(location1). 
SaveX(ctx) err := r.jobsRunner.collectProperties(ctx) require.NoError(t, err) require.False(t, client.PropertyType.Query().Where(propertytype.ID(propTypeToDelete.ID)).ExistX(ctx)) require.False(t, client.Property.Query().Where(property.ID(propToDelete1.ID)).ExistX(ctx)) require.False(t, client.Property.Query().Where(property.ID(propToDelete2.ID)).ExistX(ctx)) require.False(t, client.Property.Query().Where(property.ID(propToDelete3.ID)).ExistX(ctx)) require.True(t, client.PropertyType.Query().Where(propertytype.ID(propType.ID)).ExistX(ctx)) require.True(t, client.Property.Query().Where(property.ID(prop.ID)).ExistX(ctx)) }
explode_data.jsonl/38407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 804 }
[ 2830, 3393, 43930, 20652, 47504, 7903, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 501, 40667, 2271, 18190, 1155, 340, 16867, 435, 950, 10553, 10421, 741, 25291, 1669, 435, 6581, 198, 20985, 1669, 1651, 83386, 7121, 1972, 5378, 19047, 1507,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHelmRepoDiffLocal(t *testing.T) { SkipOnEnv(t, "HELM") helmTmp, err := ioutil.TempDir("", "argocd-helm-repo-diff-local-test") assert.NoError(t, err) Given(t). CustomCACertAdded(). HelmRepoAdded("custom-repo"). RepoURLType(RepoURLTypeHelm). Chart("helm"). Revision("1.0.0"). When(). Create(). Then(). When(). Sync(). Then(). Expect(OperationPhaseIs(OperationSucceeded)). Expect(HealthIs(health.HealthStatusHealthy)). Expect(SyncStatusIs(SyncStatusCodeSynced)). And(func(app *Application) { _ = os.Setenv("XDG_CONFIG_HOME", helmTmp) FailOnErr(Run("", "helm", "repo", "add", "custom-repo", GetEnvWithDefault("ARGOCD_E2E_HELM_SERVICE", RepoURL(RepoURLTypeHelm)), "--username", GitUsername, "--password", GitPassword, "--cert-file", "../fixture/certs/argocd-test-client.crt", "--key-file", "../fixture/certs/argocd-test-client.key", "--ca-file", "../fixture/certs/argocd-test-ca.crt", )) diffOutput := FailOnErr(RunCli("app", "diff", app.Name, "--local", "testdata/helm")).(string) assert.Empty(t, diffOutput) }) }
explode_data.jsonl/69417
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 488 }
[ 2830, 3393, 39, 23162, 25243, 21751, 7319, 1155, 353, 8840, 836, 8, 341, 7568, 13389, 1925, 14359, 1155, 11, 330, 1799, 10994, 1138, 9598, 23162, 35986, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 858, 509, 67, 2832, 23162, 5504, 53...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEval(t *testing.T) { view := NewBlockView(123455) colTypes := mock.MockColTypes(14) rows := uint64(64) attrs1 := make([]int, 0) vecs1 := make([]vector.IVector, 0) for i, colType := range colTypes { attrs1 = append(attrs1, i) vec := vector.MockVector(colType, rows) vec.ResetReadonly() vecs1 = append(vecs1, vec) } bat, err := batch.NewBatch(attrs1, vecs1) assert.Nil(t, err) view.Raw = bat view.UpdateMasks[1] = &roaring.Bitmap{} view.UpdateMasks[1].Add(3) view.UpdateVals[1] = make(map[uint32]interface{}) view.UpdateVals[1][3] = int16(7) view.UpdateMasks[13] = &roaring.Bitmap{} view.UpdateMasks[13].Add(4) view.UpdateVals[13] = make(map[uint32]interface{}) view.UpdateVals[13][4] = []byte("testEval") view.Eval() vec1, err := view.AppliedIBatch.GetVectorByAttr(1) assert.Nil(t, err) val, err := vec1.GetValue(3) assert.Nil(t, err) assert.Equal(t, int16(7), val) t.Logf("%v", vec1) vec2, err := view.AppliedIBatch.GetVectorByAttr(13) assert.Nil(t, err) val, err = vec2.GetValue(4) assert.Nil(t, err) assert.Equal(t, []byte("testEval"), val) val, err = vec2.GetValue(5) assert.Nil(t, err) assert.Equal(t, []byte("str5"), val) t.Logf("%v", vec2) }
explode_data.jsonl/42521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 54469, 1155, 353, 8840, 836, 8, 341, 36867, 1669, 1532, 4713, 851, 7, 16, 17, 18, 19, 20, 20, 340, 46640, 4173, 1669, 7860, 24664, 6127, 4173, 7, 16, 19, 340, 68438, 1669, 2622, 21, 19, 7, 21, 19, 340, 197, 20468, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreateWithoutLogin(t *testing.T) { username := "testuser-0" _, _, err := apiCreateUser(username, "") if err == nil { t.Fatal("Succeeed to create user without being logged-in") } }
explode_data.jsonl/52344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 4021, 26040, 6231, 1155, 353, 8840, 836, 8, 341, 72358, 1669, 330, 1944, 872, 12, 15, 698, 197, 6878, 8358, 1848, 1669, 6330, 4021, 1474, 17084, 11, 14676, 743, 1848, 621, 2092, 341, 197, 3244, 26133, 445, 53216, 346, 1205...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestNewMappingRuleHistory verifies that testMappingRule2.history() returns
// the rule's revisions as view.MappingRule values; here the tombstoned
// revision (cutover 67890) is listed before the earlier live one (12345).
func TestNewMappingRuleHistory(t *testing.T) {
	history, err := testMappingRule2.history()
	require.NoError(t, err)
	expected := []view.MappingRule{
		{
			ID:            "12669817-13ae-40e6-ba2f-33087b262c68",
			Name:          "bar",
			Tombstoned:    true,
			CutoverMillis: 67890,
			Filter:        "tag3:value3 tag4:value4",
			AggregationID: aggregation.MustCompressTypes(aggregation.Min, aggregation.Max),
			StoragePolicies: policy.StoragePolicies{
				policy.NewStoragePolicy(10*time.Minute, xtime.Minute, 1800*time.Hour),
			},
			LastUpdatedAtMillis: 67890,
			LastUpdatedBy:       "someone-else",
			Tags:                []models.Tag{},
		},
		{
			ID:            "12669817-13ae-40e6-ba2f-33087b262c68",
			Name:          "foo",
			Tombstoned:    false,
			CutoverMillis: 12345,
			Filter:        "tag1:value1 tag2:value2",
			AggregationID: aggregation.DefaultID,
			StoragePolicies: policy.StoragePolicies{
				policy.NewStoragePolicy(10*time.Second, xtime.Second, 24*time.Hour),
				policy.NewStoragePolicy(time.Minute, xtime.Minute, 720*time.Hour),
				policy.NewStoragePolicy(time.Hour, xtime.Hour, 365*24*time.Hour),
			},
			LastUpdatedAtMillis: 12345,
			LastUpdatedBy:       "someone",
			Tags:                []models.Tag{},
		},
	}
	require.Equal(t, expected, history)
}
explode_data.jsonl/64588
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 620 }
[ 2830, 3393, 3564, 6807, 11337, 13424, 1155, 353, 8840, 836, 8, 341, 9598, 2579, 11, 1848, 1669, 1273, 6807, 11337, 17, 23430, 741, 17957, 35699, 1155, 11, 1848, 692, 42400, 1669, 3056, 1050, 76455, 11337, 515, 197, 197, 515, 298, 29580,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSelectScatterLimit checks that a scatter ORDER BY ... LIMIT query is
// rewritten with the ":__upper_limit" bind variable and sent to every shard,
// and that the merged, ordered result is trimmed to the LIMIT of 3 rows.
func TestSelectScatterLimit(t *testing.T) {
	// Special setup: Don't use createLegacyExecutorEnv.
	cell := "aa"
	hc := discovery.NewFakeLegacyHealthCheck()
	s := createSandbox("TestExecutor")
	s.VSchema = executorVSchema
	getSandbox(KsTestUnsharded).VSchema = unshardedVSchema
	serv := new(sandboxTopo)
	resolver := newTestLegacyResolver(hc, serv, cell)
	shards := []string{"-20", "20-40", "40-60", "60-80", "80-a0", "a0-c0", "c0-e0", "e0-"}
	var conns []*sandboxconn.SandboxConn
	// Each fake shard returns one row: col1=1, col2 = shard index mod 4, so
	// the shards collectively produce col2 values 0..3 for ordering.
	for i, shard := range shards {
		sbc := hc.AddTestTablet(cell, shard, 1, "TestExecutor", shard, topodatapb.TabletType_MASTER, true, 1, nil)
		sbc.SetResults([]*sqltypes.Result{{
			Fields: []*querypb.Field{
				{Name: "col1", Type: sqltypes.Int32},
				{Name: "col2", Type: sqltypes.Int32},
			},
			RowsAffected: 1,
			InsertID:     0,
			Rows: [][]sqltypes.Value{{
				sqltypes.NewInt32(1),
				sqltypes.NewInt32(int32(i % 4)),
			}},
		}})
		conns = append(conns, sbc)
	}
	executor := NewExecutor(context.Background(), serv, cell, resolver, false, testBufferSize, testCacheSize)
	query := "select col1, col2 from user order by col2 desc limit 3"
	gotResult, err := executorExec(executor, query, nil)
	require.NoError(t, err)
	// Every shard must have received the rewritten query with the limit as a
	// bind variable rather than a literal.
	wantQueries := []*querypb.BoundQuery{{
		Sql:           "select col1, col2 from user order by col2 desc limit :__upper_limit",
		BindVariables: map[string]*querypb.BindVariable{"__upper_limit": sqltypes.Int64BindVariable(3)},
	}}
	for _, conn := range conns {
		if !reflect.DeepEqual(conn.Queries, wantQueries) {
			t.Errorf("got: conn.Queries = %v, want: %v", conn.Queries, wantQueries)
		}
	}
	// Top 3 rows by col2 desc: two rows with col2=3, then one with col2=2.
	wantResult := &sqltypes.Result{
		Fields: []*querypb.Field{
			{Name: "col1", Type: sqltypes.Int32},
			{Name: "col2", Type: sqltypes.Int32},
		},
		RowsAffected: 3,
		InsertID:     0,
	}
	wantResult.Rows = append(wantResult.Rows, []sqltypes.Value{
		sqltypes.NewInt32(1),
		sqltypes.NewInt32(3),
	}, []sqltypes.Value{
		sqltypes.NewInt32(1),
		sqltypes.NewInt32(3),
	}, []sqltypes.Value{
		sqltypes.NewInt32(1),
		sqltypes.NewInt32(2),
	})
	if !reflect.DeepEqual(gotResult, wantResult) {
		t.Errorf("scatter order by:\n%v, want\n%v", gotResult, wantResult)
	}
}
explode_data.jsonl/67418
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 946 }
[ 2830, 3393, 3379, 3326, 1650, 16527, 1155, 353, 8840, 836, 8, 341, 197, 322, 9785, 6505, 25, 4320, 944, 990, 1855, 77415, 25255, 14359, 624, 45987, 1669, 330, 5305, 698, 9598, 66, 1669, 18335, 7121, 52317, 77415, 14542, 3973, 741, 1903,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAESSIV_KeySizes(t *testing.T) { keyStr := "198371900187498172316311acf81d238ff7619873a61983d619c87b63a1987f" + "987131819803719b847126381cd763871638aa71638176328761287361231321" + "812731321de508761437195ff231765aa4913219873ac6918639816312130011" + "abc900bba11400187984719827431246bbab1231eb4145215ff7141436616beb" + "9817298148712fed3aab61000ff123313e" key, _ := hex.DecodeString(keyStr) for i := 0; i < len(key); i++ { _, err := daead.NewAESSIV(key[:i]) if i == daead.AESSIVKeySize && err != nil { t.Errorf("Rejected valid key size: %v, %v", i, err) } if i != daead.AESSIVKeySize && err == nil { t.Errorf("Allowed invalid key size: %v", i) } } }
explode_data.jsonl/66191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 338 }
[ 2830, 3393, 32, 9996, 3090, 35253, 34930, 1155, 353, 8840, 836, 8, 341, 23634, 2580, 19687, 197, 197, 1, 16, 24, 23, 18, 22, 16, 24, 15, 15, 16, 23, 22, 19, 24, 23, 16, 22, 17, 18, 16, 21, 18, 16, 16, 62594, 23, 16, 67, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestPopulateCluster_CNI builds a full cluster spec from a minimal one with
// the CNI network plugin configured and checks the build preserves the CNI
// settings: plugin name kept, ConfigureCBR0 stays false, and the
// kube-controller-manager is left configuring cloud routes.
func TestPopulateCluster_CNI(t *testing.T) {
	c := buildMinimalCluster()
	c.Spec.Kubelet = &kopsapi.KubeletConfigSpec{
		ConfigureCBR0:     fi.Bool(false),
		NetworkPluginName: "cni",
		NonMasqueradeCIDR: c.Spec.NonMasqueradeCIDR,
		CloudProvider:     c.Spec.CloudProvider,
	}

	full, err := build(c)
	if err != nil {
		t.Fatalf("error during build: %v", err)
	}

	if full.Spec.Kubelet.NetworkPluginName != "cni" {
		t.Fatalf("Unexpected NetworkPluginName: %v", full.Spec.Kubelet.NetworkPluginName)
	}

	if fi.BoolValue(full.Spec.Kubelet.ConfigureCBR0) != false {
		t.Fatalf("Unexpected ConfigureCBR0: %v", full.Spec.Kubelet.ConfigureCBR0)
	}

	if fi.BoolValue(full.Spec.KubeControllerManager.ConfigureCloudRoutes) != true {
		t.Fatalf("Unexpected ConfigureCloudRoutes: %v", full.Spec.KubeControllerManager.ConfigureCloudRoutes)
	}
}
explode_data.jsonl/75034
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 11598, 6334, 28678, 920, 14912, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1936, 88328, 28678, 2822, 1444, 36473, 11352, 3760, 1149, 284, 609, 74, 3721, 2068, 11352, 3760, 1149, 2648, 8327, 515, 197, 197, 28560, 12979, 49, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestCreateCertificateSupportsCreatingCACertsAndSigning generates a CA
// certificate/key pair, signs a second certificate with it, and shells out
// to `openssl verify` to confirm the signing chain.
func TestCreateCertificateSupportsCreatingCACertsAndSigning(t *testing.T) {
	t.Parallel()

	// Self-signed CA pair (the bool presumably marks "is CA" — confirm
	// against CreateSampleCertKeyPair).
	caPrivKey, _, caCertificate, _ := CreateSampleCertKeyPair(t, nil, nil, true, nil)
	caCertTmpPath := StoreCertToTempFile(t, caCertificate)
	defer os.Remove(caCertTmpPath)

	// Leaf certificate signed with the CA cert/key created above.
	_, _, signedCertificate, _ := CreateSampleCertKeyPair(t, caCertificate, caPrivKey, false, nil)
	signedCertTmpPath := StoreCertToTempFile(t, signedCertificate)
	defer os.Remove(signedCertTmpPath)

	// Verify the signed certificate is indeed signed by the CA certificate;
	// RunCommand fails the test if openssl exits non-zero.
	verifyCmd := shell.Command{
		Command: "openssl",
		Args:    []string{"verify", "-CAfile", caCertTmpPath, signedCertTmpPath},
	}
	shell.RunCommand(t, verifyCmd)
}
explode_data.jsonl/62204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 4021, 33202, 7916, 82, 24973, 92832, 15546, 3036, 93358, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 924, 32124, 1592, 11, 8358, 2162, 33202, 11, 716, 1669, 4230, 17571, 36934, 1592, 12443, 1155, 11, 2092, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSaveNextWalHead_MiddleWalFile records the head bytes of the next WAL
// file through a WalPartRecorder and checks that, after flushing, they are
// stored in the delta part file at this WAL's position.
func TestSaveNextWalHead_MiddleWalFile(t *testing.T) {
	dataFolder := testtools.NewMockDataFolder()
	manager := internal.NewDeltaFileManager(dataFolder)
	walPartRecorder, err := internal.NewWalPartRecorder(WalFilename, manager)
	assert.NoError(t, err)
	nextWalHead := []byte{1, 2, 3, 4, 5}
	err = walPartRecorder.SaveNextWalHead(nextWalHead)
	assert.NoError(t, err)
	// Flush so the part file becomes visible via GetPartFile.
	manager.FlushFiles(nil)
	deltaFilename, err := internal.GetDeltaFilenameFor(WalFilename)
	assert.NoError(t, err)
	partFile, err := manager.GetPartFile(deltaFilename)
	assert.NoError(t, err)
	assert.Equal(t, nextWalHead, partFile.WalHeads[internal.GetPositionInDelta(WalFilename)])
}
explode_data.jsonl/59804
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 227 }
[ 2830, 3393, 8784, 5847, 88298, 12346, 1245, 3310, 88298, 1703, 1155, 353, 8840, 836, 8, 341, 8924, 13682, 1669, 1273, 15918, 7121, 11571, 1043, 13682, 741, 92272, 1669, 5306, 7121, 20277, 62878, 2592, 13682, 692, 6692, 278, 5800, 47023, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMailbox delivers 12 items into a capacity-10 mailbox, checks the
// at-capacity signal returned by Deliver, then drains the mailbox via
// Notify/Retrieve and asserts only the newest 10 items (2..11) survive —
// i.e. the two oldest were evicted.
func TestMailbox(t *testing.T) {
	var (
		expected  = []int{2, 3, 4, 5, 6, 7, 8, 9, 10, 11}
		toDeliver = []int{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11}
	)

	const capacity = 10
	m := utils.NewMailbox[int](capacity)

	// Queue deliveries; Deliver reports whether the mailbox was full.
	for i, d := range toDeliver {
		atCapacity := m.Deliver(d)
		if atCapacity && i < capacity {
			t.Errorf("mailbox at capacity %d", i)
		} else if !atCapacity && i >= capacity {
			t.Errorf("mailbox below capacity %d", i)
		}
	}

	// Retrieve them: consumer drains everything on each Notify signal.
	var recvd []int
	chDone := make(chan struct{})
	go func() {
		defer close(chDone)
		for range m.Notify() {
			for {
				x, exists := m.Retrieve()
				if !exists {
					break
				}
				recvd = append(recvd, x)
			}
		}
	}()

	// NOTE(review): closing the mailbox's own Notify channel from the test
	// ends the consumer loop; this relies on Mailbox never sending on it
	// afterwards — confirm against utils.Mailbox.
	close(m.Notify())
	<-chDone

	require.Equal(t, expected, recvd)
}
explode_data.jsonl/31932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 16702, 2011, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 42400, 220, 284, 3056, 396, 90, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, 220, 21, 11, 220, 22, 11, 220, 23, 11, 220, 24, 11, 220, 16, 15, 11, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestClock_WaitJobs schedules one repeating job and repeatedly polls
// WaitJobs from another goroutine, expecting a constant count of 1 while
// the job is active and 0 once it has been cancelled.
func TestClock_WaitJobs(t *testing.T) {
	var (
		myClock  = Default().Reset()
		interval = time.Millisecond
		waitChan = make(chan struct{})
		jobsNum  = 1000
		jobFunc  = func() {
			// do nothing — only the pending-job bookkeeping is under test
		}
	)
	// count=0 presumably means "repeat indefinitely" — confirm AddJobRepeat.
	job, inserted := myClock.AddJobRepeat(interval, 0, jobFunc)
	if !inserted {
		t.Error("add repeat job failure")
	}
	go func() {
		// Poll WaitJobs jobsNum times; the single repeating job must always
		// be counted exactly once.
		for i := 0; i < jobsNum; i++ {
			time.Sleep(time.Millisecond * 10)
			waitJobs := myClock.WaitJobs()
			if waitJobs != 1 {
				t.Errorf("waitJobs=%v are inconsistent with expectations\n", waitJobs)
			}
		}
		waitChan <- struct{}{}
	}()
	<-waitChan
	job.Cancel()
	// After cancellation no jobs may remain pending.
	if myClock.WaitJobs() != 0 {
		t.Error("数据列表操作获取的数据与Clock实际情况不一致!")
	}
}
explode_data.jsonl/2018
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 329 }
[ 2830, 3393, 26104, 2763, 1315, 40667, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 13624, 26104, 220, 284, 7899, 1005, 14828, 741, 197, 2084, 6152, 284, 882, 71482, 198, 197, 48750, 46019, 284, 1281, 35190, 2036, 37790, 197, 12428, 54...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEventSystemExtrinsicSuccess_Encode(t *testing.T) { encoded, err := EncodeToBytes(exampleEventFin) assert.NoError(t, err) assert.Equal(t, exampleEventFinEnc, encoded) encoded, err = EncodeToBytes(exampleEventApp) assert.NoError(t, err) assert.Equal(t, exampleEventAppEnc, encoded) }
explode_data.jsonl/68326
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 1556, 2320, 840, 45002, 7188, 93529, 534, 1155, 353, 8840, 836, 8, 341, 197, 19329, 11, 1848, 1669, 56562, 1249, 7078, 66203, 1556, 9134, 340, 6948, 35699, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, 3110, 1556, 9134, 7408, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRead_expired verifies that Read returns the context's error when the
// context is cancelled before the call is made.
func TestRead_expired(t *testing.T) {
	assert := assert.New(t)
	server, log, cleanup := setup(t)
	defer cleanup()
	ctx, cancel := context.WithCancel(context.Background())
	cancel() // expire the context up front, before Read is invoked
	// Entries exist on the server, but the dead context must win.
	server.append([][]byte{[]byte("hello"), []byte("world")})
	entriesCh := make(chan []blog.Entry, 4)
	err := log.Read(ctx, 1, entriesCh)
	assert.Equal(ctx.Err(), err)
}
explode_data.jsonl/55
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 4418, 80221, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 41057, 11, 1487, 11, 21290, 1669, 6505, 1155, 340, 16867, 21290, 741, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 84441, 741, 410...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBufferIndexRune(t *testing.T) { tt := []struct { b RuneArray r rune n int }{ {RuneArray(nil), '0', -1}, {RuneArray([]rune("01234")), '0', 0}, {RuneArray([]rune("01234")), '3', 3}, {RuneArray([]rune("αβγ")), 'α', 0}, {RuneArray([]rune("αβγ")), 'γ', 2}, } for _, tc := range tt { n := tc.b.IndexRune(tc.r) if n != tc.n { t.Errorf("IndexRune(%v) for buffer %v returned %v; expected %v", tc.r, tc.b, n, tc.n) } } }
explode_data.jsonl/37932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 4095, 1552, 49, 2886, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 3056, 1235, 341, 197, 2233, 70778, 1857, 198, 197, 7000, 63499, 198, 197, 9038, 526, 198, 197, 59403, 197, 197, 90, 49, 2886, 1857, 27907, 701, 364, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInvisibleSchema(t *testing.T) { assert := assert.New(t) assert.True(IsInvisibleSchema(metricsSchema)) assert.True(IsInvisibleSchema("METRICS_ScHEma")) assert.False(IsInvisibleSchema("mysql")) assert.False(IsInvisibleSchema(informationSchema)) assert.False(IsInvisibleSchema("Bogusname")) }
explode_data.jsonl/35834
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 641, 12601, 8632, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 6948, 32443, 65473, 641, 12601, 8632, 89788, 8632, 1171, 6948, 32443, 65473, 641, 12601, 8632, 445, 44875, 84172, 1098, 66, 1799, 1728, 5455, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSymlinkModeUnmarshal checks SymlinkMode.UnmarshalText: empty and
// unrecognized strings must fail, while each known mode name maps to its
// corresponding constant.
func TestSymlinkModeUnmarshal(t *testing.T) {
	// Set up test cases.
	testCases := []struct {
		text          string
		expectedMode  SymlinkMode
		expectFailure bool
	}{
		{"", SymlinkMode_SymlinkModeDefault, true},
		{"asdf", SymlinkMode_SymlinkModeDefault, true},
		{"ignore", SymlinkMode_SymlinkModeIgnore, false},
		{"portable", SymlinkMode_SymlinkModePortable, false},
		{"posix-raw", SymlinkMode_SymlinkModePOSIXRaw, false},
	}

	// Process test cases: failure expectation first, then the decoded mode.
	for _, testCase := range testCases {
		var mode SymlinkMode
		if err := mode.UnmarshalText([]byte(testCase.text)); err != nil {
			if !testCase.expectFailure {
				t.Errorf("unable to unmarshal text (%s): %s", testCase.text, err)
			}
		} else if testCase.expectFailure {
			t.Error("unmarshaling succeeded unexpectedly for text:", testCase.text)
		} else if mode != testCase.expectedMode {
			t.Errorf(
				"unmarshaled mode (%s) does not match expected (%s)",
				mode,
				testCase.expectedMode,
			)
		}
	}
}
explode_data.jsonl/30510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 390 }
[ 2830, 3393, 34667, 44243, 3636, 1806, 27121, 1155, 353, 8840, 836, 8, 341, 197, 322, 2573, 705, 1273, 5048, 624, 18185, 37302, 1669, 3056, 1235, 341, 197, 15425, 688, 914, 198, 197, 42400, 3636, 220, 5718, 44243, 3636, 198, 197, 24952, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestFindSockets creates fake ceph admin-socket files in a temp directory
// for each scenario in sockTestParams and checks that findSockets discovers
// every osd/mon/mds/rgw socket it should.
func TestFindSockets(t *testing.T) {
	tmpdir, err := os.MkdirTemp("", "socktest")
	require.NoError(t, err)
	defer func() {
		err := os.Remove(tmpdir)
		require.NoError(t, err)
	}()
	c := &Ceph{
		CephBinary:             "foo",
		OsdPrefix:              "ceph-osd",
		MonPrefix:              "ceph-mon",
		MdsPrefix:              "ceph-mds",
		RgwPrefix:              "ceph-client",
		SocketDir:              tmpdir,
		SocketSuffix:           "asok",
		CephUser:               "client.admin",
		CephConfig:             "/etc/ceph/ceph.conf",
		GatherAdminSocketStats: true,
		GatherClusterStats:     false,
	}
	// Each scenario declares how many sockets of each daemon type exist;
	// files are created before and removed after each findSockets run.
	for _, st := range sockTestParams {
		require.NoError(t, createTestFiles(tmpdir, st))

		sockets, err := findSockets(c)
		require.NoError(t, err)

		for i := 1; i <= st.osds; i++ {
			assertFoundSocket(t, tmpdir, typeOsd, i, sockets)
		}

		for i := 1; i <= st.mons; i++ {
			assertFoundSocket(t, tmpdir, typeMon, i, sockets)
		}

		for i := 1; i <= st.mdss; i++ {
			assertFoundSocket(t, tmpdir, typeMds, i, sockets)
		}

		for i := 1; i <= st.rgws; i++ {
			assertFoundSocket(t, tmpdir, typeRgw, i, sockets)
		}

		require.NoError(t, cleanupTestFiles(tmpdir, st))
	}
}
explode_data.jsonl/40228
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 602 }
[ 2830, 3393, 9885, 50, 19601, 1155, 353, 8840, 836, 8, 341, 20082, 3741, 11, 1848, 1669, 2643, 1321, 12438, 12151, 19814, 330, 13199, 1944, 1138, 17957, 35699, 1155, 11, 1848, 340, 16867, 2915, 368, 341, 197, 9859, 1669, 2643, 13270, 103...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProcessFlagsInvalid(t *testing.T) { ctx := &context.Context{} source := []string{ "{{.Version}", } expected := `template: tmpl:1: unexpected "}" in operand` flags, err := processFlags(ctx, &artifact.Artifact{}, []string{}, source, "-testflag=") require.EqualError(t, err, expected) require.Nil(t, flags) }
explode_data.jsonl/54156
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 7423, 9195, 7928, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 609, 2147, 9328, 31483, 47418, 1669, 3056, 917, 515, 197, 197, 1, 2979, 13, 5637, 24375, 197, 630, 42400, 1669, 1565, 4214, 25, 79839, 25, 16, 25, 16500, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWithStreamInterceptor(t *testing.T) { intcp := func(srv interface{}, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error { return nil } opt := WithStreamInterceptor(intcp) cmp := newCmp(t, opt) assert.Equal(t, 2, len(cmp.config.streamInterceptors)) t.Log("done") }
explode_data.jsonl/68381
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 2354, 3027, 32786, 1155, 353, 8840, 836, 8, 341, 2084, 4672, 1669, 2915, 1141, 10553, 3749, 22655, 10870, 47900, 22997, 3027, 11, 3546, 353, 56585, 33308, 5475, 1731, 11, 7013, 47900, 33308, 3050, 8, 1465, 341, 197, 853, 209...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReceiverExists checks that loading a config whose route references a
// receiver that is not declared fails with a descriptive error.
// NOTE(review): the raw YAML's exact indentation was reconstructed — verify
// it still parses as route.receiver: team-X with receiver team-Y declared.
func TestReceiverExists(t *testing.T) {
	in := `
route:
    receiver: team-X

receivers:
- name: 'team-Y'
`
	_, err := Load(in)

	expected := "undefined receiver \"team-X\" used in route"

	if err == nil {
		t.Fatalf("no error returned, expected:\n%q", expected)
	}
	if err.Error() != expected {
		t.Errorf("\nexpected:\n%q\ngot:\n%q", expected, err.Error())
	}
}
explode_data.jsonl/72895
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 25436, 15575, 1155, 353, 8840, 836, 8, 341, 17430, 1669, 22074, 8966, 510, 262, 13964, 25, 2083, 30550, 271, 53387, 1945, 510, 12, 829, 25, 364, 9196, 29137, 1248, 3989, 197, 6878, 1848, 1669, 8893, 5900, 692, 42400, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestServer_Rejects_Continuation0(t *testing.T) { testServerRejectsConn(t, func(st *serverTester) { st.fr.AllowIllegalWrites = true if err := st.fr.WriteContinuation(0, true, st.encodeHeader()); err != nil { t.Fatal(err) } }) }
explode_data.jsonl/71656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 5475, 50693, 583, 82, 62, 36139, 4002, 15, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 78413, 82, 9701, 1155, 11, 2915, 5895, 353, 4030, 58699, 8, 341, 197, 18388, 18798, 29081, 33713, 93638, 284, 830, 198, 197, 743, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestUnmarshalStructure checks UnmarshalStructure against a Structure
// value, a pointer, raw JSON bytes, and an unsupported input type.
func TestUnmarshalStructure(t *testing.T) {
	sta := Structure{Qri: KindStructure.String(), Format: "csv"}
	cases := []struct {
		value interface{} // input handed to UnmarshalStructure
		out   *Structure  // expected parsed structure (nil on error)
		err   string      // expected error message ("" = no error)
	}{
		{sta, &sta, ""},
		{&sta, &sta, ""},
		{[]byte("{\"qri\":\"st:0\"}"), &Structure{Qri: KindStructure.String()}, ""},
		{5, nil, "couldn't parse structure, value is invalid type"},
	}

	for i, c := range cases {
		got, err := UnmarshalStructure(c.value)
		// Accept either a clean success or the exact expected error string.
		if !(err == nil && c.err == "" || err != nil && err.Error() == c.err) {
			t.Errorf("case %d error mismatch. expected: '%s', got: '%s'", i, c.err, err)
			continue
		}
		if err := CompareStructures(c.out, got); err != nil {
			t.Errorf("case %d structure mismatch: %s", i, err.Error())
			continue
		}
	}
}
explode_data.jsonl/16443
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 1806, 27121, 22952, 1155, 353, 8840, 836, 8, 341, 18388, 64, 1669, 28596, 90, 48, 461, 25, 16840, 22952, 6431, 1507, 15042, 25, 330, 18104, 16707, 1444, 2264, 1669, 3056, 1235, 341, 197, 16309, 3749, 16094, 197, 13967, 256, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestConverterMapOptionalNoOverride maps Foo.Two onto Bar.One without
// override: the generated code assigns from in.Two first and only replaces
// the value with in.One when in.One is set.
// NOTE(review): newlines inside the raw IDL/expected strings were
// reconstructed; assertPrettyEqual/trim presumably normalize whitespace —
// confirm.
func TestConverterMapOptionalNoOverride(t *testing.T) {
	fieldMap := make(map[string]codegen.FieldMapperEntry)
	fieldMap["One"] = codegen.FieldMapperEntry{
		QualifiedName: "Two",
		Override:      false,
	}

	lines, err := convertTypes(
		"Foo", "Bar",
		`struct Foo {
			1: optional bool one
			2: optional bool two
		}
		struct Bar {
			1: optional bool one
			2: optional bool two
		}`,
		nil,
		fieldMap,
	)
	assert.NoError(t, err)
	assertPrettyEqual(t, trim(`
		out.One = (*bool)(in.Two)
		if in.One != nil {
			out.One = (*bool)(in.One)
		}
		out.Two = (*bool)(in.Two)
	`), lines)
}
explode_data.jsonl/62065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 14920, 2227, 15309, 2753, 2177, 1155, 353, 8840, 836, 8, 341, 39250, 2227, 1669, 1281, 9147, 14032, 60, 95859, 17087, 10989, 5874, 340, 39250, 2227, 1183, 3966, 1341, 284, 2038, 4370, 17087, 10989, 5874, 515, 197, 16995, 928, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWriteFileContent(t *testing.T) { content, err := ioutil.ReadFile(".gitignore") if err != nil { t.Errorf("Got: error %v, was: nil error to open .gitignore", err) } writeFileContent(content) }
explode_data.jsonl/7964
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 7985, 1703, 2762, 1155, 353, 8840, 836, 8, 341, 27751, 11, 1848, 1669, 43144, 78976, 5680, 12882, 13130, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 32462, 25, 1465, 1018, 85, 11, 572, 25, 2092, 1465, 311, 1787...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestApiV2GetDevice drives GET /api/management/v2/devauth/devices/{id}
// through a mocked app layer, covering the found, not-found, and generic
// internal-error paths.
func TestApiV2GetDevice(t *testing.T) {
	t.Parallel()

	// enforce specific field naming in errors returned by API
	updateRestErrorFieldName()

	// Fixture device with one auth set; serves as the "found" response.
	dev := &model.Device{
		Id:     "foo",
		IdData: `{"mac": "00:00:00:01"}`,
		IdDataStruct: map[string]interface{}{
			"mac": "00:00:00:01",
		},
		PubKey: "pubkey",
		Status: model.DevStatusPending,
		AuthSets: []model.AuthSet{
			{
				Id:       "1",
				DeviceId: "foo",
				IdData:   `{"mac": "00:00:00:01"}`,
				IdDataStruct: map[string]interface{}{
					"mac": "00:00:00:01",
				},
			},
		},
	}
	apiDev, _ := deviceV2FromDbModel(dev)

	tcases := []struct {
		req    *http.Request  // request under test
		device *model.Device  // device the mocked app returns
		err    error          // error the mocked app returns
		code   int            // expected HTTP status
		body   string         // expected response body
	}{
		{
			req:    test.MakeSimpleRequest("GET", "http://1.2.3.4/api/management/v2/devauth/devices/foo", nil),
			device: dev,
			err:    nil,
			code:   http.StatusOK,
			body:   string(asJSON(apiDev)),
		},
		{
			req:    test.MakeSimpleRequest("GET", "http://1.2.3.4/api/management/v2/devauth/devices/bar", nil),
			device: nil,
			err:    store.ErrDevNotFound,
			code:   http.StatusNotFound,
			body:   RestError("device not found"),
		},
		{
			req:    test.MakeSimpleRequest("GET", "http://1.2.3.4/api/management/v2/devauth/devices/bar", nil),
			device: nil,
			err:    errors.New("generic error"),
			code:   http.StatusInternalServerError,
			body:   RestError("internal error"),
		},
	}

	// Capture tc by index so the parallel subtests don't share the loop var.
	for i := range tcases {
		tc := tcases[i]
		t.Run(fmt.Sprintf("tc %d", i), func(t *testing.T) {
			t.Parallel()

			da := &mocks.App{}
			da.On("GetDevice", mtest.ContextMatcher(), mock.AnythingOfType("string")).
				Return(tc.device, tc.err)

			apih := makeMockApiHandler(t, da, nil)
			runTestRequest(t, apih, tc.req, tc.code, tc.body)
		})
	}
}
explode_data.jsonl/632
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 806 }
[ 2830, 3393, 6563, 53, 17, 1949, 6985, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 28162, 3151, 2070, 34948, 304, 5975, 5927, 553, 5333, 198, 27175, 12416, 1454, 51241, 2822, 27302, 1669, 609, 2528, 43995, 515, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCreateWalletInvalidEntropy verifies that InitWallet rejects an
// invalid cipher seed mnemonic.
func TestCreateWalletInvalidEntropy(t *testing.T) {
	t.Parallel()

	// testDir is empty, meaning wallet was not created from before.
	testDir, err := ioutil.TempDir("", "testcreate")
	if err != nil {
		t.Fatalf("unable to create temp directory: %v", err)
	}
	defer func() {
		os.RemoveAll(testDir)
	}()

	// Create new UnlockerService.
	service := walletunlocker.New(testDir, testNetParams, true, nil)

	// We'll attempt to init the wallet with an invalid cipher seed and
	// passphrase.
	req := &lnrpc.InitWalletRequest{
		WalletPassword:     testPassword,
		CipherSeedMnemonic: []string{"invalid", "seed"},
		AezeedPassphrase:   []byte("fake pass"),
	}

	ctx := context.Background()
	_, err = service.InitWallet(ctx, req)
	if err == nil {
		t.Fatalf("wallet creation should have failed")
	}
}
explode_data.jsonl/24039
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 278 }
[ 2830, 3393, 4021, 38259, 7928, 97582, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 1273, 6184, 374, 4287, 11, 7290, 15085, 572, 537, 3465, 504, 1573, 624, 18185, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeploySelect_Service(t *testing.T) { const testApp = "mockApp" testCases := map[string]struct { setupMocks func(mocks deploySelectMocks) svc string env string wantErr error wantEnv string wantSvc string }{ "return error if fail to retrieve environment": { setupMocks: func(m deploySelectMocks) { m.configSvc. EXPECT(). ListEnvironments(testApp). Return(nil, errors.New("some error")) }, wantErr: fmt.Errorf("list environments: list environments: some error"), }, "return error if fail to list deployed services": { setupMocks: func(m deploySelectMocks) { m.configSvc. EXPECT(). ListEnvironments(testApp). Return([]*config.Environment{ { Name: "test", }, }, nil) m.deploySvc. EXPECT(). ListDeployedServices(testApp, "test"). Return(nil, errors.New("some error")) }, wantErr: fmt.Errorf("list deployed service for environment test: some error"), }, "return error if no deployed services found": { setupMocks: func(m deploySelectMocks) { m.configSvc. EXPECT(). ListEnvironments(testApp). Return([]*config.Environment{ { Name: "test", }, }, nil) m.deploySvc. EXPECT(). ListDeployedServices(testApp, "test"). Return([]string{}, nil) }, wantErr: fmt.Errorf("no deployed services found in application %s", testApp), }, "return error if fail to select": { setupMocks: func(m deploySelectMocks) { m.configSvc. EXPECT(). ListEnvironments(testApp). Return([]*config.Environment{ { Name: "test", }, }, nil) m.deploySvc. EXPECT(). ListDeployedServices(testApp, "test"). Return([]string{"mockSvc1", "mockSvc2"}, nil) m.prompt. EXPECT(). SelectOne("Select a deployed service", "Help text", []string{"mockSvc1 (test)", "mockSvc2 (test)"}). Return("", errors.New("some error")) }, wantErr: fmt.Errorf("select deployed services for application %s: some error", testApp), }, "success": { setupMocks: func(m deploySelectMocks) { m.configSvc. EXPECT(). ListEnvironments(testApp). Return([]*config.Environment{ { Name: "test", }, }, nil) m.deploySvc. EXPECT(). ListDeployedServices(testApp, "test"). 
Return([]string{"mockSvc1", "mockSvc2"}, nil) m.prompt. EXPECT(). SelectOne("Select a deployed service", "Help text", []string{"mockSvc1 (test)", "mockSvc2 (test)"}). Return("mockSvc1 (test)", nil) }, wantEnv: "test", wantSvc: "mockSvc1", }, "skip with only one deployed service": { setupMocks: func(m deploySelectMocks) { m.configSvc. EXPECT(). ListEnvironments(testApp). Return([]*config.Environment{ { Name: "test", }, }, nil) m.deploySvc. EXPECT(). ListDeployedServices(testApp, "test"). Return([]string{"mockSvc"}, nil) }, wantEnv: "test", wantSvc: "mockSvc", }, "return error if fail to check if service passed in by flag is deployed or not": { env: "test", svc: "mockSvc", setupMocks: func(m deploySelectMocks) { m.deploySvc. EXPECT(). IsServiceDeployed(testApp, "test", "mockSvc"). Return(false, errors.New("some error")) }, wantErr: fmt.Errorf("check if service mockSvc is deployed in environment test: some error"), }, "success with flags": { env: "test", svc: "mockSvc", setupMocks: func(m deploySelectMocks) { m.deploySvc. EXPECT(). IsServiceDeployed(testApp, "test", "mockSvc"). Return(true, nil) }, wantEnv: "test", wantSvc: "mockSvc", }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockdeploySvc := mocks.NewMockDeployStoreClient(ctrl) mockconfigSvc := mocks.NewMockConfigLister(ctrl) mockprompt := mocks.NewMockPrompter(ctrl) mocks := deploySelectMocks{ deploySvc: mockdeploySvc, configSvc: mockconfigSvc, prompt: mockprompt, } tc.setupMocks(mocks) sel := DeploySelect{ Select: &Select{ config: mockconfigSvc, prompt: mockprompt, }, deployStoreSvc: mockdeploySvc, } gotDeployed, err := sel.DeployedService("Select a deployed service", "Help text", testApp, WithEnv(tc.env), WithSvc(tc.svc)) if tc.wantErr != nil { require.EqualError(t, tc.wantErr, err.Error()) } else { require.Equal(t, tc.wantSvc, gotDeployed.Svc) require.Equal(t, tc.wantEnv, gotDeployed.Env) } }) } }
explode_data.jsonl/44890
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2116 }
[ 2830, 3393, 69464, 3379, 52548, 1155, 353, 8840, 836, 8, 341, 4777, 1273, 2164, 284, 330, 16712, 2164, 698, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 84571, 72577, 2915, 1255, 25183, 10517, 3379, 72577, 340, 197, 1903, 7362, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBackupAuthorized(t *testing.T) { dbMock := new(DBMock) authHandler := AuthHandlerMock{} services := &Services{db: dbMock, cookieHandler: &authHandler} router, err := CreateRouter(services) assert.NoError(t, err) req, _ := http.NewRequest("POST", "/api/backup", nil) res := httptest.NewRecorder() user := testUser authHandler.AllowUser(&user) dbMock.On("Backup", &user).Return("json backup", nil).Once() router.ServeHTTP(res, req) assert.Equal(t, http.StatusOK, res.Code) assert.Equal(t, "json backup", res.Body.String()) dbMock.AssertExpectations(t) authHandler.AssertExpectations(t) }
explode_data.jsonl/62536
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 56245, 60454, 1155, 353, 8840, 836, 8, 341, 20939, 11571, 1669, 501, 32184, 11571, 340, 78011, 3050, 1669, 7366, 3050, 11571, 31483, 1903, 2161, 1669, 609, 11025, 90, 1999, 25, 2927, 11571, 11, 12544, 3050, 25, 609, 3242, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsTimeout(t *testing.T) { err := serrors.New("no timeout") assert.False(t, serrors.IsTimeout(err)) wrappedErr := serrors.WrapStr("timeout", &testToTempErr{msg: "to", timeout: true}) assert.True(t, serrors.IsTimeout(wrappedErr)) noTimeoutWrappingTimeout := serrors.WrapStr("notimeout", &testToTempErr{ msg: "non timeout wraps timeout", timeout: false, cause: &testToTempErr{msg: "timeout", timeout: true}, }) assert.False(t, serrors.IsTimeout(noTimeoutWrappingTimeout)) }
explode_data.jsonl/4289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 3872, 7636, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 274, 7650, 7121, 445, 2152, 9632, 1138, 6948, 50757, 1155, 11, 274, 7650, 4506, 7636, 3964, 1171, 6692, 56289, 7747, 1669, 274, 7650, 38968, 2580, 445, 14150, 756, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRuntime_ContentTypeCanary(t *testing.T) { // test that it can make a simple request // and get the response for it. // defaults all the way down result := []task{ {false, "task 1 content", 1}, {false, "task 2 content", 2}, } server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { if req.Header.Get("Authorization") != "Bearer the-super-secret-token" { rw.WriteHeader(400) return } rw.Header().Add(httpkit.HeaderContentType, httpkit.JSONMime+";charset=utf-8") rw.WriteHeader(http.StatusOK) jsongen := json.NewEncoder(rw) jsongen.Encode(result) })) defer server.Close() rwrtr := client.RequestWriterFunc(func(req client.Request, _ strfmt.Registry) error { return nil }) hu, _ := url.Parse(server.URL) runtime := New(hu.Host, "/", []string{"http"}) res, err := runtime.Submit(&client.Operation{ ID: "getTasks", Method: "GET", PathPattern: "/", Schemes: []string{"http"}, Params: rwrtr, Reader: client.ResponseReaderFunc(func(response client.Response, consumer httpkit.Consumer) (interface{}, error) { if response.Code() == 200 { var result []task if err := consumer.Consume(response.Body(), &result); err != nil { return nil, err } return result, nil } return nil, errors.New("Generic error") }), AuthInfo: BearerToken("the-super-secret-token"), }) if assert.NoError(t, err) { assert.IsType(t, []task{}, res) actual := res.([]task) assert.EqualValues(t, result, actual) } }
explode_data.jsonl/53824
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 604 }
[ 2830, 3393, 15123, 62, 29504, 6713, 658, 1155, 353, 8840, 836, 8, 341, 197, 322, 1273, 429, 432, 646, 1281, 264, 4285, 1681, 198, 197, 322, 323, 633, 279, 2033, 369, 432, 624, 197, 322, 16674, 678, 279, 1616, 1495, 198, 9559, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVisitorList(t *testing.T) { setup() defer teardown() reqOpts := &ListOptions{Count: 50, Page: 1} addRestHandlerFunc("/data/visitors", func(w http.ResponseWriter, req *http.Request) { testURLParam(t, req, "depth", "minimal") testURLParam(t, req, "count", "50") testURLParam(t, req, "page", "1") testMethod(t, req, "GET") rJSON := `{"elements":[{"type":"Visitor","id":"10005","contactId": "6","visitorId": "10", "V_LastVisitDateAndTime": "1464545297"}], "page":1,"pageSize":50,"total":1}` fmt.Fprint(w, rJSON) }) visitors, resp, err := client.Visitors.List(reqOpts) if err != nil { t.Errorf("Visitors.List recieved error: %v", err) } want := []Visitor{{Type: "Visitor", VisitorID: 10, ContactID: 6, LastVisitDateAndTime: 1464545297}} testModels(t, "Visitors.List", visitors, want) if resp.PageSize != reqOpts.Count { t.Errorf("Visitors.List response page size incorrect.\nExpected: %d\nRecieved: %d", reqOpts.Count, resp.PageSize) } if resp.Page != reqOpts.Page { t.Error("Visitors.List response page number incorrect") } }
explode_data.jsonl/2013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 16796, 852, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 24395, 43451, 1669, 609, 852, 3798, 90, 2507, 25, 220, 20, 15, 11, 5755, 25, 220, 16, 630, 12718, 12416, 3050, 9626, 4283, 691, 14, 2682, 11905, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEntry_SelectionHides(t *testing.T) { e, window := setupSelection(t, false) defer teardownImageTest(window) c := window.Canvas() c.Unfocus() test.AssertRendersToMarkup(t, "entry/selection_focus_lost.xml", c) assert.Equal(t, "sti", e.SelectedText()) c.Focus(e) test.AssertRendersToMarkup(t, "entry/selection_focus_gained.xml", c) assert.Equal(t, "sti", e.SelectedText()) }
explode_data.jsonl/12363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 5874, 88435, 39, 3341, 1155, 353, 8840, 836, 8, 341, 7727, 11, 3241, 1669, 6505, 11177, 1155, 11, 895, 340, 16867, 49304, 1906, 2271, 15906, 340, 1444, 1669, 3241, 54121, 2822, 1444, 10616, 17414, 741, 18185, 11711, 49, 1450...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDuplicateGroupSnapshots(t *testing.T) { defer resetTest() name := "test-group-snap-duplicate" namespace := "default" selectors, err := parseKeyValueList([]string{"app=mysql"}) require.NoError(t, err, "failed to parse selectors") createGroupSnapshotAndVerify(t, name, namespace, selectors, "", "", nil, nil, 0) // create another with the same name. should fail cmdArgs := []string{"create", "groupsnapshots", "-n", namespace, "--pvcSelectors", "app=mysql", name} expected := fmt.Sprintf("Error from server (AlreadyExists): groupvolumesnapshots.stork.libopenstorage.org \"%s\" already exists", name) testCommon(t, cmdArgs, nil, expected, true) }
explode_data.jsonl/20430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 53979, 2808, 61871, 27634, 1155, 353, 8840, 836, 8, 341, 16867, 7585, 2271, 2822, 11609, 1669, 330, 1944, 4351, 1331, 6861, 1737, 14070, 698, 56623, 1669, 330, 2258, 698, 38010, 1087, 11, 1848, 1669, 4715, 72082, 852, 10556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUtilNotifyConsentChange(t *testing.T) { q := make(chan string) server := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, req *http.Request) { rw.Header().Set("Content-Type", "application/json") rw.WriteHeader(200) defer req.Body.Close() bodyBytes, _ := ioutil.ReadAll(req.Body) fmt.Printf("body: %s\n", string(bodyBytes)) if string(bodyBytes) != `{"action":"consentchange","identity":"user3@user3.com","brief":"brief","mode":"email","status":"no"}` { q <- fmt.Sprintf("bad request in notifyConsentChange: %s", string(bodyBytes)) } else { q <- "ok" } })) // Close the server when test finishes defer server.Close() notifyConsentChange(server.URL, "brief", "no", "email", "user3@user3.com") response := <-q if response != "ok" { t.Fatal(response) } }
explode_data.jsonl/9663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 310 }
[ 2830, 3393, 2742, 28962, 15220, 306, 4072, 1155, 353, 8840, 836, 8, 341, 18534, 1669, 1281, 35190, 914, 340, 41057, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 2601, 86, 1758, 37508, 11, 4232, 353, 1254, 9659, 8, 341, 197, 7000...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDocstore_Put(t *testing.T) { cases := []struct { name string dbFile string conn string domid []byte txid []byte want *model.AnchorRecord }{ {"normal", testdb2, conn2, dom1, tx1, ar1}, } for _, c := range cases { c := c t.Run(c.name, func(t *testing.T) { docs := store.NewDocstore(c.conn) ctx := context.Background() ctx, cancelFunc := context.WithTimeout(ctx, 30*time.Second) defer cancelFunc() // put if err := docs.Put(ctx, c.want); err != nil { t.Error(err) } // save if err := docs.Close(); err != nil { t.Error(err) os.Remove(c.dbFile) t.Skip() } // load & get docs2 := store.NewDocstore(c.conn) got, err := docs2.Get(ctx, c.domid, c.txid) if err != nil { t.Error(err) } if !reflect.DeepEqual(got, c.want) { t.Errorf("got %+v but want %+v", got, c.want) } // cleanup docs2.Close() os.Remove(c.dbFile) }) } }
explode_data.jsonl/61312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 459 }
[ 2830, 3393, 9550, 4314, 1088, 332, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 20939, 1703, 914, 198, 197, 32917, 256, 914, 198, 197, 2698, 43019, 220, 3056, 3782, 198, 197, 46237, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestNewIndexInfo(t *testing.T) { testIndexInfo := func(config *ServiceConfig, expectedIndexInfos map[string]*IndexInfo) { for indexName, expectedIndexInfo := range expectedIndexInfos { index, err := config.NewIndexInfo(indexName) if err != nil { t.Fatal(err) } else { checkEqual(t, index.Name, expectedIndexInfo.Name, indexName+" name") checkEqual(t, index.Official, expectedIndexInfo.Official, indexName+" is official") checkEqual(t, index.Secure, expectedIndexInfo.Secure, indexName+" is secure") checkEqual(t, len(index.Mirrors), len(expectedIndexInfo.Mirrors), indexName+" mirrors") } } } config := NewServiceConfig(nil) noMirrors := []string{} expectedIndexInfos := map[string]*IndexInfo{ IndexName: { Name: IndexName, Official: true, Secure: true, Mirrors: noMirrors, }, "index." + IndexName: { Name: IndexName, Official: true, Secure: true, Mirrors: noMirrors, }, "example.com": { Name: "example.com", Official: false, Secure: true, Mirrors: noMirrors, }, "127.0.0.1:5000": { Name: "127.0.0.1:5000", Official: false, Secure: false, Mirrors: noMirrors, }, } testIndexInfo(config, expectedIndexInfos) publicMirrors := []string{"http://mirror1.local", "http://mirror2.local"} config = makeServiceConfig(publicMirrors, []string{"example.com"}) expectedIndexInfos = map[string]*IndexInfo{ IndexName: { Name: IndexName, Official: true, Secure: true, Mirrors: publicMirrors, }, "index." 
+ IndexName: { Name: IndexName, Official: true, Secure: true, Mirrors: publicMirrors, }, "example.com": { Name: "example.com", Official: false, Secure: false, Mirrors: noMirrors, }, "example.com:5000": { Name: "example.com:5000", Official: false, Secure: true, Mirrors: noMirrors, }, "127.0.0.1": { Name: "127.0.0.1", Official: false, Secure: false, Mirrors: noMirrors, }, "127.0.0.1:5000": { Name: "127.0.0.1:5000", Official: false, Secure: false, Mirrors: noMirrors, }, "other.com": { Name: "other.com", Official: false, Secure: true, Mirrors: noMirrors, }, } testIndexInfo(config, expectedIndexInfos) config = makeServiceConfig(nil, []string{"42.42.0.0/16"}) expectedIndexInfos = map[string]*IndexInfo{ "example.com": { Name: "example.com", Official: false, Secure: false, Mirrors: noMirrors, }, "example.com:5000": { Name: "example.com:5000", Official: false, Secure: false, Mirrors: noMirrors, }, "127.0.0.1": { Name: "127.0.0.1", Official: false, Secure: false, Mirrors: noMirrors, }, "127.0.0.1:5000": { Name: "127.0.0.1:5000", Official: false, Secure: false, Mirrors: noMirrors, }, "other.com": { Name: "other.com", Official: false, Secure: true, Mirrors: noMirrors, }, } testIndexInfo(config, expectedIndexInfos) }
explode_data.jsonl/13108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1394 }
[ 2830, 3393, 3564, 1552, 1731, 1155, 353, 8840, 836, 8, 341, 18185, 1552, 1731, 1669, 2915, 8754, 353, 1860, 2648, 11, 3601, 1552, 38059, 2415, 14032, 8465, 1552, 1731, 8, 341, 197, 2023, 1922, 675, 11, 3601, 1552, 1731, 1669, 2088, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParseProtocolVersion(t *testing.T) { tests := []struct { proto []byte major, minor uint isErr bool }{ // Valid ProtocolVersion messages. {[]byte{82, 70, 66, 32, 48, 48, 51, 46, 48, 48, 56, 10}, 3, 8, false}, // RFB 003.008\n {[]byte{82, 70, 66, 32, 48, 48, 51, 46, 56, 56, 57, 10}, 3, 889, false}, // RFB 003.889\n -- OS X 10.10.3 {[]byte{82, 70, 66, 32, 48, 48, 48, 46, 48, 48, 48, 10}, 0, 0, false}, // RFB 000.0000\n // Invalid messages. {[]byte{82, 70, 66, 32, 51, 46, 56, 10}, 0, 0, true}, // RFB 3.8\n -- too short; not zero padded {[]byte{82, 70, 66, 10}, 0, 0, true}, // RFB\n -- too short {[]byte{}, 0, 0, true}, // (empty) -- too short } for _, tt := range tests { major, minor, err := parseProtocolVersion(tt.proto) if err != nil && !tt.isErr { t.Fatalf("parseProtocolVersion(%v) unexpected error %v", tt.proto, err) } if err == nil && tt.isErr { t.Fatalf("parseProtocolVersion(%v) expected error", tt.proto) } if major != tt.major { t.Errorf("parseProtocolVersion(%v) major = %v, want %v", tt.proto, major, tt.major) } if major != tt.major { t.Errorf("parseProtocolVersion(%v) minor = %v, want %v", tt.proto, minor, tt.minor) } } }
explode_data.jsonl/60424
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 607 }
[ 2830, 3393, 14463, 20689, 5637, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 197, 15110, 286, 3056, 3782, 198, 197, 2109, 3035, 11, 8922, 2622, 198, 197, 19907, 7747, 286, 1807, 198, 197, 59403, 197, 197, 322, 7818...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestManifest_ResolveImageMap(t *testing.T) { cxt := context.NewTestContext(t) cxt.AddTestFile("testdata/porter-images.yaml", config.Name) m, err := LoadManifestFrom(cxt.Context, config.Name) require.NoError(t, err, "could not load manifest") rm := RuntimeManifest{Manifest: m} expectedImage, ok := m.ImageMap["something"] require.True(t, ok, "couldn't get expected image") expectedRef := fmt.Sprintf("%s@%s", expectedImage.Repository, expectedImage.Digest) step := rm.Install[0] err = rm.ResolveStep(step) assert.NoError(t, err, "Should have successfully resolved step") s := step.Data["searcher"].(map[interface{}]interface{}) assert.NotNil(t, s) img, ok := s["image"] assert.True(t, ok, "should have found image") val := fmt.Sprintf("%v", img) assert.Equal(t, expectedRef, val) repo, ok := s["repo"] assert.True(t, ok, "should have found repo") val = fmt.Sprintf("%v", repo) assert.Equal(t, expectedImage.Repository, val) digest, ok := s["digest"] assert.True(t, ok, "should have found digest") val = fmt.Sprintf("%v", digest) assert.Equal(t, expectedImage.Digest, val) tag, ok := s["tag"] assert.True(t, ok, "should have found tag") val = fmt.Sprintf("%v", tag) assert.Equal(t, expectedImage.Tag, val) }
explode_data.jsonl/37727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 468 }
[ 2830, 3393, 38495, 62, 56808, 1906, 2227, 1155, 353, 8840, 836, 8, 341, 1444, 2252, 1669, 2266, 7121, 2271, 1972, 1155, 340, 1444, 2252, 1904, 2271, 1703, 445, 92425, 71377, 261, 50991, 33406, 497, 2193, 2967, 692, 2109, 11, 1848, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_MapToSlice(t *testing.T) { dtest.C(t, func(t *dtest.T) { m := d.Map{ "k1": "v1", "k2": "v2", } s := dutil.MapToSlice(m) t.Assert(len(s), 4) t.AssertIN(s[0], d.Slice{"k1", "k2", "v1", "v2"}) t.AssertIN(s[1], d.Slice{"k1", "k2", "v1", "v2"}) t.AssertIN(s[2], d.Slice{"k1", "k2", "v1", "v2"}) t.AssertIN(s[3], d.Slice{"k1", "k2", "v1", "v2"}) }) dtest.C(t, func(t *dtest.T) { m := d.MapStrStr{ "k1": "v1", "k2": "v2", } s := dutil.MapToSlice(m) t.Assert(len(s), 4) t.AssertIN(s[0], d.Slice{"k1", "k2", "v1", "v2"}) t.AssertIN(s[1], d.Slice{"k1", "k2", "v1", "v2"}) t.AssertIN(s[2], d.Slice{"k1", "k2", "v1", "v2"}) t.AssertIN(s[3], d.Slice{"k1", "k2", "v1", "v2"}) }) dtest.C(t, func(t *dtest.T) { m := d.MapStrStr{} s := dutil.MapToSlice(m) t.Assert(len(s), 0) }) dtest.C(t, func(t *dtest.T) { s := dutil.MapToSlice(1) t.Assert(s, nil) }) }
explode_data.jsonl/16859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 579 }
[ 2830, 3393, 56992, 1249, 33236, 1155, 353, 8840, 836, 8, 341, 2698, 1944, 727, 1155, 11, 2915, 1155, 353, 67, 1944, 836, 8, 341, 197, 2109, 1669, 294, 10104, 515, 298, 197, 62911, 16, 788, 330, 85, 16, 756, 298, 197, 62911, 17, 78...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOnDemandAuthorizationServiceCreate(t *testing.T) { c := newMockClient(200, filepath.Join("testdata", "on-demand-authorization.json")) res, err := c.OnDemandAuthorization.Create(ctx) assert.Nil(t, err) assert.NotNil(t, res) }
explode_data.jsonl/82565
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 1925, 81027, 18124, 1860, 4021, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 501, 11571, 2959, 7, 17, 15, 15, 11, 26054, 22363, 445, 92425, 497, 330, 263, 56264, 12, 39554, 4323, 5455, 10202, 11, 1848, 1669, 272, 8071, 81027,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBackendGS(t *testing.T) { defer func() { if t.Skipped() { SkipDisallowed(t, "restic/backend/gs.TestBackendGS") } }() vars := []string{ "RESTIC_TEST_GS_PROJECT_ID", "RESTIC_TEST_GS_APPLICATION_CREDENTIALS", "RESTIC_TEST_GS_REPOSITORY", } for _, v := range vars { if os.Getenv(v) == "" { t.Skipf("environment variable %v not set", v) return } } t.Logf("run tests") newGSTestSuite(t).RunTests(t) }
explode_data.jsonl/25587
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 29699, 16522, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 743, 259, 808, 74, 6450, 368, 341, 298, 7568, 13389, 4839, 20967, 1155, 11, 330, 3927, 292, 70020, 4846, 82, 8787, 29699, 16522, 1138, 197, 197, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInsufficientPermissions_ForMember(t *testing.T) { expected := &discordgo.Member{User: &discordgo.User{Username: mockconstants.TestUser}} inp := &callbacks.InsufficientPermissions{Member: expected} actual := inp.ForMember() err := deepEqual(actual, expected) if err != nil { t.Error(err) } }
explode_data.jsonl/56093
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 15474, 26683, 23851, 84368, 9366, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 609, 42579, 3346, 46404, 90, 1474, 25, 609, 42579, 3346, 7344, 90, 11115, 25, 7860, 15763, 8787, 1474, 11248, 17430, 79, 1669, 609, 68311, 5337, 82...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAddActionTemplate(t *testing.T) { taskTempl := NewTaskTemplate("testTask", false) expectTaskTempl := &TaskTemplate{ TaskName: "testTask", Initial: false, ActionTemplates: []ActionTemplate{ ActionTemplate{ Name: "HTTP", StructName: "HTTPAction", ConstructorParams: map[string]Value{}, }, ActionTemplate{ Name: "XPath", StructName: "XPathAction", ConstructorParams: map[string]Value{}, }, ActionTemplate{ Name: "Join", StructName: "FieldJoinAction", ConstructorParams: map[string]Value{}, }, }, DataPipeTemplates: []DataPipeTemplate{}, } taskTempl.AddActionTemplate(NewActionTemplate("HTTP", "HTTPAction", nil)) taskTempl.AddActionTemplate(NewActionTemplate("XPath", "XPathAction", map[string]interface{}{})) taskTempl.AddActionTemplate(NewActionTemplate("Join", "FieldJoinAction", map[string]interface{}{})) err := taskTempl.AddActionTemplate(NewActionTemplate("HTTP", "HTTPAction", nil)) assert.NotNil(t, err) assert.Equal(t, expectTaskTempl, taskTempl) }
explode_data.jsonl/19589
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 453 }
[ 2830, 3393, 2212, 2512, 7275, 1155, 353, 8840, 836, 8, 341, 49115, 21988, 500, 1669, 1532, 6262, 7275, 445, 1944, 6262, 497, 895, 692, 24952, 6262, 21988, 500, 1669, 609, 6262, 7275, 515, 197, 81153, 675, 25, 330, 1944, 6262, 756, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValueAtQuantileSlice_RemoveIf(t *testing.T) { // Test RemoveIf on empty slice emptySlice := NewValueAtQuantileSlice() emptySlice.RemoveIf(func(el ValueAtQuantile) bool { t.Fail() return false }) // Test RemoveIf filtered := generateTestValueAtQuantileSlice() pos := 0 filtered.RemoveIf(func(el ValueAtQuantile) bool { pos++ return pos%3 == 0 }) assert.Equal(t, 5, filtered.Len()) }
explode_data.jsonl/32758
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 1130, 1655, 44220, 457, 33236, 66843, 2679, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 10783, 2679, 389, 4287, 15983, 198, 197, 3194, 33236, 1669, 1532, 1130, 1655, 44220, 457, 33236, 741, 197, 3194, 33236, 13270, 2679, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPluginShutdownTest(t *testing.T) { if testing.Short() { t.Skip("skipping test to verify forced shutdown of slow plugin") } th := Setup(t) defer th.TearDown() tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "github.com/cjdelisle/matterfoss-server/v5/plugin" ) type MyPlugin struct { plugin.MatterfossPlugin } func main() { plugin.ClientMain(&MyPlugin{}) } `, ` package main import ( "github.com/cjdelisle/matterfoss-server/v5/plugin" ) type MyPlugin struct { plugin.MatterfossPlugin } func (p *MyPlugin) OnDeactivate() error { c := make(chan bool) <-c return nil } func main() { plugin.ClientMain(&MyPlugin{}) } `, }, th.App, th.App.NewPluginAPI) defer tearDown() done := make(chan bool) go func() { defer close(done) th.App.Srv().ShutDownPlugins() }() select { case <-done: case <-time.After(15 * time.Second): require.Fail(t, "failed to force plugin shutdown after 10 seconds") } }
explode_data.jsonl/39281
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 461 }
[ 2830, 3393, 11546, 62004, 2271, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 311, 10146, 9575, 23766, 315, 6301, 9006, 1138, 197, 630, 70479, 1669, 18626, 1155, 340, 16867, 270, 836,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHttpParser_simpleResponse(t *testing.T) { data := "HTTP/1.1 200 OK\r\n" + "Date: Tue, 14 Aug 2012 22:31:45 GMT\r\n" + "Expires: -1\r\n" + "Cache-Control: private, max-age=0\r\n" + "Content-Type: text/html; charset=UTF-8\r\n" + "Content-Encoding: gzip\r\n" + "Server: gws\r\n" + "Content-Length: 0\r\n" + "X-XSS-Protection: 1; mode=block\r\n" + "X-Frame-Options: SAMEORIGIN\r\n" + "\r\n" message, ok, complete := testParse(nil, data) assert.True(t, ok) assert.True(t, complete) assert.False(t, message.isRequest) assert.Equal(t, 200, int(message.statusCode)) assert.Equal(t, "OK", string(message.statusPhrase)) assert.True(t, isVersion(message.version, 1, 1)) assert.Equal(t, 262, int(message.size)) assert.Equal(t, 0, message.contentLength) }
explode_data.jsonl/16487
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 2905, 6570, 30015, 2582, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 330, 9230, 14, 16, 13, 16, 220, 17, 15, 15, 10402, 12016, 1699, 1, 3610, 197, 197, 1, 1916, 25, 39167, 11, 220, 16, 19, 4928, 220, 17, 15, 16, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUpgradeCmd drives the `helm upgrade` command through a table of
// scenarios: plain upgrades, flag variations (--timeout, --reset-values,
// --reuse-values, --wait, --description), upgrade-or-install (-i), and
// failure cases for charts with missing or bad dependencies.
func TestUpgradeCmd(t *testing.T) {
	// Scratch chart directory; cleaned up at the end of the test.
	tmpChart, _ := ioutil.TempDir("testdata", "tmp")
	defer os.RemoveAll(tmpChart)
	cfile := &chart.Metadata{
		Name:        "testUpgradeChart",
		Description: "A Helm chart for Kubernetes",
		Version:     "0.1.0",
	}
	chartPath, err := chartutil.Create(cfile, tmpChart)
	if err != nil {
		t.Errorf("Error creating chart for upgrade: %v", err)
	}
	ch, _ := chartutil.Load(chartPath)
	// Seed a pre-existing release that the upgrade scenarios will target.
	_ = helm.ReleaseMock(&helm.MockReleaseOptions{
		Name:  "funny-bunny",
		Chart: ch,
	})

	// update chart version
	cfile = &chart.Metadata{
		Name:        "testUpgradeChart",
		Description: "A Helm chart for Kubernetes",
		Version:     "0.1.2",
	}
	chartPath, err = chartutil.Create(cfile, tmpChart)
	if err != nil {
		t.Errorf("Error creating chart: %v", err)
	}
	ch, err = chartutil.Load(chartPath)
	if err != nil {
		t.Errorf("Error loading updated chart: %v", err)
	}

	// update chart version again
	cfile = &chart.Metadata{
		Name:        "testUpgradeChart",
		Description: "A Helm chart for Kubernetes",
		Version:     "0.1.3",
	}
	chartPath, err = chartutil.Create(cfile, tmpChart)
	if err != nil {
		t.Errorf("Error creating chart: %v", err)
	}
	var ch2 *chart.Chart
	ch2, err = chartutil.Load(chartPath)
	if err != nil {
		t.Errorf("Error loading updated chart: %v", err)
	}

	// Fixture charts used by the dependency-failure scenarios.
	originalDepsPath := filepath.Join("testdata/testcharts/reqtest")
	missingDepsPath := filepath.Join("testdata/testcharts/chart-missing-deps")
	badDepsPath := filepath.Join("testdata/testcharts/chart-bad-requirements")
	var ch3 *chart.Chart
	ch3, err = chartutil.Load(originalDepsPath)
	if err != nil {
		t.Errorf("Error loading chart with missing dependencies: %v", err)
	}

	tests := []releaseCase{
		{
			name:     "upgrade a release",
			args:     []string{"funny-bunny", chartPath},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 2, Chart: ch}),
			expected: "Release \"funny-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 2, Chart: ch})},
		},
		{
			name:     "upgrade a release with timeout",
			args:     []string{"funny-bunny", chartPath},
			flags:    []string{"--timeout", "120"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 3, Chart: ch2}),
			expected: "Release \"funny-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 3, Chart: ch2})},
		},
		{
			name:     "upgrade a release with --reset-values",
			args:     []string{"funny-bunny", chartPath},
			flags:    []string{"--reset-values", "true"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 4, Chart: ch2}),
			expected: "Release \"funny-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 4, Chart: ch2})},
		},
		{
			name:     "upgrade a release with --reuse-values",
			args:     []string{"funny-bunny", chartPath},
			flags:    []string{"--reuse-values", "true"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 5, Chart: ch2}),
			expected: "Release \"funny-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "funny-bunny", Version: 5, Chart: ch2})},
		},
		{
			// -i installs the release when it does not exist yet.
			name:     "install a release with 'upgrade --install'",
			args:     []string{"zany-bunny", chartPath},
			flags:    []string{"-i"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "zany-bunny", Version: 1, Chart: ch}),
			expected: "Release \"zany-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "zany-bunny", Version: 1, Chart: ch})},
		},
		{
			name:     "install a release with 'upgrade --install' and timeout",
			args:     []string{"crazy-bunny", chartPath},
			flags:    []string{"-i", "--timeout", "120"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 1, Chart: ch}),
			expected: "Release \"crazy-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 1, Chart: ch})},
		},
		{
			name:     "install a release with 'upgrade --install' and custom description",
			args:     []string{"crazy-bunny", chartPath},
			flags:    []string{"-i", "--description", "foo"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 1, Chart: ch, Description: "foo"}),
			expected: "Release \"crazy-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 1, Chart: ch, Description: "foo"})},
		},
		{
			name:     "upgrade a release with wait",
			args:     []string{"crazy-bunny", chartPath},
			flags:    []string{"--wait"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 2, Chart: ch2}),
			expected: "Release \"crazy-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 2, Chart: ch2})},
		},
		{
			name:     "upgrade a release with description",
			args:     []string{"crazy-bunny", chartPath},
			flags:    []string{"--description", "foo"},
			resp:     helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 2, Chart: ch2}),
			expected: "Release \"crazy-bunny\" has been upgraded. Happy Helming!\n",
			rels:     []*release.Release{helm.ReleaseMock(&helm.MockReleaseOptions{Name: "crazy-bunny", Version: 2, Chart: ch2, Description: "foo"})},
		},
		{
			// Dependency problems should surface as command errors.
			name: "upgrade a release with missing dependencies",
			args: []string{"bonkers-bunny", missingDepsPath},
			resp: helm.ReleaseMock(&helm.MockReleaseOptions{Name: "bonkers-bunny", Version: 1, Chart: ch3}),
			err:  true,
		},
		{
			name: "upgrade a release with bad dependencies",
			args: []string{"bonkers-bunny", badDepsPath},
			resp: helm.ReleaseMock(&helm.MockReleaseOptions{Name: "bonkers-bunny", Version: 1, Chart: ch3}),
			err:  true,
		},
	}
	cmd := func(c *helm.FakeClient, out io.Writer) *cobra.Command {
		return newUpgradeCmd(c, out)
	}
	runReleaseCases(t, tests, cmd)
}
explode_data.jsonl/78484
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2556 }
[ 2830, 3393, 43861, 15613, 1155, 353, 8840, 836, 8, 341, 20082, 14488, 11, 716, 1669, 43144, 65009, 6184, 445, 92425, 497, 330, 5173, 1138, 16867, 2643, 84427, 10368, 14488, 340, 1444, 1192, 1669, 609, 15941, 46475, 515, 197, 21297, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFlickr runs the Flickr micropub adapter against a stub HTTP server
// that imitates the Flickr API, checking that "reply" posts become photo
// comments and "like" posts become favorites.
func TestFlickr(t *testing.T) {
	// Captures the form values of each POST so assertions can inspect them.
	qs := make(chan url.Values, 1)
	s := httptest.NewServer(
		http.HandlerFunc(
			func(w http.ResponseWriter, r *http.Request) {
				// Stubbed login check used by the client on startup.
				if r.Method == "GET" && r.FormValue("method") == "flickr.test.login" {
					w.Write([]byte(`{"user":{"username":{"_content":"someone"}}}`))
					return
				}
				if r.Method != "POST" {
					return
				}
				r.ParseForm()
				qs <- r.PostForm
				// Only the addComment call returns a payload the client reads.
				if r.FormValue("method") == "flickr.photos.comments.addComment" {
					w.Write([]byte(`{"comment":{"permalink":"https://www.flickr.com/someone/photos/43333233#comment-123"}}`))
				}
			},
		),
	)
	defer s.Close()
	flickr, err := Flickr(FlickrOptions{
		BaseURL: s.URL,
	})
	if !assert.Nil(t, err) {
		return
	}
	t.Run("reply", func(t *testing.T) {
		assert := assert.New(t)
		location, err := flickr.Create(map[string][]interface{}{
			"hx-kind":     {"reply"},
			"in-reply-to": {"https://www.flickr.com/photos/someone/43324322/"},
			"content":     {"cool pic"},
		})
		assert.Nil(err)
		// Location is the permalink returned by the stubbed addComment call.
		assert.Equal("https://www.flickr.com/someone/photos/43333233#comment-123", location)
		select {
		case q := <-qs:
			assert.Equal("flickr.photos.comments.addComment", q.Get("method"))
			assert.Equal("43324322", q.Get("photo_id"))
			assert.Equal("cool pic", q.Get("comment_text"))
		case <-time.After(time.Second):
			t.Fatal("expected request to be made within 1s")
		}
	})
	t.Run("like", func(t *testing.T) {
		assert := assert.New(t)
		location, err := flickr.Create(map[string][]interface{}{
			"hx-kind": {"like"},
			"like-of": {"https://www.flickr.com/photos/someone/43324322/"},
		})
		assert.Nil(err)
		// A like points back at the photo itself.
		assert.Equal("https://www.flickr.com/photos/someone/43324322/", location)
		select {
		case q := <-qs:
			assert.Equal("flickr.favorites.add", q.Get("method"))
			assert.Equal("43324322", q.Get("photo_id"))
		case <-time.After(time.Second):
			t.Fatal("expected request to be made within 1s")
		}
	})
}
explode_data.jsonl/47933
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 828 }
[ 2830, 3393, 37, 31803, 1155, 353, 8840, 836, 8, 341, 18534, 82, 1669, 1281, 35190, 2515, 35145, 11, 220, 16, 692, 1903, 1669, 54320, 70334, 7121, 5475, 1006, 197, 28080, 89164, 1006, 298, 29244, 3622, 1758, 37508, 11, 435, 353, 1254, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMakeFuncInterface(t *testing.T) { fn := func(i int) int { return i } incr := func(in []Value) []Value { return []Value{ValueOf(int(in[0].Int() + 1))} } fv := MakeFunc(TypeOf(fn), incr) ValueOf(&fn).Elem().Set(fv) if r := fn(2); r != 3 { t.Errorf("Call returned %d, want 3", r) } if r := fv.Call([]Value{ValueOf(14)})[0].Int(); r != 15 { t.Errorf("Call returned %d, want 15", r) } if r := fv.Interface().(func(int) int)(26); r != 27 { t.Errorf("Call returned %d, want 27", r) } }
explode_data.jsonl/29554
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 8078, 9626, 5051, 1155, 353, 8840, 836, 8, 341, 40095, 1669, 2915, 1956, 526, 8, 526, 314, 470, 600, 456, 197, 98428, 1669, 2915, 5900, 3056, 1130, 8, 3056, 1130, 341, 197, 853, 3056, 1130, 90, 1130, 2124, 1548, 5900, 58...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInsertMessage(t *testing.T) { db, mock, err := NewMock() if err != nil { t.Fail() } dw := DatabaseWrapper{db: db} defer dw.db.Close() query := regexp.QuoteMeta("insert into messages(id, roomID, userID, messageText, created) values(default, $1, $2, $3, $4)") t.Run("Test InsertMessage", func(t *testing.T) { created := time.Now() prep := mock.ExpectPrepare(query) prep.ExpectExec().WithArgs(message.RoomID, message.UserID, message.Text, created).WillReturnResult(sqlmock.NewResult(0, 1)) err := dw.InsertMessage(message.RoomID, message.UserID, message.Text, created) assert.NoError(t, err) }) }
explode_data.jsonl/7586
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 13780, 2052, 1155, 353, 8840, 836, 8, 341, 20939, 11, 7860, 11, 1848, 1669, 1532, 11571, 741, 743, 1848, 961, 2092, 341, 197, 3244, 57243, 741, 197, 630, 2698, 86, 1669, 9994, 11542, 90, 1999, 25, 2927, 532, 16867, 13835, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoopRet1(t *testing.T) { const SCRIPT = ` for (var i = 0; i < 20; i++) { } ` testScript1(SCRIPT, _undefined, t) }
explode_data.jsonl/75260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 57 }
[ 2830, 3393, 14620, 12020, 16, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 2023, 320, 947, 600, 284, 220, 15, 26, 600, 366, 220, 17, 15, 26, 600, 2457, 314, 456, 197, 19324, 18185, 5910, 16, 7, 24787, 11, 716, 9614, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestItoa(t *testing.T) { for i, b := range tmap { assert.Equal(t, itoa(i), string(b)) } for i := int64(minItoa); i <= maxItoa; i++ { assert.Equal(t, itoa(i), strconv.Itoa(int(i))) } }
explode_data.jsonl/48344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 40, 49095, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 293, 1669, 2088, 259, 2186, 341, 197, 6948, 12808, 1155, 11, 432, 19533, 1956, 701, 914, 1883, 1171, 197, 532, 2023, 600, 1669, 526, 21, 19, 14146, 40, 49095, 1215,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestGetVPC is a table-driven check of service.GetVPC for an AlibabaCloud
// query in region cn-qingdao.
// NOTE(review): this appears to hit a real/configured provider rather than a
// mock — confirm; also, wantVpcs is declared but never compared against the
// result, so only error state and non-emptiness are asserted.
func TestGetVPC(t *testing.T) {
	type args struct {
		ctx context.Context
		req service.GetVPCRequest
	}
	tests := []struct {
		name     string
		args     args
		wantVpcs []model.Vpc
		wantErr  bool
	}{
		{
			name: "测试 Vpc 查询",
			args: args{
				ctx: nil,
				req: service.GetVPCRequest{
					Provider:   "AlibabaCloud",
					RegionId:   "cn-qingdao",
					VpcName:    "测试一键创建",
					PageNumber: 0,
					PageSize:   20,
					AccountKey: "",
				},
			},
			wantVpcs: nil,
			wantErr:  false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotVpc, err := service.GetVPC(tt.args.ctx, tt.args.req)
			if (err != nil) != tt.wantErr {
				t.Errorf("GetVPC() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			// An empty VPC list is treated as a failure for this query.
			if len(gotVpc.Vpcs) == 0 {
				t.Errorf("GetVPC() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
explode_data.jsonl/72644
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 1949, 53, 4872, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 20985, 2266, 9328, 198, 197, 24395, 2473, 2234, 53, 4872, 1900, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 31215,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestColorRGBA round-trips colors between the proto representation
// (float channels in [0,1], optional FloatValue alpha) and color.RGBA
// (8-bit channels), checking both directions for each table entry.
func TestColorRGBA(t *testing.T) {
	for _, test := range []struct {
		name  string
		color *cpb.Color
		rgba  *color.RGBA
	}{
		{"White", &cpb.Color{Red: 1, Green: 1, Blue: 1}, &color.RGBA{R: 255, G: 255, B: 255, A: 255}},
		{"ExplicitAlpha", &cpb.Color{Red: 1, Green: 1, Blue: 1, Alpha: &wpb.FloatValue{Value: 1}}, &color.RGBA{R: 255, G: 255, B: 255, A: 255}},
		{"Red", &cpb.Color{Red: 1, Green: 0, Blue: 0}, &color.RGBA{R: 255, G: 0, B: 0, A: 255}},
		{"Float", &cpb.Color{Red: 0.5, Green: 0, Blue: 0}, &color.RGBA{R: 128, G: 0, B: 0, A: 255}},
		{"MultiFloat", &cpb.Color{Red: 0.25, Green: 0.5, Blue: 0}, &color.RGBA{R: 64, G: 128, B: 0, A: 255}},
		{"Black", &cpb.Color{Red: 0, Green: 0, Blue: 0}, &color.RGBA{R: 0, G: 0, B: 0, A: 255}},
		{"PartialAlpha", &cpb.Color{Red: 0, Green: 0.5, Blue: 0, Alpha: &wpb.FloatValue{Value: 0.5}}, &color.RGBA{R: 0, G: 128, B: 0, A: 128}},
		{"NoAlpha", &cpb.Color{Red: 0, Green: 0.5, Blue: 0, Alpha: &wpb.FloatValue{Value: 0}}, &color.RGBA{R: 0, G: 128, B: 0, A: 0}},
	} {
		t.Run(test.name, func(t *testing.T) {
			// Forward conversion: proto -> RGBA.
			rgba := ProtoColorToRGBA(test.color)
			t.Run("RGBA", func(t *testing.T) {
				assertEqual(t, "R", rgba.R, test.rgba.R)
				assertEqual(t, "G", rgba.G, test.rgba.G)
				assertEqual(t, "B", rgba.B, test.rgba.B)
				assertEqual(t, "A", rgba.A, test.rgba.A)
			})
			// Reverse conversion: RGBA back to proto must match the input.
			t.Run("Color", func(t *testing.T) {
				color := RGBAToProtoColor(rgba)
				assertEqual(t, "Red", color.GetRed(), test.color.GetRed())
				assertEqual(t, "Green", color.GetGreen(), test.color.GetGreen())
				assertEqual(t, "Blue", color.GetBlue(), test.color.GetBlue())
				if test.color.Alpha == nil {
					// A nil proto alpha is treated as fully opaque (1.0).
					assertEqual(t, "Alpha", color.GetAlpha().GetValue(), float32(1))
				} else {
					assertEqual(t, "Alpha", color.GetAlpha().GetValue(), test.color.GetAlpha().GetValue())
				}
			})
		})
	}
}
explode_data.jsonl/17648
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 876 }
[ 2830, 3393, 1636, 58927, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 21481, 353, 4672, 65, 6669, 198, 197, 197, 20400, 220, 353, 3423, 80114, 32, 198, 197, 59403, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChunkUploadDownload exercises the /chunks HTTP API: rejecting empty
// uploads, uploading and re-fetching a chunk, pin-header handling (invalid,
// missing, and valid), and retrieval with recovery targets.
func TestChunkUploadDownload(t *testing.T) {
	var (
		targets         = "0x222"
		chunksEndpoint  = "/chunks"
		chunksResource  = func(a swarm.Address) string { return "/chunks/" + a.String() }
		resourceTargets = func(addr swarm.Address) string { return "/chunks/" + addr.String() + "?targets=" + targets }
		chunk           = testingc.GenerateTestRandomChunk()
		statestoreMock  = statestore.NewStateStore()
		logger          = logging.New(ioutil.Discard, 0)
		tag             = tags.NewTags(statestoreMock, logger)
		storerMock      = mock.NewStorer()
		pinningMock     = pinning.NewServiceMock()
		client, _, _    = newTestServer(t, testServerOptions{
			Storer:  storerMock,
			Pinning: pinningMock,
			Tags:    tag,
			Post:    mockpost.New(mockpost.WithAcceptAll()),
		})
	)

	t.Run("empty chunk", func(t *testing.T) {
		// Zero-length body must be rejected with 400.
		jsonhttptest.Request(t, client, http.MethodPost, chunksEndpoint, http.StatusBadRequest,
			jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, batchOkStr),
			jsonhttptest.WithExpectedJSONResponse(jsonhttp.StatusResponse{
				Message: "data length",
				Code:    http.StatusBadRequest,
			}),
		)
	})

	t.Run("ok", func(t *testing.T) {
		jsonhttptest.Request(t, client, http.MethodPost, chunksEndpoint, http.StatusCreated,
			jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, batchOkStr),
			jsonhttptest.WithRequestBody(bytes.NewReader(chunk.Data())),
			jsonhttptest.WithExpectedJSONResponse(api.ChunkAddressResponse{Reference: chunk.Address()}),
		)

		// try to fetch the same chunk
		resp := request(t, client, http.MethodGet, chunksResource(chunk.Address()), nil, http.StatusOK)
		data, err := ioutil.ReadAll(resp.Body)
		if err != nil {
			t.Fatal(err)
		}
		if !bytes.Equal(chunk.Data(), data) {
			t.Fatal("data retrieved doesnt match uploaded content")
		}
	})

	t.Run("pin-invalid-value", func(t *testing.T) {
		// An unparseable pin header value must not pin the chunk.
		jsonhttptest.Request(t, client, http.MethodPost, chunksEndpoint, http.StatusCreated,
			jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, batchOkStr),
			jsonhttptest.WithRequestBody(bytes.NewReader(chunk.Data())),
			jsonhttptest.WithExpectedJSONResponse(api.ChunkAddressResponse{Reference: chunk.Address()}),
			jsonhttptest.WithRequestHeader(api.SwarmPinHeader, "invalid-pin"),
		)

		// Also check if the chunk is NOT pinned
		if storerMock.GetModeSet(chunk.Address()) == storage.ModeSetPin {
			t.Fatal("chunk should not be pinned")
		}
	})
	t.Run("pin-header-missing", func(t *testing.T) {
		// No pin header at all: upload succeeds, chunk stays unpinned.
		jsonhttptest.Request(t, client, http.MethodPost, chunksEndpoint, http.StatusCreated,
			jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, batchOkStr),
			jsonhttptest.WithRequestBody(bytes.NewReader(chunk.Data())),
			jsonhttptest.WithExpectedJSONResponse(api.ChunkAddressResponse{Reference: chunk.Address()}),
		)

		// Also check if the chunk is NOT pinned
		if storerMock.GetModeSet(chunk.Address()) == storage.ModeSetPin {
			t.Fatal("chunk should not be pinned")
		}
	})
	t.Run("pin-ok", func(t *testing.T) {
		reference := chunk.Address()
		jsonhttptest.Request(t, client, http.MethodPost, chunksEndpoint, http.StatusCreated,
			jsonhttptest.WithRequestHeader(api.SwarmPostageBatchIdHeader, batchOkStr),
			jsonhttptest.WithRequestBody(bytes.NewReader(chunk.Data())),
			jsonhttptest.WithExpectedJSONResponse(api.ChunkAddressResponse{Reference: reference}),
			jsonhttptest.WithRequestHeader(api.SwarmPinHeader, "True"),
		)

		// The chunk must exist in the store...
		has, err := storerMock.Has(context.Background(), reference)
		if err != nil {
			t.Fatal(err)
		}
		if !has {
			t.Fatal("storer check root chunk reference: have none; want one")
		}

		// ...and be recorded as exactly one root pin.
		refs, err := pinningMock.Pins()
		if err != nil {
			t.Fatal(err)
		}
		if have, want := len(refs), 1; have != want {
			t.Fatalf("root pin count mismatch: have %d; want %d", have, want)
		}
		if have, want := refs[0], reference; !have.Equal(want) {
			t.Fatalf("root pin reference mismatch: have %q; want %q", have, want)
		}
	})

	t.Run("retrieve-targets", func(t *testing.T) {
		resp := request(t, client, http.MethodGet, resourceTargets(chunk.Address()), nil, http.StatusOK)

		// Check if the target is obtained correctly
		if resp.Header.Get(api.TargetsRecoveryHeader) != targets {
			t.Fatalf("targets mismatch. got %s, want %s", resp.Header.Get(api.TargetsRecoveryHeader), targets)
		}
	})
}
explode_data.jsonl/55703
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1673 }
[ 2830, 3393, 28304, 13844, 11377, 1155, 353, 8840, 836, 8, 1476, 2405, 2399, 197, 28861, 82, 260, 284, 330, 15, 87, 17, 17, 17, 698, 197, 23049, 15296, 27380, 220, 284, 3521, 84263, 698, 197, 23049, 15296, 4783, 220, 284, 2915, 2877, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWorkerNotControlledByUs(t *testing.T) { f := newFixture(t) startTime := metav1.Now() completionTime := metav1.Now() mpiJob := newMPIJob("test", int32Ptr(64), &startTime, &completionTime) f.setUpMPIJob(mpiJob) f.setUpConfigMap(newConfigMap(mpiJob, 8, 8)) f.setUpRbac(mpiJob, 8) worker := newWorker(mpiJob, 8, 8, gpuResourceName, false) worker.OwnerReferences = nil f.setUpWorker(worker) f.runExpectError(getKey(mpiJob, t), gpuResourceName) }
explode_data.jsonl/75015
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 21936, 2623, 3273, 832, 1359, 3558, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 340, 21375, 1462, 1669, 77520, 16, 13244, 741, 32810, 14386, 1462, 1669, 77520, 16, 13244, 2822, 197, 39479, 12245, 1669, 501, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDomainDescribe drives the `domain describe` CLI command through three
// scenarios: a missing required flag, an API error surfaced to the user, and
// the successful describe output.
func TestDomainDescribe(t *testing.T) {
	args := testutil.Args
	scenarios := []testutil.TestScenario{
		{
			// --name is required; expect an argument-parsing error.
			Args:      args("domain describe --service-id 123 --version 1"),
			WantError: "error parsing arguments: required flag --name not provided",
		},
		{
			// GetDomain fails; the error should propagate unchanged.
			Args: args("domain describe --service-id 123 --version 1 --name www.test.com"),
			API: mock.API{
				ListVersionsFn: testutil.ListVersions,
				GetDomainFn:    getDomainError,
			},
			WantError: errTest.Error(),
		},
		{
			// Happy path: stdout matches the canned describe output.
			Args: args("domain describe --service-id 123 --version 1 --name www.test.com"),
			API: mock.API{
				ListVersionsFn: testutil.ListVersions,
				GetDomainFn:    getDomainOK,
			},
			WantOutput: describeDomainOutput,
		},
	}
	for _, testcase := range scenarios {
		t.Run(testcase.Name, func(t *testing.T) {
			var stdout bytes.Buffer
			opts := testutil.NewRunOpts(testcase.Args, &stdout)
			opts.APIClient = mock.APIClient(testcase.API)
			err := app.Run(opts)
			testutil.AssertErrorContains(t, err, testcase.WantError)
			testutil.AssertString(t, testcase.WantOutput, stdout.String())
		})
	}
}
explode_data.jsonl/17441
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 13636, 74785, 1155, 353, 8840, 836, 8, 341, 31215, 1669, 1273, 1314, 51015, 198, 29928, 60494, 1669, 3056, 1944, 1314, 8787, 54031, 515, 197, 197, 515, 298, 197, 4117, 25, 414, 2827, 445, 12204, 7512, 1177, 7936, 12897, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiPolygon(t *testing.T) { mp := orb.MultiPolygon{ {{{0, 0}, {1, 0}, {1, 1}, {0, 0}}}, {{{0, 0}, {0, 0}}}, } mp = DouglasPeucker(0).MultiPolygon(mp) if len(mp) != 1 { t.Errorf("should remove empty polygon") } }
explode_data.jsonl/40645
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 20358, 37619, 1155, 353, 8840, 836, 8, 341, 53230, 1669, 36366, 57706, 37619, 515, 197, 197, 90691, 15, 11, 220, 15, 2137, 314, 16, 11, 220, 15, 2137, 314, 16, 11, 220, 16, 2137, 314, 15, 11, 220, 15, 74869, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestSetupCerts runs SetupCerts against a fake command runner and checks
// that the expected certificate files are transferred and the expected
// symlink commands are issued.
func TestSetupCerts(t *testing.T) {
	tempDir := tests.MakeTempDir()
	defer os.RemoveAll(tempDir)

	k8s := config.KubernetesConfig{
		APIServerName: constants.APIServerName,
		DNSDomain:     constants.ClusterDNSDomain,
		ServiceCIDR:   constants.DefaultServiceCIDR,
	}

	if err := os.Mkdir(filepath.Join(tempDir, "certs"), 0777); err != nil {
		t.Fatalf("error create certificate directory: %v", err)
	}

	// Pre-create a custom CA cert so SetupCerts has something to pick up.
	if err := util.GenerateCACert(
		filepath.Join(tempDir, "certs", "mycert.pem"),
		filepath.Join(tempDir, "certs", "mykey.pem"),
		"Test Certificate",
	); err != nil {
		t.Fatalf("error generating certificate: %v", err)
	}

	// Shell commands SetupCerts is expected to run on the guest; the fake
	// runner returns "-" for each and fails on anything unexpected.
	expected := map[string]string{
		`sudo /bin/bash -c "test -f /usr/share/ca-certificates/mycert.pem || ln -s /etc/ssl/certs/mycert.pem /usr/share/ca-certificates/mycert.pem"`:         "-",
		`sudo /bin/bash -c "test -f /usr/share/ca-certificates/minikubeCA.pem || ln -s /etc/ssl/certs/minikubeCA.pem /usr/share/ca-certificates/minikubeCA.pem"`: "-",
	}
	f := command.NewFakeCommandRunner()
	f.SetCommandToOutput(expected)

	// Every cert in the default set, plus the CA and the custom cert above,
	// should end up transferred into the fake runner.
	var filesToBeTransferred []string
	for _, cert := range certs {
		filesToBeTransferred = append(filesToBeTransferred, filepath.Join(localpath.MiniPath(), cert))
	}
	filesToBeTransferred = append(filesToBeTransferred, filepath.Join(localpath.MiniPath(), "ca.crt"))
	filesToBeTransferred = append(filesToBeTransferred, filepath.Join(localpath.MiniPath(), "certs", "mycert.pem"))

	if err := SetupCerts(f, k8s, config.Node{}); err != nil {
		t.Fatalf("Error starting cluster: %v", err)
	}
	for _, cert := range filesToBeTransferred {
		_, err := f.GetFileToContents(cert)
		if err != nil {
			t.Errorf("Cert not generated: %s", cert)
		}
	}
}
explode_data.jsonl/1756
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 673 }
[ 2830, 3393, 21821, 34, 15546, 1155, 353, 8840, 836, 8, 341, 16280, 6184, 1669, 7032, 50133, 12151, 6184, 741, 16867, 2643, 84427, 9758, 6184, 692, 16463, 23, 82, 1669, 2193, 11352, 29827, 2648, 515, 197, 197, 2537, 1637, 2836, 675, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestManualSignInStoresUserGroupsInTheSession signs in through the htpasswd
// form flow and verifies the configured user groups are persisted in the
// session that subsequent requests load.
func TestManualSignInStoresUserGroupsInTheSession(t *testing.T) {
	userGroups := []string{"somegroup", "someothergroup"}

	opts := baseTestOptions()
	opts.HtpasswdUserGroups = userGroups
	err := validation.Validate(opts)
	if err != nil {
		t.Fatal(err)
	}

	proxy, err := NewOAuthProxy(opts, func(email string) bool {
		return true
	})
	if err != nil {
		t.Fatal(err)
	}
	// Bypass real credential checking; any username/password is accepted.
	proxy.basicAuthValidator = AlwaysSuccessfulValidator{}

	rw := httptest.NewRecorder()
	formData := url.Values{}
	formData.Set("username", "someuser")
	formData.Set("password", "somepass")
	signInReq, _ := http.NewRequest(http.MethodPost, "/oauth2/sign_in", strings.NewReader(formData.Encode()))
	signInReq.Header.Add("Content-Type", "application/x-www-form-urlencoded")
	proxy.ServeHTTP(rw, signInReq)

	// Successful sign-in redirects.
	assert.Equal(t, http.StatusFound, rw.Code)

	// Replay the cookies from the sign-in response on a follow-up request
	// and load the session they reference.
	req, _ := http.NewRequest(http.MethodGet, "/something", strings.NewReader(formData.Encode()))
	for _, c := range rw.Result().Cookies() {
		req.AddCookie(c)
	}
	s, err := proxy.sessionStore.Load(req)
	if err != nil {
		t.Fatal(err)
	}
	assert.Equal(t, userGroups, s.Groups)
}
explode_data.jsonl/36391
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 428 }
[ 2830, 3393, 52092, 35423, 69026, 1474, 22173, 86093, 5283, 1155, 353, 8840, 836, 8, 341, 19060, 22173, 1669, 3056, 917, 4913, 14689, 4074, 497, 330, 14689, 1575, 4074, 63159, 64734, 1669, 2331, 2271, 3798, 741, 64734, 3839, 790, 395, 6377...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCoalescer verifies that the rhcc coalescer associates packages with the
// gold repository found in lower layers: packages present in a layer at or
// below a repository-bearing layer inherit that repository.
func TestCoalescer(t *testing.T) {
	ctx := zlog.Test(context.Background(), t)
	coalescer := &coalescer{}
	pkgs := test.GenUniquePackages(6)
	for _, p := range pkgs {
		// Mark them as if they came from this package's package scanner
		p.RepositoryHint = `rhcc`
	}
	repo := []*claircore.Repository{&goldRepo}
	// Six layers with a growing prefix of the packages; layers 2 and 4
	// (0-indexed) also carry the repository.
	layerArtifacts := []*indexer.LayerArtifacts{
		{
			Hash: test.RandomSHA256Digest(t),
			Pkgs: pkgs[:1],
		},
		{
			Hash: test.RandomSHA256Digest(t),
			Pkgs: pkgs[:2],
		},
		{
			Hash:  test.RandomSHA256Digest(t),
			Pkgs:  pkgs[:3],
			Repos: repo,
		},
		{
			Hash: test.RandomSHA256Digest(t),
			Pkgs: pkgs[:4],
		},
		{
			Hash:  test.RandomSHA256Digest(t),
			Pkgs:  pkgs[:5],
			Repos: repo,
		},
		{
			Hash: test.RandomSHA256Digest(t),
			Pkgs: pkgs,
		},
	}
	ir, err := coalescer.Coalesce(ctx, layerArtifacts)
	if err != nil {
		t.Fatalf("received error from coalesce method: %v", err)
	}
	// Expect 0-5 to have gotten associated with the repository.
	for i := range pkgs {
		es, ok := ir.Environments[strconv.Itoa(i)]
		if !ok && i == 5 {
			// Left out the last package.
			continue
		}
		e := es[0]
		if len(e.RepositoryIDs) == 0 {
			t.Error("expected some repositories")
		}
		for _, id := range e.RepositoryIDs {
			r := ir.Repositories[id]
			if got, want := r.Name, goldRepo.Name; got != want {
				t.Errorf("got: %q, want: %q", got, want)
			}
		}
	}
}
explode_data.jsonl/2713
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 642 }
[ 2830, 3393, 7339, 3831, 3828, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1147, 839, 8787, 5378, 19047, 1507, 259, 340, 197, 1015, 3831, 3828, 1669, 609, 1015, 3831, 3828, 16094, 3223, 74, 5857, 1669, 1273, 65384, 22811, 69513, 7, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func Test_return_single_match(t *testing.T) { projects := NewProjects() project1 := Project{Name: "PROJECT_1"} project2 := Project{Name: "PROJECT_2"} project3 := Project{Name: "PROJECT_3"} project4 := Project{Name: "PROJECT_4"} projects.AddAll([]Project{project1, project2, project3, project4}) filteredProjects := FuzzyMatch("PROJECT_2", projects) assert.Equal(t, []Project{project2}, filteredProjects.List()) }
explode_data.jsonl/10976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 12511, 19487, 10708, 1155, 353, 8840, 836, 8, 341, 197, 17161, 1669, 1532, 29958, 741, 72470, 16, 1669, 5787, 63121, 25, 330, 41455, 62, 16, 16707, 72470, 17, 1669, 5787, 63121, 25, 330, 41455, 62, 17, 16707, 72470, 18, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntString_Unmarshal(t *testing.T) { var i sdk.IntString raw := []byte(`100`) err := json.Unmarshal(raw, &i) if err != nil { t.Error(err) } if i.Valid != true { t.Error("Unmarshalled IntString is not valid") } if i.Value != 100 { t.Errorf("Unmarshalled IntString should be 100, got: %d", i.Value) } }
explode_data.jsonl/81860
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 1072, 703, 40687, 27121, 1155, 353, 8840, 836, 8, 341, 2405, 600, 45402, 7371, 703, 198, 76559, 1669, 3056, 3782, 5809, 16, 15, 15, 63, 692, 9859, 1669, 2951, 38097, 22460, 11, 609, 72, 340, 743, 1848, 961, 2092, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestLabelTransaction(t *testing.T) { tests := []struct { name string // Whether the transaction should be known to the wallet. txKnown bool // Whether the test should write an existing label to disk. existingLabel bool // The overwrite parameter to call label transaction with. overwrite bool // The error we expect to be returned. expectedErr *er.ErrorCode }{ { name: "existing label, not overwrite", txKnown: true, existingLabel: true, overwrite: false, expectedErr: ErrTxLabelExists, }, { name: "existing label, overwritten", txKnown: true, existingLabel: true, overwrite: true, expectedErr: nil, }, { name: "no prexisting label, ok", txKnown: true, existingLabel: false, overwrite: false, expectedErr: nil, }, { name: "transaction unknown", txKnown: false, existingLabel: false, overwrite: false, expectedErr: ErrUnknownTransaction, }, } for _, test := range tests { test := test t.Run(test.name, func(t *testing.T) { t.Parallel() w, cleanup := testWallet(t) defer cleanup() // If the transaction should be known to the store, we // write txdetail to disk. if test.txKnown { rec, err := wtxmgr.NewTxRecord( TstSerializedTx, time.Now(), ) if err != nil { t.Fatal(err) } err = walletdb.Update(w.db, func(tx walletdb.ReadWriteTx) er.R { ns := tx.ReadWriteBucket( wtxmgrNamespaceKey, ) return w.TxStore.InsertTx( ns, rec, nil, ) }) if err != nil { t.Fatalf("could not insert tx: %v", err) } } // If we want to setup an existing label for the purpose // of the test, write one to disk. if test.existingLabel { err := w.LabelTransaction( *TstTxHash, "existing label", false, ) if err != nil { t.Fatalf("could not write label: %v", err) } } newLabel := "new label" err := w.LabelTransaction( *TstTxHash, newLabel, test.overwrite, ) if test.expectedErr == nil && err == nil { } else if test.expectedErr == nil || !test.expectedErr.Is(err) { t.Fatalf("expected: %v, got: %v", test.expectedErr, err) } }) } }
explode_data.jsonl/52560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1033 }
[ 2830, 3393, 2476, 8070, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 271, 197, 197, 322, 13139, 279, 7745, 1265, 387, 3881, 311, 279, 15085, 624, 197, 46237, 48206, 1807, 271, 197, 197, 322, 13139, 279,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetRelatableFiatCurrencies(t *testing.T) { CreateTestBot(t) btsusd, err := currency.NewPairFromStrings("BTC", "USD") if err != nil { t.Fatal(err) } btceur, err := currency.NewPairFromStrings("BTC", "EUR") if err != nil { t.Fatal(err) } p := GetRelatableFiatCurrencies(btsusd) if !p.Contains(btceur, true) { t.Fatal("Unexpected result") } btczar, err := currency.NewPairFromStrings("BTC", "ZAR") if err != nil { t.Fatal(err) } p = GetRelatableFiatCurrencies(btsusd) if !p.Contains(btczar, true) { t.Fatal("Unexpected result") } }
explode_data.jsonl/59232
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 1949, 6740, 15086, 37, 10358, 34, 19607, 1155, 353, 8840, 836, 8, 341, 75569, 2271, 23502, 1155, 692, 2233, 2576, 355, 67, 11, 1848, 1669, 11413, 7121, 12443, 3830, 20859, 445, 59118, 497, 330, 26749, 1138, 743, 1848, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestChannelGetDustSum(t *testing.T) { t.Run("dust sum tweakless", func(t *testing.T) { testGetDustSum(t, channeldb.SingleFunderTweaklessBit) }) t.Run("dust sum anchors zero htlc fee", func(t *testing.T) { testGetDustSum(t, channeldb.SingleFunderTweaklessBit| channeldb.AnchorOutputsBit| channeldb.ZeroHtlcTxFeeBit, ) }) }
explode_data.jsonl/72524
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 9629, 1949, 35, 590, 9190, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 67, 590, 2629, 51043, 1717, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18185, 1949, 35, 590, 9190, 1155, 11, 521, 1020, 783, 65, 23119, 37, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDowngradeAudience(t *testing.T) { decoder := xml.NewDecoder(strings.NewReader(thoseGuys2018)) var cmcScte2018 scte224.Audience decodeErr := decoder.Decode(&cmcScte2018) if nil != decodeErr { t.Log(decodeErr) t.FailNow() } pretty, marshalErr := xml.MarshalIndent(DowngradeAudience(cmcScte2018), "", " ") if nil != marshalErr { t.Log(marshalErr) t.FailNow() } downgraded := string(pretty) if thoseGuys2015 != downgraded { t.Log(downgraded) t.Log("did not match") t.Log(thoseGuys2015) t.Fail() } }
explode_data.jsonl/3238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 4454, 6937, 52949, 1835, 1155, 353, 8840, 836, 8, 1476, 197, 48110, 1669, 8396, 7121, 20732, 51442, 68587, 24365, 960, 16780, 1047, 17, 15, 16, 23, 1171, 2405, 9961, 66, 50, 302, 68, 17, 15, 16, 23, 274, 302, 68, 17, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestResetTimeoutPrecommitUponNewHeight(t *testing.T) { config.Consensus.SkipTimeoutCommit = false cs1, vss := randState(4) vs2, vs3, vs4 := vss[1], vss[2], vss[3] height, round := cs1.Height, cs1.Round partSize := types.BlockPartSizeBytes proposalCh := subscribe(cs1.eventBus, types.EventQueryCompleteProposal) newRoundCh := subscribe(cs1.eventBus, types.EventQueryNewRound) newBlockHeader := subscribe(cs1.eventBus, types.EventQueryNewBlockHeader) pv1, err := cs1.privValidator.GetPubKey() require.NoError(t, err) addr := pv1.Address() voteCh := subscribeToVoter(cs1, addr) // start round and wait for propose and prevote startTestRound(cs1, height, round) ensureNewRound(newRoundCh, height, round) ensureNewProposal(proposalCh, height, round) rs := cs1.GetRoundState() theBlockHash := rs.ProposalBlock.Hash() theBlockParts := rs.ProposalBlockParts.Header() ensurePrevote(voteCh, height, round) validatePrevote(t, cs1, round, vss[0], theBlockHash) signAddVotes(cs1, types.PrevoteType, theBlockHash, theBlockParts, vs2, vs3, vs4) ensurePrecommit(voteCh, height, round) validatePrecommit(t, cs1, round, round, vss[0], theBlockHash, theBlockHash) // add precommits signAddVotes(cs1, types.PrecommitType, nil, types.PartSetHeader{}, vs2) signAddVotes(cs1, types.PrecommitType, theBlockHash, theBlockParts, vs3) signAddVotes(cs1, types.PrecommitType, theBlockHash, theBlockParts, vs4) ensureNewBlockHeader(newBlockHeader, height, theBlockHash) prop, propBlock := decideProposal(cs1, vs2, height+1, 0) propBlockParts := propBlock.MakePartSet(partSize) if err := cs1.SetProposalAndBlock(prop, propBlock, propBlockParts, "some peer"); err != nil { t.Fatal(err) } ensureNewProposal(proposalCh, height+1, 0) rs = cs1.GetRoundState() assert.False( t, rs.TriggeredTimeoutPrecommit, "triggeredTimeoutPrecommit should be false at the beginning of each height") }
explode_data.jsonl/81663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 689 }
[ 2830, 3393, 14828, 7636, 4703, 17413, 49284, 3564, 3640, 1155, 353, 8840, 836, 8, 341, 25873, 94594, 13626, 57776, 7636, 33441, 284, 895, 198, 71899, 16, 11, 348, 778, 1669, 10382, 1397, 7, 19, 692, 5195, 82, 17, 11, 6165, 18, 11, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_getReadOnlyRootFS_override(t *testing.T) { os.Setenv("readonly_root_filesystem", "false") val := getReadOnlyRootFS() want := false if val != want { t.Errorf("want %t, but got %t", want, val) t.Fail() } }
explode_data.jsonl/11977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 3062, 20914, 8439, 8485, 48576, 1155, 353, 8840, 836, 8, 341, 25078, 4202, 3160, 445, 22569, 12993, 2458, 8948, 497, 330, 3849, 5130, 19302, 1669, 633, 20914, 8439, 8485, 741, 50780, 1669, 895, 198, 743, 1044, 961, 1366, 341...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReconciler_CancelDeployment_JobUpdate(t *testing.T) { // Create a base job job := mock.Job() // Create two deployments running := structs.NewDeployment(job) failed := structs.NewDeployment(job) failed.Status = structs.DeploymentStatusFailed // Make the job newer than the deployment job.Version += 10 cases := []struct { name string deployment *structs.Deployment cancel bool }{ { name: "running deployment", deployment: running, cancel: true, }, { name: "failed deployment", deployment: failed, cancel: false, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { // Create 10 allocations var allocs []*structs.Allocation for i := 0; i < 10; i++ { alloc := mock.Alloc() alloc.Job = job alloc.JobID = job.ID alloc.NodeID = uuid.Generate() alloc.Name = structs.AllocName(job.ID, job.TaskGroups[0].Name, uint(i)) alloc.TaskGroup = job.TaskGroups[0].Name allocs = append(allocs, alloc) } reconciler := NewAllocReconciler(testLogger(), allocUpdateFnIgnore, false, job.ID, job, c.deployment, allocs, nil) r := reconciler.Compute() var updates []*structs.DeploymentStatusUpdate if c.cancel { updates = []*structs.DeploymentStatusUpdate{ { DeploymentID: c.deployment.ID, Status: structs.DeploymentStatusCancelled, StatusDescription: structs.DeploymentStatusDescriptionNewerJob, }, } } // Assert the correct results assertResults(t, r, &resultExpectation{ createDeployment: nil, deploymentUpdates: updates, place: 0, inplace: 0, stop: 0, desiredTGUpdates: map[string]*structs.DesiredUpdates{ job.TaskGroups[0].Name: { Ignore: 10, }, }, }) }) } }
explode_data.jsonl/67250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 829 }
[ 2830, 3393, 693, 40446, 5769, 97485, 75286, 10598, 674, 4289, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 264, 2331, 2618, 198, 68577, 1669, 7860, 45293, 2822, 197, 322, 4230, 1378, 71542, 198, 197, 27173, 1669, 62845, 7121, 75286, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSelectStatementParsing(t *testing.T) { t.Run("Test valid select parsing", func(t *testing.T) { inputs := []string{ "Select a,b,c from test;", "select a1 from test;", } expectedOutputs := []*SelectStatement{ { Item: []*tokenizer.Token{ {Value: "a", Kind: tokenizer.IdentifierKind}, {Value: "b", Kind: tokenizer.IdentifierKind}, {Value: "c", Kind: tokenizer.IdentifierKind}, }, From: tokenizer.Token{ Value: "test", Kind: tokenizer.IdentifierKind, }, }, { Item: []*tokenizer.Token{ {Value: "a1", Kind: tokenizer.IdentifierKind}, }, From: tokenizer.Token{ Value: "test", Kind: tokenizer.IdentifierKind, }, }, } for testCase := range inputs { tokenList := *tokenizer.ParseTokenSequence(inputs[testCase]) actualResult, err := parseSelectStatement(tokenList) if err != nil { t.Errorf("Parsing failed on set #%d: %v", testCase, err) } if !actualResult.Equals(expectedOutputs[testCase]) { t.Errorf("Assertion failed. Expected: %s, got: %s", actualResult.String(), expectedOutputs[testCase].String()) } } }) t.Run("Test invalid select parsing", func(t *testing.T) { inputs := []string{ "Select 1,b,c from test;", "INsert into test values (1,2,3);", "Select from test;", "Select from test", "Select a, b, c from", } for testCase := range inputs { tokenList := *tokenizer.ParseTokenSequence(inputs[testCase]) actualResult, err := parseSelectStatement(tokenList) if err == nil { t.Errorf("Expected error on set #%d. Values got: %v", testCase, actualResult) } } }) }
explode_data.jsonl/55222
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 771 }
[ 2830, 3393, 3379, 8636, 68839, 1155, 353, 8840, 836, 8, 972, 3244, 16708, 445, 2271, 2697, 3293, 22314, 497, 2915, 1155, 353, 8840, 836, 8, 972, 197, 22427, 82, 1669, 3056, 917, 1666, 298, 197, 1, 3379, 264, 8402, 10109, 504, 1273, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeleteFlowStrict(t *testing.T) { br := "br02" err := PrepareOVSBridge(br) if err != nil { t.Fatalf("Failed to prepare OVS bridge: %v", br) } defer func() { err = DeleteOVSBridge(br) if err != nil { t.Errorf("error while deleting OVS bridge: %v", err) } }() bridge := binding.NewOFBridge(br) table = bridge.CreateTable(3, 4, binding.TableMissActionNext) err = bridge.Connect(maxRetry, make(chan struct{})) if err != nil { t.Fatal("Failed to start OFService") } defer bridge.Disconnect() flows, expectFlows := prepareOverlapFlows(table, "1.1.1.1", true) testDeleteSingleFlow(t, br, table, flows, expectFlows) flows2, expectFlows2 := prepareOverlapFlows(table, "2.2.2.2", false) testDeleteSingleFlow(t, br, table, flows2, expectFlows2) }
explode_data.jsonl/23784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 6435, 18878, 41857, 1155, 353, 8840, 836, 8, 341, 80255, 1669, 330, 1323, 15, 17, 698, 9859, 1669, 31166, 38957, 16680, 11183, 41237, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 311, 10549, 506, 26050, 14164...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPrivateKey_Bytes(t *testing.T) { tests := []struct { name string wantedLength int }{ {"Test Private Key Bytes Length of 32 bytes", 32}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { privKey, err := crypto.NewPrivateKey() if err != nil { t.Errorf("Error calling NewPrivateKey() error = %v", err) return } got := privKey.Bytes() if len(got) != tt.wantedLength { t.Errorf("Bytes() error, wantedLength = %v, got = %v", tt.wantedLength, len(got)) return } }) } }
explode_data.jsonl/6960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 75981, 62, 7078, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 6692, 7566, 4373, 526, 198, 197, 59403, 197, 197, 4913, 2271, 9679, 5309, 30024, 17287, 315, 220, 18, 17, 5820, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGeneral(t *testing.T) { var err error // Unexisting field and field index filter := []Filter{ {FieldName: "Oi.Descr", Value: "Debug"}, {FieldName: "Oi.Santa.Clause", Value: "Tree"}, } dataSet := make(RowCollection, 0, 2) dataSet, err = sq.Where(dataSet, filter...) if err != nil { t.Error(err) } fld := dataSet[0].FieldByIndex(999) if fld != nil { t.Error("Field should be nil when trying to access it by wrong index") } _, err = dataSet.CollectValues("unexpected field name") if err == nil { t.Error("Should be error on collecting values from unexisting field") } // Empty dataset filter = []Filter{ {FieldName: "Oi.Descr", Value: "QWERTY"}, } dataSet = make(RowCollection, 0, 2) dataSet, err = sq.Where(dataSet, filter...) if err != nil { t.Error(err) } _, err = dataSet.CollectValues("Oi.Santa.ID") if err == nil { t.Error("Should be error on collecting values from empty dataset") } // Try to filter empty dataset dataSet, err = dataSet.Where(dataSet, filter...) if err == nil { t.Error("Should be error when filtering empty dataset") } // Unexisting fieldname in filter filter = []Filter{ {FieldName: "Oi.Descrucio", Value: "Debug"}, {FieldName: "Oi.Santa.Clause", Value: "Tree"}, } dataSet = make(RowCollection, 0, 2) dataSet, err = sq.Where(dataSet, filter...) if err == nil { t.Error("Should be error on quering with unexisting fieldname in filter") } }
explode_data.jsonl/46146
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 524 }
[ 2830, 3393, 15415, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 197, 322, 1230, 36895, 2070, 323, 2070, 1922, 198, 50108, 1669, 3056, 5632, 515, 197, 197, 90, 51241, 25, 330, 81096, 68428, 81, 497, 5162, 25, 330, 7939, 7115, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestInitCmd(t *testing.T) { home, cleanup := testutil.NewTestCaseDir(t) t.Cleanup(cleanup) logger := log.NewNopLogger() cfg, err := createDefaultTendermintConfig(home) require.NoError(t, err) serverCtx := server.NewContext(viper.New(), cfg, logger) clientCtx := client.Context{}.WithJSONMarshaler(makeCodec()).WithHomeDir(home) ctx := context.Background() ctx = context.WithValue(ctx, client.ClientContextKey, &clientCtx) ctx = context.WithValue(ctx, server.ServerContextKey, serverCtx) cmd := InitCmd(testMbm, home) cmd.SetArgs([]string{"appnode-test"}) require.NoError(t, cmd.ExecuteContext(ctx)) }
explode_data.jsonl/58647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 3803, 15613, 1155, 353, 8840, 836, 8, 341, 197, 5117, 11, 21290, 1669, 1273, 1314, 7121, 16458, 6184, 1155, 340, 3244, 727, 60639, 1337, 60639, 692, 17060, 1669, 1487, 7121, 45, 453, 7395, 741, 50286, 11, 1848, 1669, 1855, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPoolParamsQuery(t *testing.T) { kb, err := keys.NewKeyBaseFromDir(InitClientHome(t, "")) require.NoError(t, err) addr, _ := CreateAddr(t, name1, pw, kb) cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}, true) defer cleanup() defaultParams := staking.DefaultParams() params := getStakingParams(t, port) require.True(t, defaultParams.Equal(params)) pool := getStakingPool(t, port) initialPool := staking.InitialPool() tokens := sdk.TokensFromTendermintPower(100) freeTokens := sdk.TokensFromTendermintPower(50) initialPool.NotBondedTokens = initialPool.NotBondedTokens.Add(tokens) initialPool.BondedTokens = initialPool.BondedTokens.Add(tokens) // Delegate tx on GaiaAppGenState initialPool.NotBondedTokens = initialPool.NotBondedTokens.Add(freeTokens) // freeTokensPerAcc = 50 on GaiaAppGenState require.Equal(t, initialPool.BondedTokens, pool.BondedTokens) //TODO include this test once REST for distribution is online, need to include distribution tokens from inflation // for this equality to make sense //require.Equal(t, initialPool.NotBondedTokens, pool.NotBondedTokens) }
explode_data.jsonl/25407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 403 }
[ 2830, 3393, 10551, 4870, 2859, 1155, 353, 8840, 836, 8, 341, 16463, 65, 11, 1848, 1669, 6894, 7121, 1592, 3978, 3830, 6184, 7, 3803, 2959, 7623, 1155, 11, 77561, 17957, 35699, 1155, 11, 1848, 340, 53183, 11, 716, 1669, 4230, 13986, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewRedisEnterpriseID(t *testing.T) { id := NewRedisEnterpriseID("12345678-1234-9876-4563-123456789012", "example-resource-group", "clusterValue") if id.SubscriptionId != "12345678-1234-9876-4563-123456789012" { t.Fatalf("Expected %q but got %q for Segment 'SubscriptionId'", id.SubscriptionId, "12345678-1234-9876-4563-123456789012") } if id.ResourceGroupName != "example-resource-group" { t.Fatalf("Expected %q but got %q for Segment 'ResourceGroupName'", id.ResourceGroupName, "example-resource-group") } if id.ClusterName != "clusterValue" { t.Fatalf("Expected %q but got %q for Segment 'ClusterName'", id.ClusterName, "clusterValue") } }
explode_data.jsonl/1710
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 3564, 48137, 85647, 915, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 1532, 48137, 85647, 915, 445, 16, 17, 18, 19, 20, 21, 22, 23, 12, 16, 17, 18, 19, 12, 24, 23, 22, 21, 12, 19, 20, 21, 18, 12, 16, 17, 18, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCreateVolumeSnapshotDBEntry(t *testing.T) { var vol = &model.VolumeSpec{ BaseModel: &model.BaseModel{ Id: "bd5b12a8-a101-11e7-941e-d77981b584d8", }, Size: 1, Status: "available", } var req = &model.VolumeSnapshotSpec{ BaseModel: &model.BaseModel{}, VolumeId: "bd5b12a8-a101-11e7-941e-d77981b584d8", Name: "sample-snapshot-01", Description: "This is the first sample snapshot for testing", Size: int64(1), ProfileId: "3769855c-a102-11e7-b772-17b880d2f537", Status: "creating", Metadata: map[string]string{"a": "a"}, } t.Run("Everything should work well", func(t *testing.T) { mockClient := new(dbtest.Client) mockClient.On("GetVolume", context.NewAdminContext(), "bd5b12a8-a101-11e7-941e-d77981b584d8").Return(vol, nil) mockClient.On("CreateVolumeSnapshot", context.NewAdminContext(), req).Return(&SampleSnapshots[0], nil) db.C = mockClient var expected = &SampleSnapshots[0] result, err := CreateVolumeSnapshotDBEntry(context.NewAdminContext(), req) if err != nil { t.Errorf("failed to create volume snapshot, err is %v\n", err) } assertTestResult(t, result, expected) }) t.Run("The profile id should not be empty", func(t *testing.T) { req.ProfileId = "" mockClient := new(dbtest.Client) mockClient.On("GetVolume", context.NewAdminContext(), "bd5b12a8-a101-11e7-941e-d77981b584d8").Return(vol, nil) mockClient.On("CreateVolumeSnapshot", context.NewAdminContext(), req).Return(&SampleSnapshots[0], nil) db.C = mockClient _, err := CreateVolumeSnapshotDBEntry(context.NewAdminContext(), req) expectedError := "profile id can not be empty when creating volume snapshot in db" assertTestResult(t, err.Error(), expectedError) }) }
explode_data.jsonl/29977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 699 }
[ 2830, 3393, 4021, 18902, 15009, 3506, 5874, 1155, 353, 8840, 836, 8, 341, 2405, 4400, 284, 609, 2528, 79106, 8327, 515, 197, 66732, 1712, 25, 609, 2528, 13018, 1712, 515, 298, 67211, 25, 330, 8940, 20, 65, 16, 17, 64, 23, 7409, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConfigTxContext_GetClusterConfigTimeout(t *testing.T) { cryptoDir := testutils.GenerateTestClientCrypto(t, []string{"admin", "server"}) testServer, _, _, err := SetupTestServer(t, cryptoDir) defer func() { if testServer != nil { _ = testServer.Stop() } }() require.NoError(t, err) StartTestServer(t, testServer) serverPort, err := testServer.Port() require.NoError(t, err) bcdb := createDBInstance(t, cryptoDir, serverPort) session := openUserSessionWithQueryTimeout(t, bcdb, "admin", cryptoDir, time.Nanosecond) tx, err := session.ConfigTx() require.Error(t, err) require.Contains(t, err.Error(), "queryTimeout error") require.Nil(t, tx) }
explode_data.jsonl/65326
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 2648, 31584, 1972, 13614, 28678, 2648, 7636, 1155, 353, 8840, 836, 8, 341, 1444, 9444, 6184, 1669, 1273, 6031, 57582, 2271, 2959, 58288, 1155, 11, 3056, 917, 4913, 2882, 497, 330, 4030, 23625, 18185, 5475, 11, 8358, 8358, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_validateEFSConfig(t *testing.T) { testCases := map[string]struct { inConfig *manifest.EFSConfigOrBool wantErr error }{ "no EFS config": { inConfig: nil, wantErr: nil, }, "managed EFS config": { inConfig: &manifest.EFSConfigOrBool{ Enabled: aws.Bool(true), }, }, "EFS explicitly disabled": { inConfig: &manifest.EFSConfigOrBool{ Enabled: aws.Bool(false), }, }, "advanced managed EFS config": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ UID: aws.Uint32(12345), GID: aws.Uint32(12345), }, }, }, "BYO EFS": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ FileSystemID: aws.String("fs-1234"), RootDirectory: aws.String("/files"), AuthConfig: &manifest.AuthorizationConfig{ IAM: aws.Bool(true), }, }, }, }, "error when access point specified with root dir": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ FileSystemID: aws.String("fs-1234"), RootDirectory: aws.String("/files"), AuthConfig: &manifest.AuthorizationConfig{ IAM: aws.Bool(true), AccessPointID: aws.String("fsap-12345"), }, }, }, wantErr: errAccessPointWithRootDirectory, }, "error when access point specified without IAM": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ FileSystemID: aws.String("fs-1234"), AuthConfig: &manifest.AuthorizationConfig{ IAM: aws.Bool(false), AccessPointID: aws.String("fsap-12345"), }, }, }, wantErr: errAccessPointWithoutIAM, }, "Enabled with advanced config": { inConfig: &manifest.EFSConfigOrBool{ Enabled: aws.Bool(true), Advanced: manifest.EFSVolumeConfiguration{ UID: aws.Uint32(12345), GID: aws.Uint32(12345), }, }, wantErr: errInvalidEFSConfig, }, "UID with BYO": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ FileSystemID: aws.String("fs-1234"), UID: aws.Uint32(12345), GID: aws.Uint32(12345), }, }, wantErr: errUIDWithNonManagedFS, }, "invalid UID config": { inConfig: &manifest.EFSConfigOrBool{ Advanced: 
manifest.EFSVolumeConfiguration{ UID: aws.Uint32(12345), }, }, wantErr: errInvalidUIDGIDConfig, }, "invalid GID config": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ GID: aws.Uint32(12345), }, }, wantErr: errInvalidUIDGIDConfig, }, "error when UID is 0": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ UID: aws.Uint32(0), GID: aws.Uint32(12345), }, }, wantErr: errReservedUID, }, "empty EFS config should be invalid": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{}, }, wantErr: errEmptyEFSConfig, }, "FSID not specified for BYO": { inConfig: &manifest.EFSConfigOrBool{ Advanced: manifest.EFSVolumeConfiguration{ RootDirectory: aws.String("/storage"), AuthConfig: &manifest.AuthorizationConfig{ IAM: aws.Bool(true), }, }, }, wantErr: errNoFSID, }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { vol := manifest.Volume{ EFS: tc.inConfig, } gotErr := validateEFSConfig(vol) if tc.wantErr == nil { require.NoError(t, gotErr) } else { require.EqualError(t, gotErr, tc.wantErr.Error()) } }) } }
explode_data.jsonl/65186
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1694 }
[ 2830, 3393, 42681, 36, 8485, 2648, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 17430, 2648, 353, 42315, 5142, 8485, 2648, 2195, 11233, 271, 197, 50780, 7747, 1465, 198, 197, 59403, 197, 197, 1, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBlockEvents(t *testing.T) { for i, c := range GetClients() { i, c := i, c // capture params t.Run(reflect.TypeOf(c).String(), func(t *testing.T) { // start for this test it if it wasn't already running if !c.IsRunning() { // if so, then we start it, listen, and stop it. err := c.Start() require.Nil(t, err, "%d: %+v", i, err) defer c.Stop() } // listen for a new block; ensure height increases by 1 var firstBlockHeight int64 for j := 0; j < 3; j++ { evtTyp := types.EventNewBlock evt, err := client.WaitForOneEvent(c, evtTyp, waitForEventTimeout) require.Nil(t, err, "%d: %+v", j, err) blockEvent, ok := evt.(types.EventDataNewBlock) require.True(t, ok, "%d: %#v", j, evt) block := blockEvent.Block if j == 0 { firstBlockHeight = block.Header.Height continue } require.Equal(t, block.Header.Height, firstBlockHeight+int64(j)) } }) } }
explode_data.jsonl/64474
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 4713, 7900, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 272, 1669, 2088, 2126, 47174, 368, 341, 197, 8230, 11, 272, 1669, 600, 11, 272, 442, 12322, 3628, 198, 197, 3244, 16708, 13321, 767, 73921, 1337, 568, 703, 1507, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestValidateWithInvalidPdAddress(t *testing.T) { cmd := new(cobra.Command) o := newOptions() o.addFlags(cmd) require.Nil(t, cmd.ParseFlags([]string{"--pd=aa"})) err := o.complete(cmd) require.Nil(t, err) err = o.validate() require.Regexp(t, ".*PD endpoint should be a valid http or https URL.*", err.Error()) }
explode_data.jsonl/41628
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 17926, 2354, 7928, 47, 67, 4286, 1155, 353, 8840, 836, 8, 341, 25920, 1669, 501, 1337, 28856, 12714, 340, 22229, 1669, 501, 3798, 741, 22229, 1364, 9195, 14160, 692, 17957, 59678, 1155, 11, 5439, 8937, 9195, 10556, 917, 4913...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestW(t *testing.T) { const want = "wawiwewo" for _, v := range [2]string{"わゐゑを", "ワヰヱヲ"} { got, err := KanaToRomaji(v) assert.Equal(t, want, got) assert.Nil(t, err) } }
explode_data.jsonl/11309
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 54, 1155, 353, 8840, 836, 8, 341, 4777, 1366, 284, 330, 86, 672, 37081, 365, 78, 1837, 2023, 8358, 348, 1669, 2088, 508, 17, 30953, 4913, 77083, 144622, 145550, 29412, 497, 330, 124556, 146257, 145447, 144529, 9207, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCookieSyncNoBidders(t *testing.T) { rr := doPost("{}", nil, true, syncersForTest()) assert.Equal(t, rr.Header().Get("Content-Type"), "application/json; charset=utf-8") assert.Equal(t, http.StatusOK, rr.Code) assert.ElementsMatch(t, []string{"appnexus", "audienceNetwork", "lifestreet", "pubmatic"}, parseSyncs(t, rr.Body.Bytes())) assert.Equal(t, "no_cookie", parseStatus(t, rr.Body.Bytes())) }
explode_data.jsonl/21690
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 20616, 12154, 2753, 33, 99129, 1155, 353, 8840, 836, 8, 341, 197, 634, 1669, 65156, 445, 42351, 2092, 11, 830, 11, 12811, 388, 2461, 2271, 2398, 6948, 12808, 1155, 11, 34393, 15753, 1005, 1949, 445, 2762, 10804, 3975, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1