text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestDescriptionLength(t *testing.T) { var errs []error for _, g := range cfg.Groups { description := g.Description len := utf8.RuneCountInString(description) //Ref: https://developers.google.com/admin-sdk/groups-settings/v1/reference/groups if len > 300 { errs = append(errs, fmt.Errorf("Number of characters in description \"%s\" for group name \"%s\" "+ "should not exceed 300; is: %d", description, g.Name, len)) } } if len(errs) > 0 { for _, err := range errs { t.Error(err) } } }
explode_data.jsonl/24789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 5009, 4373, 1155, 353, 8840, 836, 8, 341, 2405, 70817, 3056, 841, 198, 2023, 8358, 342, 1669, 2088, 13286, 59800, 341, 197, 42407, 1669, 342, 28773, 271, 197, 33111, 1669, 10644, 23, 2013, 2886, 2507, 641, 703, 29833, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestThreeLeggedCat100MBMacbookCoastToCoast(t *testing.T) { SkipUnlessEpic(t) conf := testutil.LatencyConfig{}.NetworkNYtoSF().BlockstoreSlowSSD2014().RoutingSlow() if err := RunThreeLeggedCat(RandomBytes(100*unit.MB), conf); err != nil { t.Fatal(err) } }
explode_data.jsonl/75509
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 19641, 18910, 3556, 26801, 16, 15, 15, 8412, 19552, 2190, 7339, 559, 1249, 7339, 559, 1155, 353, 8840, 836, 8, 341, 7568, 13389, 35587, 36, 15587, 1155, 340, 67850, 1669, 1273, 1314, 1214, 266, 2251, 2648, 46391, 12320, 2305...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseScientificNotation(t *testing.T) { s := NewTestStatsd() sciNotationLines := []string{ "scientific.notation:4.6968460083008E-5|ms", "scientific.notation:4.6968460083008E-5|g", "scientific.notation:4.6968460083008E-5|c", "scientific.notation:4.6968460083008E-5|h", } for _, line := range sciNotationLines { err := s.parseStatsdLine(line) if err != nil { t.Errorf("Parsing line [%s] should not have resulted in error: %s\n", line, err) } } }
explode_data.jsonl/14367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 14463, 50565, 1086, 2623, 367, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 2271, 16635, 67, 741, 1903, 5855, 2623, 367, 16794, 1669, 3056, 917, 515, 197, 197, 1, 84830, 1253, 11606, 25, 19, 13, 21, 24, 21, 23, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRestrictSuperAdmin(t *testing.T) { ds := dsmock.Stub(dsmock.YAMLData(`--- user/1/organization_management_level: superadmin personal_note/1/user_id: 1 personal_note/2/user_id: 2 unknown_collection/404/field: 404 user/404/unknown_field: blub `)) restricter := restrict.Middleware(ds, 1) keys := []string{ "unknown_collection/404/field", "personal_note/1/id", "personal_note/2/id", "user/404/unknown_field", } got, err := restricter.Get(context.Background(), keys...) if err != nil { t.Fatalf("Restrict returned: %v", err) } if got["unknown_collection/404/field"] == nil { t.Errorf("unknown_collection/404/field was restricted") } if got["user/404/unknown_field"] == nil { t.Errorf("user/404/unknown_field was restricted") } if got["personal_note/1/id"] == nil { t.Errorf("personal_note/1/id got restricted") } if got["personal_note/2/id"] != nil { t.Errorf("personal_note/2/id got not restricted") } }
explode_data.jsonl/19354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 373 }
[ 2830, 3393, 50360, 849, 19284, 7210, 1155, 353, 8840, 836, 8, 341, 83336, 1669, 294, 3563, 1176, 7758, 392, 1500, 3563, 1176, 7507, 31102, 1043, 5809, 10952, 19060, 14, 16, 14, 23899, 45363, 8274, 25, 2256, 2882, 198, 98805, 278, 27207,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestLogger_Errorf(t *testing.T) { expectedLog := testLogStatement + "\n" f := func() { logger := NewLogger(DEBUG) logger.Errorf("%s", testLogStatement) } output := testutil.StderrOutputForFunc(f) if output != expectedLog { t.Errorf("Stdout mismatch. Expected: %s Got: %s", expectedLog, output) } }
explode_data.jsonl/46809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 7395, 28651, 69, 1155, 353, 8840, 836, 8, 341, 42400, 2201, 1669, 1273, 2201, 8636, 488, 2917, 77, 698, 1166, 1669, 2915, 368, 341, 197, 17060, 1669, 1532, 7395, 52792, 340, 197, 17060, 13080, 4430, 82, 497, 1273, 2201, 86...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPlacePhotoMissingWidthAndHeight(t *testing.T) { photoReference := "ThisIsNotAPhotoReference" c, _ := NewClient(WithAPIKey(apiKey)) r := &PlacePhotoRequest{ PhotoReference: photoReference, } _, err := c.PlacePhoto(context.Background(), r) if err == nil { t.Errorf("Error expected: maps: both MaxHeight & MaxWidth missing") } if "maps: both MaxHeight & MaxWidth missing" != err.Error() { t.Errorf("Wrong error returned \"%v\"", err) } }
explode_data.jsonl/76312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 17371, 10463, 25080, 3327, 3036, 3640, 1155, 353, 8840, 836, 8, 341, 197, 11556, 8856, 1669, 330, 1986, 3872, 2623, 2537, 71, 2072, 8856, 698, 1444, 11, 716, 1669, 1532, 2959, 7, 2354, 7082, 1592, 24827, 1592, 1171, 7000, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParsePSISectionHeader(t *testing.T) { // Unknown table type w := astibinary.New() w.Write(uint8(254)) // Table ID w.Write("1") // Syntax section indicator w.Write("0000000") // Finish the byte d, _, _, _, _, err := parsePSISectionHeader(astibyte.NewIterator(w.Bytes())) assert.Equal(t, d, &PSISectionHeader{ TableID: 254, TableType: PSITableTypeUnknown, }) assert.NoError(t, err) // Valid table type d, offsetStart, offsetSectionsStart, offsetSectionsEnd, offsetEnd, err := parsePSISectionHeader(astibyte.NewIterator(psiSectionHeaderBytes())) assert.Equal(t, d, psiSectionHeader) assert.Equal(t, 0, offsetStart) assert.Equal(t, 3, offsetSectionsStart) assert.Equal(t, 2729, offsetSectionsEnd) assert.Equal(t, 2733, offsetEnd) assert.NoError(t, err) }
explode_data.jsonl/25232
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 308 }
[ 2830, 3393, 14463, 5012, 40, 9620, 4047, 1155, 353, 8840, 836, 8, 341, 197, 322, 21693, 1965, 943, 198, 6692, 1669, 11763, 579, 3287, 7121, 741, 6692, 4073, 8488, 23, 7, 17, 20, 19, 593, 442, 6633, 3034, 198, 6692, 4073, 445, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoEndpointsMetric(t *testing.T) { type endpoint struct { ip string hostname string } internalTrafficPolicyLocal := v1.ServiceInternalTrafficPolicyLocal externalTrafficPolicyLocal := v1.ServiceExternalTrafficPolicyTypeLocal metrics.RegisterMetrics() testCases := []struct { name string internalTrafficPolicy *v1.ServiceInternalTrafficPolicyType externalTrafficPolicy v1.ServiceExternalTrafficPolicyType endpoints []endpoint expectedSyncProxyRulesNoLocalEndpointsTotalInternal int expectedSyncProxyRulesNoLocalEndpointsTotalExternal int }{ { name: "internalTrafficPolicy is set and there are local endpoints", internalTrafficPolicy: &internalTrafficPolicyLocal, endpoints: []endpoint{ {"10.0.1.1", testHostname}, {"10.0.1.2", "host1"}, {"10.0.1.3", "host2"}, }, }, { name: "externalTrafficPolicy is set and there are local endpoints", externalTrafficPolicy: externalTrafficPolicyLocal, endpoints: []endpoint{ {"10.0.1.1", testHostname}, {"10.0.1.2", "host1"}, {"10.0.1.3", "host2"}, }, }, { name: "both policies are set and there are local endpoints", internalTrafficPolicy: &internalTrafficPolicyLocal, externalTrafficPolicy: externalTrafficPolicyLocal, endpoints: []endpoint{ {"10.0.1.1", testHostname}, {"10.0.1.2", "host1"}, {"10.0.1.3", "host2"}, }, }, { name: "internalTrafficPolicy is set and there are no local endpoints", internalTrafficPolicy: &internalTrafficPolicyLocal, endpoints: []endpoint{ {"10.0.1.1", "host0"}, {"10.0.1.2", "host1"}, {"10.0.1.3", "host2"}, }, expectedSyncProxyRulesNoLocalEndpointsTotalInternal: 1, }, { name: "externalTrafficPolicy is set and there are no local endpoints", externalTrafficPolicy: externalTrafficPolicyLocal, endpoints: []endpoint{ {"10.0.1.1", "host0"}, {"10.0.1.2", "host1"}, {"10.0.1.3", "host2"}, }, expectedSyncProxyRulesNoLocalEndpointsTotalExternal: 1, }, { name: "Both policies are set and there are no local endpoints", internalTrafficPolicy: &internalTrafficPolicyLocal, externalTrafficPolicy: externalTrafficPolicyLocal, 
endpoints: []endpoint{ {"10.0.1.1", "host0"}, {"10.0.1.2", "host1"}, {"10.0.1.3", "host2"}, }, expectedSyncProxyRulesNoLocalEndpointsTotalInternal: 1, expectedSyncProxyRulesNoLocalEndpointsTotalExternal: 1, }, { name: "Both policies are set and there are no endpoints at all", internalTrafficPolicy: &internalTrafficPolicyLocal, externalTrafficPolicy: externalTrafficPolicyLocal, endpoints: []endpoint{}, expectedSyncProxyRulesNoLocalEndpointsTotalInternal: 0, expectedSyncProxyRulesNoLocalEndpointsTotalExternal: 0, }, } for _, tc := range testCases { defer featuregatetesting.SetFeatureGateDuringTest(t, utilfeature.DefaultFeatureGate, features.ServiceInternalTrafficPolicy, true)() ipt := iptablestest.NewFake() ipvs := ipvstest.NewFake() ipset := ipsettest.NewFake(testIPSetVersion) fp := NewFakeProxier(ipt, ipvs, ipset, []net.IP{netutils.ParseIPSloppy("10.0.0.1")}, nil, v1.IPv4Protocol) fp.servicesSynced = true // fp.endpointsSynced = true fp.endpointSlicesSynced = true // Add initial service serviceName := "svc1" namespaceName := "ns1" svc := &v1.Service{ ObjectMeta: metav1.ObjectMeta{Name: serviceName, Namespace: namespaceName}, Spec: v1.ServiceSpec{ ClusterIP: "172.20.1.1", Selector: map[string]string{"foo": "bar"}, Ports: []v1.ServicePort{{Name: "p80", Port: 80, TargetPort: intstr.FromInt(80), Protocol: v1.ProtocolTCP, NodePort: 30000}}, }, } if tc.internalTrafficPolicy != nil { svc.Spec.InternalTrafficPolicy = tc.internalTrafficPolicy } if tc.externalTrafficPolicy != "" { svc.Spec.Type = v1.ServiceTypeNodePort svc.Spec.ExternalTrafficPolicy = tc.externalTrafficPolicy } fp.OnServiceAdd(svc) // Add initial endpoint slice tcpProtocol := v1.ProtocolTCP endpointSlice := &discovery.EndpointSlice{ ObjectMeta: metav1.ObjectMeta{ Name: fmt.Sprintf("%s-1", serviceName), Namespace: namespaceName, Labels: map[string]string{discovery.LabelServiceName: serviceName}, }, Ports: []discovery.EndpointPort{{ Name: utilpointer.StringPtr("p80"), Port: utilpointer.Int32Ptr(80), Protocol: 
&tcpProtocol, }}, AddressType: discovery.AddressTypeIPv4, } for _, ep := range tc.endpoints { endpointSlice.Endpoints = append(endpointSlice.Endpoints, discovery.Endpoint{ Addresses: []string{ep.ip}, Conditions: discovery.EndpointConditions{Ready: utilpointer.BoolPtr(true)}, NodeName: utilpointer.StringPtr(ep.hostname), }) } fp.OnEndpointSliceAdd(endpointSlice) fp.syncProxyRules() syncProxyRulesNoLocalEndpointsTotalInternal, err := testutil.GetGaugeMetricValue(metrics.SyncProxyRulesNoLocalEndpointsTotal.WithLabelValues("internal")) if err != nil { t.Errorf("failed to get %s value(internal), err: %v", metrics.SyncProxyRulesNoLocalEndpointsTotal.Name, err) } if tc.expectedSyncProxyRulesNoLocalEndpointsTotalInternal != int(syncProxyRulesNoLocalEndpointsTotalInternal) { t.Errorf("sync_proxy_rules_no_endpoints_total metric mismatch(internal): got=%d, expected %d", int(syncProxyRulesNoLocalEndpointsTotalInternal), tc.expectedSyncProxyRulesNoLocalEndpointsTotalInternal) } syncProxyRulesNoLocalEndpointsTotalExternal, err := testutil.GetGaugeMetricValue(metrics.SyncProxyRulesNoLocalEndpointsTotal.WithLabelValues("external")) if err != nil { t.Errorf("failed to get %s value(external), err: %v", metrics.SyncProxyRulesNoLocalEndpointsTotal.Name, err) } if tc.expectedSyncProxyRulesNoLocalEndpointsTotalExternal != int(syncProxyRulesNoLocalEndpointsTotalExternal) { t.Errorf("sync_proxy_rules_no_endpoints_total metric mismatch(external): got=%d, expected %d", int(syncProxyRulesNoLocalEndpointsTotalExternal), tc.expectedSyncProxyRulesNoLocalEndpointsTotalExternal) } } }
explode_data.jsonl/44388
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2547 }
[ 2830, 3393, 2753, 80786, 54310, 1155, 353, 8840, 836, 8, 341, 13158, 14887, 2036, 341, 197, 46531, 981, 914, 198, 197, 197, 27806, 914, 198, 197, 630, 33343, 87229, 13825, 7319, 1669, 348, 16, 13860, 11569, 87229, 13825, 7319, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestCreateHandleFlags(t *testing.T) { app := cli.NewApp() flagset := flag.NewFlagSet("flags", 1) flagset.String("image-id", "", "") flagset.String("flavor-id", "", "") flagset.String("security-groups", "", "") flagset.String("networks", "", "") flagset.String("metadata", "", "") flagset.String("admin-pass", "", "") flagset.String("keypair", "", "") flagset.Set("image-id", "13ba-75c0-4483-acf9") flagset.Set("flavor-id", "1234-b95f-ac5b-cd23") flagset.Set("security-groups", "sg1,sg2,sg3") flagset.Set("networks", "1111-2222-3333-4444,5555-7777-8888-9999") flagset.Set("metadata", "img=foo,flavor=bar") flagset.Set("admin-pass", "secret") flagset.Set("keypair", "kp1") c := cli.NewContext(app, flagset, nil) cmd := &commandCreate{ Ctx: &handler.Context{ CLIContext: c, }, } expected := &handler.Resource{ Params: &paramsCreate{ opts: &servers.CreateOpts{ ImageRef: "13ba-75c0-4483-acf9", FlavorRef: "1234-b95f-ac5b-cd23", SecurityGroups: []string{"sg1", "sg2", "sg3"}, Networks: []osServers.Network{ { UUID: "1111-2222-3333-4444", }, { UUID: "5555-7777-8888-9999", }, }, Metadata: map[string]string{ "img": "foo", "flavor": "bar", }, AdminPass: "secret", KeyPair: "kp1", }, }, } actual := &handler.Resource{} err := cmd.HandleFlags(actual) th.AssertNoErr(t, err) th.AssertDeepEquals(t, *expected.Params.(*paramsCreate).opts, *actual.Params.(*paramsCreate).opts) }
explode_data.jsonl/75789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 717 }
[ 2830, 3393, 4021, 6999, 9195, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 21348, 7121, 2164, 741, 30589, 746, 1669, 5181, 7121, 12135, 1649, 445, 11161, 497, 220, 16, 340, 30589, 746, 6431, 445, 1805, 12897, 497, 7342, 14676, 30589, 746,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEndpoint(t *testing.T) { type input struct { UseHttps bool Host string } testInputs := []input{ {UseHttps: true, Host: "rs.qiniu.com"}, {UseHttps: false, Host: "rs.qiniu.com"}, {UseHttps: true, Host: ""}, {UseHttps: false, Host: ""}, {UseHttps: true, Host: "https://rs.qiniu.com"}, {UseHttps: false, Host: "https://rs.qiniu.com"}, {UseHttps: false, Host: "http://rs.qiniu.com"}, } testWants := []string{"https://rs.qiniu.com", "http://rs.qiniu.com", "", "", "https://rs.qiniu.com", "http://rs.qiniu.com", "http://rs.qiniu.com"} for ind, testInput := range testInputs { testGot := endpoint(testInput.UseHttps, testInput.Host) testWant := testWants[ind] if testGot != testWant { t.Fail() } } }
explode_data.jsonl/48862
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 27380, 1155, 353, 8840, 836, 8, 341, 13158, 1946, 2036, 341, 197, 95023, 92869, 1807, 198, 197, 197, 9296, 257, 914, 198, 197, 532, 18185, 31946, 1669, 3056, 1355, 515, 197, 197, 90, 10253, 92869, 25, 830, 11, 16102, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestExactPrefix(t *testing.T) { defer leaktest.AfterTest(t)() st := cluster.MakeTestingClusterSettings() evalCtx := tree.MakeTestingEvalContext(st) testData := []struct { s string // expected value e int }{ { s: "", e: 0, }, { s: "[/1 - /1]", e: 1, }, { s: "[/1 - /2]", e: 0, }, { s: "[/1/2/3 - /1/2/3]", e: 3, }, { s: "[/1/2/3 - /1/2/3] [/1/2/5 - /1/2/8]", e: 2, }, { s: "[/1/2/3 - /1/2/3] [/1/2/5 - /1/3/8]", e: 1, }, { s: "[/1/2/3 - /1/2/3] [/1/3/3 - /1/3/3]", e: 1, }, { s: "[/1/2/3 - /1/2/3] [/3 - /4]", e: 0, }, { s: "[/1/2/1 - /1/2/1] [/1/3/1 - /1/4/1]", e: 1, }, } kc := testKeyContext(1, 2, 3) for i, tc := range testData { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { spans := parseSpans(&evalCtx, tc.s) var c Constraint c.Init(kc, &spans) if res := c.ExactPrefix(kc.EvalCtx); res != tc.e { t.Errorf("expected %d got %d", tc.e, res) } }) } }
explode_data.jsonl/59311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 57954, 14335, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 18388, 1669, 10652, 50133, 16451, 28678, 6086, 741, 93413, 23684, 1669, 4916, 50133, 16451, 54469, 1972, 5895, 692, 18185, 1043, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestModeAssert(t *testing.T) { type args struct { mode int } tests := []struct { name string args args iSHash bool isSignature bool isECDSA bool isRSA bool }{ {name: "sha3", args: args{mode: crypto.SHA3}, iSHash: true}, {name: "sha256", args: args{mode: crypto.SHA3}, iSHash: true}, {name: "sha1", args: args{mode: crypto.SHA3}, iSHash: true}, {name: "signature", args: args{mode: crypto.Secp384r1}, isSignature: true, isECDSA: true}, {name: "crypto", args: args{mode: crypto.Sm4}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := ModeIsHashAlgo(tt.args.mode); got != tt.iSHash { t.Errorf("ModeIsHashAlgo() = %v", got) } if got := ModeIsSignatureAlgo(tt.args.mode); got != tt.isSignature { t.Errorf("ModeIsSignatureAlgo() = %v", got) } if got := ModeIsRSAAlgo(tt.args.mode); got != tt.isRSA { t.Errorf("ModeIsRSAAlgo() = %v", got) } if got := ModeIsECDSAAlgo(tt.args.mode); got != tt.isECDSA { t.Errorf("ModeIsECDSAAlgo() = %v", got) } }) } }
explode_data.jsonl/45159
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 512 }
[ 2830, 3393, 3636, 8534, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 60247, 526, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 31215, 286, 2827, 198, 197, 8230, 50, 6370, 414, 1807, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestWriteSetCookies(t *testing.T) { defer log.SetOutput(os.Stderr) var logbuf bytes.Buffer log.SetOutput(&logbuf) for i, tt := range writeSetCookiesTests { if g, e := tt.Cookie.String(), tt.Raw; g != e { t.Errorf("Test %d, expecting:\n%s\nGot:\n%s\n", i, e, g) continue } } if got, sub := logbuf.String(), "dropping domain attribute"; !strings.Contains(got, sub) { t.Errorf("Expected substring %q in log output. Got:\n%s", sub, got) } }
explode_data.jsonl/20294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 7985, 1649, 50672, 1155, 353, 8840, 836, 8, 341, 16867, 1487, 4202, 5097, 9638, 77319, 340, 2405, 1487, 5909, 5820, 22622, 198, 6725, 4202, 5097, 2099, 839, 5909, 692, 2023, 600, 11, 17853, 1669, 2088, 3270, 1649, 50672, 182...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReverse(t *testing.T) { type testData struct { in, out []byte } tests := []testData{ {in: []byte{125, 55, 90, 127}, out: []byte{127, 90, 55, 125}}, {in: []byte{}, out: []byte{}}, {in: nil, out: nil}, } for i, test := range tests { Reverse(ByteSlice(test.in), 0, len(test.in)-1) if err := a.AssertSlicesEqual(a.ByteSlicesMatch{Expected: test.out, Actual: test.in}); err != nil { t.Error(m.ErrorMessageTestCount(i+1, err)) } } }
explode_data.jsonl/9012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 45695, 1155, 353, 8840, 836, 8, 341, 13158, 67348, 2036, 341, 197, 17430, 11, 700, 3056, 3782, 198, 197, 630, 78216, 1669, 3056, 1944, 1043, 515, 197, 197, 90, 258, 25, 3056, 3782, 90, 16, 17, 20, 11, 220, 20, 20, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAllClusters(t *testing.T) { g := gomega.NewGomegaWithT(t) // Setup the Manager and Controller. Wrap the Controller Reconcile function so it writes each request to a // channel when it is finished. mgr, err := manager.New(cfg, manager.Options{MetricsBindAddress: "0"}) g.Expect(err).NotTo(gomega.HaveOccurred()) c = mgr.GetClient() recFn, requests := SetupTestReconcile(newReconciler(mgr)) g.Expect(add(mgr, recFn)).NotTo(gomega.HaveOccurred()) ctx, cancel := context.WithTimeout(context.TODO(), 5*time.Minute) mgrStopped := StartTestManager(ctx, mgr, g) defer func() { cancel() mgrStopped.Wait() }() for _, cl := range clusters { clinstance := cl.DeepCopy() err = c.Create(context.TODO(), clinstance) g.Expect(err).NotTo(gomega.HaveOccurred()) defer c.Delete(context.TODO(), clinstance) } time.Sleep(6 * time.Second) cAlphaKey := types.NamespacedName{ Name: "clusteralpha", } cAlpha := &spokeClusterV1.ManagedCluster{} err = c.Get(context.TODO(), cAlphaKey, cAlpha) g.Expect(err).NotTo(gomega.HaveOccurred()) cAlpha.Status = spokeClusterV1.ManagedClusterStatus{Conditions: []metav1.Condition{}, Allocatable: spokeClusterV1.ResourceList{ spokeClusterV1.ResourceCPU: resource.MustParse("10500m"), }} err = c.Status().Update(context.TODO(), cAlpha) g.Expect(err).NotTo(gomega.HaveOccurred()) cBetaKey := types.NamespacedName{ Name: "clusterbeta", } cBeta := &spokeClusterV1.ManagedCluster{} err = c.Get(context.TODO(), cBetaKey, cBeta) g.Expect(err).NotTo(gomega.HaveOccurred()) cBeta.Status = spokeClusterV1.ManagedClusterStatus{Conditions: []metav1.Condition{}, Allocatable: spokeClusterV1.ResourceList{ spokeClusterV1.ResourceCPU: resource.MustParse("8"), }} err = c.Status().Update(context.TODO(), cBeta) g.Expect(err).NotTo(gomega.HaveOccurred()) instance := &appv1alpha1.PlacementRule{ ObjectMeta: metav1.ObjectMeta{ Name: prulename, Namespace: prulens, }, Spec: appv1alpha1.PlacementRuleSpec{ ResourceHint: &appv1alpha1.ResourceHint{ Type: appv1alpha1.ResourceTypeCPU, Order: 
appv1alpha1.SelectionOrderAsce, }, }, } err = c.Create(context.TODO(), instance) defer c.Delete(context.TODO(), instance) g.Expect(err).NotTo(gomega.HaveOccurred()) g.Eventually(requests, timeout).Should(gomega.Receive(gomega.Equal(expectedRequest))) time.Sleep(1 * time.Second) result := &appv1alpha1.PlacementRule{} err = c.Get(context.TODO(), prulekey, result) g.Expect(err).NotTo(gomega.HaveOccurred()) if len(result.Status.Decisions) != 2 { t.Errorf("Failed to get all clusters, placementrule: %v", result) } // expect order of first clusterbeta "8" then second clusteralpha "10500m" for asc cpu sort if result.Status.Decisions[0].ClusterName == "clusteralpha" { t.Errorf("Failed to sort cluster properly, placementrule: %v", result) } decision := &clusterapi.PlacementDecision{} err = c.Get(ctx, pdkey, decision) g.Expect(err).NotTo(gomega.HaveOccurred()) if len(decision.Status.Decisions) != 2 { t.Errorf("Failed to get all clusters, placementdecision: %v", result) } // expect order of first clusterbeta "8" then second clusteralpha "10500m" for asc cpu sort if decision.Status.Decisions[0].ClusterName == "clusteralpha" { t.Errorf("Failed to sort cluster properly, placementdecision: %v", result) } }
explode_data.jsonl/48695
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1310 }
[ 2830, 3393, 2403, 94992, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 692, 197, 322, 18626, 279, 10567, 323, 9771, 13, 220, 42187, 279, 9771, 1032, 40446, 457, 729, 773, 432, 13914, 1817, 1681...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewRepositoryStrictValidation(t *testing.T) { t.Parallel() for _, name := range goodStrictValidationRepositoryNames { if repository, err := NewRepository(name, StrictValidation); err != nil { t.Errorf("`%s` should be a valid Repository name, got error: %v", name, err) } else if repository.Name() != name { t.Errorf("`%v` .Name() should reproduce the original name. Wanted: %s Got: %s", repository, name, repository.Name()) } } for _, name := range append(goodWeakValidationRepositoryNames, badRepositoryNames...) { if repo, err := NewRepository(name, StrictValidation); err == nil { t.Errorf("`%s` should be an invalid repository name, got Repository: %#v", name, repo) } } }
explode_data.jsonl/39006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 242 }
[ 2830, 3393, 3564, 4624, 41857, 13799, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2023, 8358, 829, 1669, 2088, 1661, 41857, 13799, 4624, 7980, 341, 197, 743, 12542, 11, 1848, 1669, 1532, 4624, 3153, 11, 52881, 13799, 1215, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestFavClips(t *testing.T) { Convey("get FavClips", t, func() { _, err := dao.FavClips(ctx(), 27515258, "da6863c38e83fc7a035c8f7a7d9b1c11", "appkey", "phone", "iphone", "ios", 8230, 1, 20) err = nil So(err, ShouldBeNil) }) }
explode_data.jsonl/51610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 37, 402, 5066, 3077, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 455, 80941, 5066, 3077, 497, 259, 11, 2915, 368, 341, 197, 197, 6878, 1848, 1669, 24775, 991, 402, 5066, 3077, 7502, 1507, 220, 17, 22, 20, 16, 20, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDelete(t *testing.T) { t.Parallel() flight := newSyncU64set() flight.AddIfNotPresent(10) flight.Delete(10) if flight.pks.Has(10) { t.Error("did not delete the value") } }
explode_data.jsonl/7435
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 6435, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1166, 4145, 1669, 501, 12154, 52, 21, 19, 746, 741, 1166, 4145, 1904, 2679, 2623, 21195, 7, 16, 15, 692, 1166, 4145, 18872, 7, 16, 15, 340, 743, 10971, 556, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestPrintPodConditions(t *testing.T) { runningPod := &api.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "test1", Labels: map[string]string{"a": "1", "b": "2"}}, Spec: api.PodSpec{Containers: make([]api.Container, 2)}, Status: api.PodStatus{ Phase: "Running", ContainerStatuses: []api.ContainerStatus{ {Ready: true, RestartCount: 3, State: api.ContainerState{Running: &api.ContainerStateRunning{}}}, {RestartCount: 3}, }, }, } succeededPod := &api.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "test1", Labels: map[string]string{"a": "1", "b": "2"}}, Spec: api.PodSpec{Containers: make([]api.Container, 2)}, Status: api.PodStatus{ Phase: "Succeeded", ContainerStatuses: []api.ContainerStatus{ {Ready: true, RestartCount: 3, State: api.ContainerState{Running: &api.ContainerStateRunning{}}}, {RestartCount: 3}, }, }, } failedPod := &api.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "test2", Labels: map[string]string{"b": "2"}}, Spec: api.PodSpec{Containers: make([]api.Container, 2)}, Status: api.PodStatus{ Phase: "Failed", ContainerStatuses: []api.ContainerStatus{ {Ready: true, RestartCount: 3, State: api.ContainerState{Running: &api.ContainerStateRunning{}}}, {RestartCount: 3}, }, }, } tests := []struct { pod *api.Pod expect []metav1.TableRow }{ // Should not have TableRowCondition { pod: runningPod, // Columns: Name, Ready, Reason, Restarts, Age expect: []metav1.TableRow{{Cells: []interface{}{"test1", "1/2", "Running", int64(6), "<unknown>"}}}, }, // Should have TableRowCondition: podSuccessConditions { pod: succeededPod, expect: []metav1.TableRow{ { // Columns: Name, Ready, Reason, Restarts, Age Cells: []interface{}{"test1", "1/2", "Succeeded", int64(6), "<unknown>"}, Conditions: podSuccessConditions, }, }, }, // Should have TableRowCondition: podFailedCondition { pod: failedPod, expect: []metav1.TableRow{ { // Columns: Name, Ready, Reason, Restarts, Age Cells: []interface{}{"test2", "1/2", "Failed", int64(6), "<unknown>"}, Conditions: podFailedConditions, }, }, }, } for i, test := 
range tests { rows, err := printPod(test.pod, printers.GenerateOptions{}) if err != nil { t.Fatal(err) } for i := range rows { rows[i].Object.Object = nil } if !reflect.DeepEqual(test.expect, rows) { t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expect, rows)) } } }
explode_data.jsonl/21599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1067 }
[ 2830, 3393, 8994, 23527, 35435, 1155, 353, 8840, 836, 8, 341, 197, 27173, 23527, 1669, 609, 2068, 88823, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 63121, 25, 330, 1944, 16, 497, 60996, 25, 2415, 14032, 30953, 4913, 64, 788, 330, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMutexProfile(t *testing.T) { testenv.SkipFlaky(t, 19139) old := runtime.SetMutexProfileFraction(1) defer runtime.SetMutexProfileFraction(old) if old != 0 { t.Fatalf("need MutexProfileRate 0, got %d", old) } blockMutex() var w bytes.Buffer Lookup("mutex").WriteTo(&w, 1) prof := w.String() if !strings.HasPrefix(prof, "--- mutex:\ncycles/second=") { t.Errorf("Bad profile header:\n%v", prof) } prof = strings.Trim(prof, "\n") lines := strings.Split(prof, "\n") if len(lines) != 6 { t.Errorf("expected 6 lines, got %d %q\n%s", len(lines), prof, prof) } if len(lines) < 6 { return } // checking that the line is like "35258904 1 @ 0x48288d 0x47cd28 0x458931" r2 := `^\d+ 1 @(?: 0x[[:xdigit:]]+)+` //r2 := "^[0-9]+ 1 @ 0x[0-9a-f x]+$" if ok, err := regexp.MatchString(r2, lines[3]); err != nil || !ok { t.Errorf("%q didn't match %q", lines[3], r2) } r3 := "^#.*runtime/pprof.blockMutex.*$" if ok, err := regexp.MatchString(r3, lines[5]); err != nil || !ok { t.Errorf("%q didn't match %q", lines[5], r3) } }
explode_data.jsonl/13654
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 470 }
[ 2830, 3393, 38099, 8526, 1155, 353, 8840, 836, 8, 341, 18185, 3160, 57776, 3882, 28100, 1155, 11, 220, 16, 24, 16, 18, 24, 340, 61828, 1669, 15592, 4202, 38099, 8526, 61955, 7, 16, 340, 16867, 15592, 4202, 38099, 8526, 61955, 21972, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestJetStreamNotEnabled(t *testing.T) { s := RunServerOnPort(-1) defer s.Shutdown() nc, err := nats.Connect(s.ClientURL()) if err != nil { t.Fatalf("Unexpected error: %v", err) } defer nc.Close() js, err := nc.JetStream() if err != nil { t.Fatalf("Got error during initialization %v", err) } if _, err = js.AccountInfo(); err != nats.ErrJetStreamNotEnabled { t.Fatalf("Did not get the proper error, got %v", err) } }
explode_data.jsonl/29154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 35641, 3027, 2623, 5462, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 6452, 5475, 1925, 7084, 4080, 16, 340, 16867, 274, 10849, 18452, 2822, 197, 1016, 11, 1848, 1669, 308, 1862, 43851, 1141, 11716, 3144, 2398, 743, 1848, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTerm(t *testing.T) { for _, tc := range termTestCases { t.Run(tc.name, func(t *testing.T) { t.Logf("-- term test case %q\n", tc.name) t.Logf(" input: %q\n", tc.input) var trm term if tc.quoted { trm = newQuotedTerm(tc.input) } else { trm = newTerm(tc.input) } t.Logf(" term: %v\n", trm) if !(trm.Val == tc.trm.Val && trm.Wildcard == tc.trm.Wildcard) { t.Errorf( "%s:\n"+ "input:\n"+ "\t%q\n"+ "got term:\n"+ "\t%v\n"+ "want term:\n"+ "\t%v\n", tc.name, tc.input, trm, tc.trm) } }) } }
explode_data.jsonl/29231
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 17249, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 4647, 2271, 37302, 341, 197, 3244, 16708, 44415, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 3244, 98954, 21549, 4647, 1273, 1142, 1018, 80, 1699, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCurrentWeatherData(t *testing.T) { handler := testutil.NewHTTPHandler() server := httptest.NewServer(handler) defer server.Close() client := openweather.NewClient("my-app-id") client.URL = server.URL config := &config.OpenWeatherCurrentWeatherData{ Coords: []config.Coordinates{ { Lat: 46.2389, Lon: 14.3556, }, }, } cwd := openweather.NewCurrentWeatherData(client, config, log.Default()) reg := prometheus.NewRegistry() err := reg.Register(cwd) require.NoError(t, err) updated := make(chan struct{}) go func() { cwd.Update() updated <- struct{}{} }() select { case <-handler.Requests: handler.Responses <- []byte(response) case <-time.After(time.Second): require.Fail(t, "request did not arrived") } <-updated gatheredMetrics, err := reg.Gather() require.NoError(t, err) sptr := func(s string) *string { return &s } fptr := func(f float64) *float64 { return &f } metric := func(name string, value float64) *dto.MetricFamily { return &dto.MetricFamily{ Name: sptr("open_weather_" + name), Type: dto.MetricType_GAUGE.Enum(), Help: sptr(""), Metric: []*dto.Metric{ { Label: []*dto.LabelPair{ {Name: sptr("id"), Value: sptr("3197378")}, {Name: sptr("name"), Value: sptr("Kranj")}, }, Gauge: &dto.Gauge{ Value: fptr(value), }, }, }, } } expectedMetrics := []*dto.MetricFamily{ metric("clouds_all", 75), metric("main_feels_like", 287.29), metric("main_humidity", 72), metric("main_pressure", 1015), metric("main_temp", 287.88), metric("main_temp_max", 289.04), metric("main_temp_min", 284.16), metric("wind_deg", 290), metric("wind_speed", 3.6), } require.Equal(t, expectedMetrics, gatheredMetrics) }
explode_data.jsonl/68225
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 749 }
[ 2830, 3393, 5405, 28981, 1043, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 1273, 1314, 7121, 9230, 3050, 741, 41057, 1669, 54320, 70334, 7121, 5475, 36514, 340, 16867, 3538, 10421, 2822, 25291, 1669, 1787, 15206, 7121, 2959, 445, 2408, 200...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTeamTxDependency verifies ordering of signatures produced by the
// AddMemberTx team transaction when a crypto-member addition depends on first
// sweeping an obsolete keybase-type (PUK-less) invite. It runs the scenario in
// both directions: PUK-less -> crypto member, and (after a reset) crypto
// member -> PUK-less re-add.
func TestTeamTxDependency(t *testing.T) {
	tt := newTeamTester(t)
	defer tt.cleanup()

	ann := makeUserStandalone(t, "ann", standaloneUserArgs{
		disableGregor:            true,
		suppressTeamChatAnnounce: true,
	})
	tt.users = append(tt.users, ann)
	t.Logf("Signed up ann (%s)", ann.username)

	bob := tt.addPuklessUser("bob")
	t.Logf("Signed up PUK-less user bob (%s)", bob.username)

	tracy := tt.addUser("trc")
	t.Logf("Signed up PUK-ful user trc (%s)", tracy.username)

	team := ann.createTeam()
	t.Logf("Team created (%s)", team)

	// bob is PUK-less, so this lands as a keybase-type invite rather than a
	// crypto membership.
	ann.addTeamMember(team, bob.username, keybase1.TeamRole_WRITER)

	teamObj := ann.loadTeam(team, true /* admin */)
	members, err := teamObj.Members()
	require.NoError(t, err)
	require.Equal(t, 1, len(members.Owners))
	require.Equal(t, 0, len(members.Admins)+len(members.Writers)+len(members.Readers)+len(members.RestrictedBots))
	require.EqualValues(t, ann.userVersion(), members.Owners[0])
	require.Equal(t, 1, teamObj.NumActiveInvites())

	// Give bob a PUK so the transaction below can sweep his invite and add
	// him as a crypto member.
	bob.perUserKeyUpgrade()

	// Transaction time!
	// The transaction will try to achieve the following:
	// 1) Add Tracy as crypto member,
	// 2) sweep old bob@keybase invite (pukless member),
	// 3) add bob as crypto member.
	// The catch is that (3) depends on (2), so signature that does
	// (3) has to happen after (2). Signatures in flight after (2) are
	// as follows:
	// 1. change_membership (adds: trc)
	// 2. invite (cancel: bob@keybase)
	// Adding bob as a crypto member should not mutate change_membership 1.,
	// but instead create new change_membership.

	teamObj = ann.loadTeam(team, true /* admin */)

	tx := teams.CreateAddMemberTx(teamObj)
	tx.AddMemberByUsername(context.Background(), tracy.username, keybase1.TeamRole_READER)
	tx.AddMemberByUsername(context.Background(), bob.username, keybase1.TeamRole_WRITER)

	// Three payloads = the three links described above.
	payloads := tx.DebugPayloads()
	require.Equal(t, 3, len(payloads))

	err = tx.Post(libkb.NewMetaContextForTest(*ann.tc))
	require.NoError(t, err)

	// State is still fine even without ordering, because nor server
	// neither team player cares about that.
	teamObj = ann.loadTeam(team, true /* admin */)
	members, err = teamObj.Members()
	require.NoError(t, err)
	require.Equal(t, 1, len(members.Owners))
	require.EqualValues(t, ann.userVersion(), members.Owners[0])
	require.Equal(t, 0, len(members.Admins))
	require.Equal(t, 1, len(members.Writers))
	require.EqualValues(t, bob.userVersion(), members.Writers[0])
	require.Equal(t, 1, len(members.Readers))
	require.EqualValues(t, tracy.userVersion(), members.Readers[0])
	require.Equal(t, 0, teamObj.NumActiveInvites())
	require.Equal(t, 0, len(teamObj.GetActiveAndObsoleteInvites()))
	require.Equal(t, 0, len(members.RestrictedBots))

	// Try the opposite logic: reset bob, and try to re-add them as
	// pukless. The `invite` link should happen after crypto member
	// sweeping `change_membership`.
	bob.reset()
	bob.loginAfterResetPukless()

	tx = teams.CreateAddMemberTx(teamObj)
	tx.AddMemberByAssertionOrEmail(context.Background(), fmt.Sprintf("%s@rooter", tracy.username), keybase1.TeamRole_WRITER)
	tx.AddMemberByUsername(context.Background(), bob.username, keybase1.TeamRole_WRITER)

	payloads = tx.DebugPayloads()
	require.Equal(t, 3, len(payloads))

	err = tx.Post(libkb.NewMetaContextForTest(*ann.tc))
	require.NoError(t, err)
}
explode_data.jsonl/27643
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1194 }
[ 2830, 3393, 14597, 31584, 36387, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 501, 14597, 58699, 1155, 340, 16867, 17853, 87689, 2822, 197, 1020, 1669, 1281, 1474, 623, 84112, 1155, 11, 330, 1020, 497, 43388, 1474, 4117, 515, 197, 3459...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInitExistingIssuerCredential(t *testing.T) { testdir, err := ioutil.TempDir(".", "issuerinittest") if err != nil { t.Fatalf("Failed to create temp directory: %s", err.Error()) } defer os.RemoveAll(testdir) err = os.MkdirAll(filepath.Join(testdir, "msp/keystore"), 0777) if err != nil { t.Fatalf("Failed to create directory: %s", err.Error()) } err = lib.CopyFile(testPublicKeyFile, filepath.Join(testdir, "IssuerPublicKey")) if err != nil { t.Fatalf("Failed to copy file: %s", err.Error()) } err = lib.CopyFile(testSecretKeyFile, filepath.Join(testdir, "msp/keystore/IssuerSecretKey")) if err != nil { t.Fatalf("Failed to copy file: %s", err.Error()) } db, issuer := getIssuer(t, testdir, false, false) assert.NotNil(t, issuer) secrekeyfile := filepath.Join(testdir, "msp/keystore/IssuerSecretKey") secrekeyFileInfo, err := os.Stat(secrekeyfile) if err != nil { t.Fatalf("os.Stat failed on test dir: %s", err) } oldmode := secrekeyFileInfo.Mode() err = os.Chmod(secrekeyfile, 0000) if err != nil { t.Fatalf("Chmod on %s failed: %s", secrekeyFileInfo.Name(), err) } err = issuer.Init(false, db, &dbutil.Levels{Credential: 1, RAInfo: 1, Nonce: 1}) assert.Error(t, err, "Init should fail if it fails to load issuer credential") err = os.Chmod(secrekeyfile, oldmode) if err != nil { t.Fatalf("Chmod on %s failed: %s", testdir, err) } err = issuer.Init(false, db, &dbutil.Levels{Credential: 1, RAInfo: 1, Nonce: 1}) assert.NoError(t, err) }
explode_data.jsonl/23678
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 600 }
[ 2830, 3393, 3803, 53067, 98902, 48265, 1155, 353, 8840, 836, 8, 341, 18185, 3741, 11, 1848, 1669, 43144, 65009, 6184, 64217, 330, 66817, 258, 14267, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 311, 1855, 2730, 6220, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestImportVendor(t *testing.T) { testenv.MustHaveGoBuild(t) // really must just have source ctxt := Default ctxt.GOPATH = "" p, err := ctxt.Import("golang.org/x/net/http2/hpack", filepath.Join(ctxt.GOROOT, "src/net/http"), 0) if err != nil { t.Fatalf("cannot find vendored golang.org/x/net/http2/hpack from net/http directory: %v", err) } want := "vendor/golang.org/x/net/http2/hpack" if p.ImportPath != want { t.Fatalf("Import succeeded but found %q, want %q", p.ImportPath, want) } }
explode_data.jsonl/522
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 11511, 44691, 1155, 353, 8840, 836, 8, 341, 18185, 3160, 50463, 12116, 10850, 11066, 1155, 8, 442, 2167, 1969, 1101, 614, 2530, 198, 197, 77492, 1669, 7899, 198, 197, 77492, 1224, 3067, 4827, 284, 8389, 3223, 11, 1848, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPortNumberForName(t *testing.T) { for _, tc := range []struct { name string subset corev1.EndpointSubset portNumber int32 portName string err error }{{ name: "HTTP to 80", subset: corev1.EndpointSubset{ Ports: []corev1.EndpointPort{{ Port: 8080, Name: "http", }, { Port: 8443, Name: "https", }}, }, portName: "http", portNumber: 8080, }, { name: "no port", subset: corev1.EndpointSubset{ Ports: []corev1.EndpointPort{{ Port: 8443, Name: "https", }}, }, portName: "http", err: errors.New(`no port for name "http" found`), }} { t.Run(tc.name, func(t *testing.T) { portNumber, err := PortNumberForName(tc.subset, tc.portName) if !reflect.DeepEqual(err, tc.err) { // cmp Doesn't work well here due to private fields. t.Errorf("Err = %v, want: %v", err, tc.err) } if tc.err == nil && portNumber != tc.portNumber { t.Errorf("PortNumber = %d, want: %d", portNumber, tc.portNumber) } }) } }
explode_data.jsonl/58834
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 483 }
[ 2830, 3393, 7084, 2833, 2461, 675, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 28624, 746, 257, 6200, 85, 16, 90409, 70584, 198, 197, 52257, 2833, 526, 18, 17, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetDefaultDecorationConfigsThreadSafety(t *testing.T) { const repo = "repo" p := Plank{DefaultDecorationConfigs: map[string]*prowapi.DecorationConfig{ "*": { GCSConfiguration: &prowapi.GCSConfiguration{ MediaTypes: map[string]string{"text": "text"}, }, }, repo: { GCSConfiguration: &prowapi.GCSConfiguration{ MediaTypes: map[string]string{"text": "text"}, }, }, }} s1 := make(chan struct{}) s2 := make(chan struct{}) go func() { _ = p.GetDefaultDecorationConfigs(repo) close(s1) }() go func() { _ = p.GetDefaultDecorationConfigs(repo) close(s2) }() <-s1 <-s2 }
explode_data.jsonl/8100
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 1949, 3675, 19431, 84905, 6855, 73037, 1155, 353, 8840, 836, 8, 341, 4777, 15867, 284, 330, 23476, 698, 3223, 1669, 1818, 1180, 90, 3675, 19431, 84905, 25, 2415, 14032, 8465, 79, 651, 2068, 22442, 7614, 2648, 515, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImageInspectImageNotFound(t *testing.T) { client := &Client{ client: newMockClient(errorMock(http.StatusNotFound, "Server error")), } _, _, err := client.ImageInspectWithRaw(context.Background(), "unknown") if err == nil || !IsErrNotFound(err) { t.Fatalf("expected an imageNotFound error, got %v", err) } }
explode_data.jsonl/6823
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 1906, 58533, 1906, 10372, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 609, 2959, 515, 197, 25291, 25, 501, 11571, 2959, 6390, 11571, 19886, 10538, 10372, 11, 330, 5475, 1465, 30154, 197, 630, 197, 6878, 8358, 1848, 1669, 2943...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDomainFree(t *testing.T) { dom, conn := buildTestDomain() defer func() { if res, _ := conn.Close(); res != 0 { t.Errorf("Close() == %d, expected 0", res) } }() if err := dom.Free(); err != nil { t.Error(err) return } }
explode_data.jsonl/64836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 13636, 10940, 1155, 353, 8840, 836, 8, 341, 2698, 316, 11, 4534, 1669, 1936, 2271, 13636, 741, 16867, 2915, 368, 341, 197, 743, 592, 11, 716, 1669, 4534, 10421, 2129, 592, 961, 220, 15, 341, 298, 3244, 13080, 445, 7925, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStatefulPodControlCreatePodFailed(t *testing.T) { recorder := record.NewFakeRecorder(10) set := newStatefulSet(3) pod := newStatefulSetPod(set, 0) fakeClient := &fake.Clientset{} pvcIndexer := cache.NewIndexer(cache.MetaNamespaceKeyFunc, cache.Indexers{cache.NamespaceIndex: cache.MetaNamespaceIndexFunc}) pvcLister := corelisters.NewPersistentVolumeClaimLister(pvcIndexer) control := NewRealStatefulPodControl(fakeClient, nil, nil, pvcLister, recorder) fakeClient.AddReactor("create", "persistentvolumeclaims", func(action core.Action) (bool, runtime.Object, error) { create := action.(core.CreateAction) return true, create.GetObject(), nil }) fakeClient.AddReactor("create", "pods", func(action core.Action) (bool, runtime.Object, error) { return true, nil, apierrors.NewInternalError(errors.New("API server down")) }) if err := control.CreateStatefulPod(set, pod); err == nil { t.Error("Failed to produce error on Pod creation failure") } events := collectEvents(recorder.Events) if eventCount := len(events); eventCount != 2 { t.Errorf("Pod create failed: got %d events, but want 2", eventCount) } else if !strings.Contains(events[0], v1.EventTypeNormal) { t.Errorf("Found unexpected non-normal event %s", events[0]) } else if !strings.Contains(events[1], v1.EventTypeWarning) { t.Errorf("Found unexpected non-warning event %s", events[1]) } }
explode_data.jsonl/17901
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 466 }
[ 2830, 3393, 1397, 1262, 23527, 3273, 4021, 23527, 9408, 1155, 353, 8840, 836, 8, 341, 67904, 1358, 1669, 3255, 7121, 52317, 47023, 7, 16, 15, 340, 8196, 1669, 36848, 1262, 1649, 7, 18, 340, 3223, 347, 1669, 36848, 1262, 1649, 23527, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunPodSandboxWithFailingCNI(t *testing.T) { tst := makeVirtletCRITester(t) defer tst.teardown() sandboxes := criapi.GetSandboxes(1) sandboxes[0].Metadata.Uid = "should-fail-cni" tst.runPodSandboxAndExpectError(sandboxes[0]) tst.verify() }
explode_data.jsonl/15664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 6727, 23527, 50, 31536, 2354, 37, 14277, 34, 14912, 1155, 353, 8840, 836, 8, 341, 3244, 267, 1669, 1281, 53, 2106, 1149, 8973, 952, 5191, 1155, 340, 16867, 71707, 31853, 37496, 2822, 1903, 437, 22204, 1669, 27558, 2068, 2234...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSaveArtifactsExtractError(t *testing.T) { bh := testBuildHandler() th := bh.tar.(*test.FakeTar) expected := fmt.Errorf("extract error") th.ExtractTarError = expected err := bh.Save(bh.config) if err != expected { t.Errorf("Unexpected error returned from saveArtifacts: %v", err) } }
explode_data.jsonl/59443
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 8784, 9286, 26401, 28959, 1454, 1155, 353, 8840, 836, 8, 341, 2233, 71, 1669, 1273, 11066, 3050, 741, 70479, 1669, 42989, 28048, 41399, 1944, 991, 726, 62733, 340, 42400, 1669, 8879, 13080, 445, 23493, 1465, 1138, 70479, 5121,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRandomSearcherBatches(t *testing.T) { actual := model.RandomConfig{MaxTrials: 4, MaxLength: model.NewLengthInBatches(300)} expected := [][]ValidateAfter{ toOps("300B"), toOps("300B"), toOps("300B"), toOps("300B"), } search := newRandomSearch(actual) checkSimulation(t, search, nil, ConstantValidation, expected) }
explode_data.jsonl/24095
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 13999, 5890, 261, 33, 9118, 1155, 353, 8840, 836, 8, 341, 88814, 1669, 1614, 26709, 2648, 90, 5974, 21884, 1127, 25, 220, 19, 11, 7487, 4373, 25, 1614, 7121, 4373, 641, 33, 9118, 7, 18, 15, 15, 10569, 42400, 1669, 52931,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadReturnsEOFOnUninitialisedPipe(t *testing.T) { t.Parallel() p := &script.Pipe{} buf := []byte{0} // try to read at least 1 byte n, err := p.Read(buf) if !errors.Is(err, io.EOF) { t.Errorf("want EOF, got %v", err) } if n > 0 { t.Errorf("unexpectedly read %d bytes", n) } }
explode_data.jsonl/51536
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 4418, 16446, 23483, 1925, 1806, 9426, 4056, 34077, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 3223, 1669, 609, 2282, 1069, 3444, 16094, 26398, 1669, 3056, 3782, 90, 15, 92, 442, 1430, 311, 1349, 518, 3245, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSetUploadStatusWithNonExistentAssetIDShouldReturnProperError(t *testing.T) { rec := httptest.NewRecorder() req, err := http.NewRequest(constants.RequestMethodPut, fmt.Sprintf("%s?id=%s", constants.StatusURL, constants.MockNonExistentID), nil) require.NoError(t, err) db := &test.MockDb{Err: &data.ErrorNoAssetFound{}} upd := &test.MockUploader{} env := &config.Env{AssetUploader: upd, Store: db} SetUploadStatus(env).ServeHTTP(rec, req) assert.Equal(t, http.StatusNotFound, rec.Code) contentType := rec.Header().Get(constants.HeaderContentType) assert.Equal(t, constants.ApplicationJSON, contentType) }
explode_data.jsonl/34288
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 227 }
[ 2830, 3393, 1649, 13844, 2522, 2354, 8121, 840, 18128, 16604, 915, 14996, 5598, 1336, 712, 1454, 1155, 353, 8840, 836, 8, 341, 67904, 1669, 54320, 70334, 7121, 47023, 741, 24395, 11, 1848, 1669, 1758, 75274, 80368, 68940, 19103, 345, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOption_WithBasicAuth(t *testing.T) { client, err := NewClient("test", "http://httpbin.org/") if err != nil { t.Fatal(err) } resp := &respCarrier{} err = client.Get(resp, "/basic-auth/user/passwd", WithBasicAuth(func() (string, string) { return "user", "passwd" })) if err != nil { t.Fatal(err) } t.Log(resp.String()) n := strings.Index(resp.String(), "authenticated") if n == -1 { t.Fatal("authenticated not found in response") } }
explode_data.jsonl/68692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 5341, 62, 2354, 15944, 5087, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 1532, 2959, 445, 1944, 497, 330, 1254, 1110, 1254, 6863, 2659, 53006, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 3465...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClustersStatusAll(t *testing.T) { ctx := context.Background() clusters, mock := createClusters(t) defer shutdownClusters(t, clusters, mock) h := test.Cid1 clusters[0].Pin(ctx, h, api.PinOptions{}) pinDelay() // Global status f := func(t *testing.T, c *Cluster) { statuses, err := c.StatusAll(ctx) if err != nil { t.Error(err) } if len(statuses) != 1 { t.Fatal("bad status. Expected one item") } if !statuses[0].Cid.Equals(h) { t.Error("bad cid in status") } info := statuses[0].PeerMap if len(info) != nClusters { t.Error("bad info in status") } pid := peer.IDB58Encode(c.host.ID()) if info[pid].Status != api.TrackerStatusPinned { t.Error("the hash should have been pinned") } status, err := c.Status(ctx, h) if err != nil { t.Error(err) } pinfo, ok := status.PeerMap[pid] if !ok { t.Fatal("Host not in status") } if pinfo.Status != api.TrackerStatusPinned { t.Error(pinfo.Error) t.Error("the status should show the hash as pinned") } } runF(t, clusters, f) }
explode_data.jsonl/66605
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 94992, 2522, 2403, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 39407, 14605, 11, 7860, 1669, 1855, 94992, 1155, 340, 16867, 23766, 94992, 1155, 11, 26968, 11, 7860, 340, 9598, 1669, 1273, 727, 307, 16, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestStructArray(t *testing.T) { type T struct { Foo string } emptyTSlice := []T{} testData := []struct { csdata []byte accepter []T value []T }{ { []byte(`[{"foo": "bar"}]`), emptyTSlice, []T{{Foo: "bar"}}, }, } for idx, test := range testData { values, err := newValues(&source.ChangeSet{ Data: test.csdata, }) if err != nil { t.Fatal(err) } err = values.Get().Scan(&test.accepter) if err != nil { t.Fatal(err) } if !reflect.DeepEqual(test.accepter, test.value) { t.Fatalf("No.%d Expected %v got %v", idx, test.value, test.accepter) } } }
explode_data.jsonl/20552
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 9422, 1857, 1155, 353, 8840, 836, 8, 341, 13158, 350, 2036, 341, 197, 12727, 2624, 914, 198, 197, 630, 197, 3194, 9951, 4754, 1669, 3056, 51, 31483, 18185, 1043, 1669, 3056, 1235, 341, 197, 71899, 691, 256, 3056, 3782, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGocloak_DeleteRealmRole(t *testing.T) { t.Parallel() cfg := GetConfig(t) client := NewClientWithDebug(t) token := GetAdminToken(t, client) _, roleName := CreateRealmRole(t, client) err := client.DeleteRealmRole( token.AccessToken, cfg.GoCloak.Realm, roleName) FailIfErr(t, err, "DeleteRealmRole failed") }
explode_data.jsonl/79547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 38, 509, 385, 585, 57418, 64290, 9030, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 50286, 1669, 2126, 2648, 1155, 340, 25291, 1669, 1532, 2959, 2354, 7939, 1155, 340, 43947, 1669, 2126, 7210, 3323, 1155, 11, 2943, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetSetIndexes drives getSetIndexes with ellipsis-pattern endpoint specs
// and checks that the computed erasure-set layouts (or expected failures)
// match for each combination of argument patterns and total drive counts.
func TestGetSetIndexes(t *testing.T) {
	testCases := []struct {
		args       []string   // endpoint argument patterns
		totalSizes []uint64   // total drive count per argument
		indexes    [][]uint64 // expected per-argument set sizes (nil on failure)
		success    bool       // whether getSetIndexes should succeed
	}{
		// Invalid inputs.
		{
			[]string{"data{1...3}"},
			[]uint64{3},
			nil,
			false,
		},
		{
			[]string{"data/controller1/export{1...2}, data/controller2/export{1...4}, data/controller3/export{1...8}"},
			[]uint64{2, 4, 8},
			nil,
			false,
		},
		// Valid inputs.
		{
			[]string{"data{1...27}"},
			[]uint64{27},
			[][]uint64{{9, 9, 9}},
			true,
		},
		{
			// 540 drives split into 36 sets of 15.
			[]string{"http://host{1...3}/data{1...180}"},
			[]uint64{540},
			[][]uint64{{15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
				15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15,
				15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15}},
			true,
		},
		{
			// 1440 drives split into 90 sets of 16.
			[]string{"http://host{1...2}.rack{1...4}/data{1...180}"},
			[]uint64{1440},
			[][]uint64{{16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
				16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
				16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
				16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
				16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16,
				16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16, 16}},
			true,
		},
		{
			// 360 drives split into 30 sets of 12.
			[]string{"http://host{1...2}/data{1...180}"},
			[]uint64{360},
			[][]uint64{{12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12,
				12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12, 12}},
			true,
		},
		{
			[]string{"data/controller1/export{1...4}, data/controller2/export{1...8}, data/controller3/export{1...12}"},
			[]uint64{4, 8, 12},
			[][]uint64{{4}, {4, 4}, {4, 4, 4}},
			true,
		},
		{
			[]string{"data{1...64}"},
			[]uint64{64},
			[][]uint64{{16, 16, 16, 16}},
			true,
		},
		{
			[]string{"data{1...24}"},
			[]uint64{24},
			[][]uint64{{12, 12}},
			true,
		},
		{
			[]string{"data/controller{1...11}/export{1...8}"},
			[]uint64{88},
			[][]uint64{{11, 11, 11, 11, 11, 11, 11, 11}},
			true,
		},
		{
			[]string{"data{1...4}"},
			[]uint64{4},
			[][]uint64{{4}},
			true,
		},
		{
			[]string{"data/controller1/export{1...10}, data/controller2/export{1...10}, data/controller3/export{1...10}"},
			[]uint64{10, 10, 10},
			[][]uint64{{10}, {10}, {10}},
			true,
		},
	}
	for _, testCase := range testCases {
		testCase := testCase
		t.Run("", func(t *testing.T) {
			// Expand each argument into its ellipsis patterns first; that is
			// the same input getSetIndexes receives in production.
			var argPatterns = make([]ellipses.ArgPattern, len(testCase.args))
			for i, arg := range testCase.args {
				patterns, err := ellipses.FindEllipsesPatterns(arg)
				if err != nil {
					t.Fatalf("Unexpected failure %s", err)
				}
				argPatterns[i] = patterns
			}
			gotIndexes, err := getSetIndexes(testCase.args, testCase.totalSizes, 0, argPatterns)
			if err != nil && testCase.success {
				t.Errorf("Expected success but failed instead %s", err)
			}
			if err == nil && !testCase.success {
				t.Errorf("Expected failure but passed instead")
			}
			if !reflect.DeepEqual(testCase.indexes, gotIndexes) {
				t.Errorf("Expected %v, got %v", testCase.indexes, gotIndexes)
			}
		})
	}
}
explode_data.jsonl/8754
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1500 }
[ 2830, 3393, 1949, 1649, 62229, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 31215, 981, 3056, 917, 198, 197, 34493, 34930, 3056, 2496, 21, 19, 198, 197, 26327, 288, 262, 52931, 2496, 21, 19, 198, 197, 30553,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestAPIs runs the Ginkgo spec suite for this API package under `go test`.
func TestAPIs(t *testing.T) {
	// Route Gomega assertion failures into Ginkgo's Fail handler.
	RegisterFailHandler(Fail)
	RunSpecsWithDefaultAndCustomReporters(t,
		"v1beta1 Suite",
		[]Reporter{printer.NewlineReporter{}})
}
explode_data.jsonl/58123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 7082, 82, 1155, 353, 8840, 836, 8, 341, 79096, 19524, 3050, 7832, 604, 692, 85952, 8327, 16056, 3675, 3036, 10268, 10361, 388, 1155, 345, 197, 197, 1, 85, 16, 19127, 16, 20977, 756, 197, 197, 1294, 52766, 90, 62956, 7121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestOnePCErrorTracking checks that when a one-phase commit attempt fails,
// the TxnCoordSender attaches the transaction's lock spans to the subsequent
// rollback and stops its heartbeat loop once the rollback completes.
func TestOnePCErrorTracking(t *testing.T) {
	defer leaktest.AfterTest(t)()
	defer log.Scope(t).Close(t)
	ctx := context.Background()
	clock := hlc.NewClock(hlc.UnixNano, time.Nanosecond)
	ambient := log.AmbientContext{Tracer: tracing.NewTracer()}
	sender := &mockSender{}
	stopper := stop.NewStopper()
	defer stopper.Stop(ctx)
	factory := NewTxnCoordSenderFactory(
		TxnCoordSenderFactoryConfig{
			AmbientCtx: log.AmbientContext{Tracer: tracing.NewTracer()},
			Clock:      clock,
			Stopper:    stopper,
			Settings:   cluster.MakeTestingClusterSettings(),
		},
		sender,
	)
	db := kv.NewDB(ambient, factory, clock, stopper)
	keyA, keyB, keyC := roachpb.Key("a"), roachpb.Key("b"), roachpb.Key("c")
	// Register a matcher catching the commit attempt. It rejects the commit
	// with an injected error so the client is forced into a rollback.
	sender.match(func(ba roachpb.BatchRequest) (*roachpb.BatchResponse, *roachpb.Error) {
		if et, ok := ba.GetArg(roachpb.EndTxn); !ok {
			return nil, nil
		} else if !et.(*roachpb.EndTxnRequest).Commit {
			return nil, nil
		}
		return nil, roachpb.NewErrorf("injected err")
	})
	// Register a matcher catching the rollback attempt. It verifies the
	// rollback carries the lock spans acquired by the failed batch (a point
	// lock on keyA and a ranged lock over [keyB, keyC)).
	sender.match(func(ba roachpb.BatchRequest) (*roachpb.BatchResponse, *roachpb.Error) {
		et, ok := ba.GetArg(roachpb.EndTxn)
		if !ok {
			return nil, nil
		}
		etReq := et.(*roachpb.EndTxnRequest)
		if etReq.Commit {
			return nil, nil
		}
		expLocks := []roachpb.Span{{Key: keyA}, {Key: keyB, EndKey: keyC}}
		locks := etReq.LockSpans
		if !reflect.DeepEqual(locks, expLocks) {
			return nil, roachpb.NewErrorf("expected locks %s, got: %s", expLocks, locks)
		}
		resp := ba.CreateReply()
		// Set the response's txn to the Aborted status (as the server would). This
		// will make the TxnCoordSender stop the heartbeat loop.
		resp.Txn = ba.Txn.Clone()
		resp.Txn.Status = roachpb.ABORTED
		return resp, nil
	})
	txn := kv.NewTxn(ctx, db, roachpb.NodeID(1))
	txnHeader := roachpb.Header{
		Txn: txn.TestingCloneTxn(),
	}
	// The batch writes keyA and takes locks over [keyB, keyC); its commit is
	// rejected by the first matcher above.
	b := txn.NewBatch()
	b.Put(keyA, "test value")
	b.ScanForUpdate(keyB, keyC)
	if err := txn.CommitInBatch(ctx, b); !testutils.IsError(err, "injected err") {
		t.Fatal(err)
	}
	// Now send a rollback and verify that the TxnCoordSender attaches the locks
	// to it.
	if _, pErr := kv.SendWrappedWith(
		ctx, txn, txnHeader,
		&roachpb.EndTxnRequest{Commit: false},
	); pErr != nil {
		t.Fatal(pErr)
	}
	// As always, check that the rollback we just sent stops the heartbeat loop.
	testutils.SucceedsSoon(t, func() error {
		if txn.Sender().(*TxnCoordSender).IsTracking() {
			return fmt.Errorf("still tracking")
		}
		return nil
	})
}
explode_data.jsonl/76900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1063 }
[ 2830, 3393, 3966, 4872, 1454, 37119, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 20985, 1669, 2266, 19047, 741, 84165, 1669, 305, 17257, 7121, 26104, 3203, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestRepository_Save covers Save for every supported image format (JPEG,
// GIF, PNG), both thumbnail and original variants, plus the encode-failure
// and object-storage-failure paths.
func TestRepository_Save(t *testing.T) {
	ctx := context.Background()
	mockController := gomock.NewController(t)
	defer mockController.Finish()
	mockRepository := mock_gateway.NewMockObjectStorage(mockController)
	repository := NewRepository(mockRepository, env.Object.BucketNameOriginal, env.Object.BucketNameThumbnail)
	// A small 10x10 random image is enough to exercise encoding.
	validIMG := graffiti.RandomImage(image.Rect(0, 0, 10, 10))
	testCases := []struct {
		name        string
		image       *domain.Image
		saveErr     error // error the mocked object storage returns from Save
		expectedErr error // error the repository is expected to surface
	}{
		{
			name: "OK: JPEG and IsNotThumbnail",
			image: &domain.Image{
				Name:   "test",
				Format: domain.ImageFormatJPEG,
				Image:  validIMG,
			},
		},
		{
			name: "OK: JPEG and IsThumbnail",
			image: &domain.Image{
				Name:        "test",
				Format:      domain.ImageFormatJPEG,
				IsThumbnail: true,
				Image:       validIMG,
			},
		},
		{
			name: "OK: GIF and IsNotThumbnail",
			image: &domain.Image{
				Name:   "test",
				Format: domain.ImageFormatGIF,
				Image:  validIMG,
			},
		},
		{
			name: "OK: GIF and IsThumbnail",
			image: &domain.Image{
				Name:        "test",
				Format:      domain.ImageFormatGIF,
				IsThumbnail: true,
				Image:       validIMG,
			},
		},
		{
			name: "OK: PNG and IsNotThumbnail",
			image: &domain.Image{
				Name:   "test",
				Format: domain.ImageFormatPNG,
				Image:  validIMG,
			},
		},
		{
			name: "OK: PNG and IsThumbnail",
			image: &domain.Image{
				Name:        "test",
				Format:      domain.ImageFormatPNG,
				IsThumbnail: true,
				Image:       validIMG,
			},
		},
		{
			// Empty domain.Image cannot be encoded; Save must fail before
			// reaching the object storage.
			name:        "NG: nil Image",
			image:       &domain.Image{},
			expectedErr: xerrors.Errorf("failed to Encode: %w", xerrors.New("misspecified image")),
		},
		{
			// The object storage itself fails; Save must propagate the error.
			name: "NG: failed to Save",
			image: &domain.Image{
				Name:        "test",
				Format:      domain.ImageFormatJPEG,
				IsThumbnail: true,
				Image:       validIMG,
			},
			saveErr:     testutil.ErrSome,
			expectedErr: testutil.ErrSome,
		},
	}
	for _, tc := range testCases {
		t.Run(tc.name, func(t *testing.T) {
			// Only register a Save expectation when encoding succeeds; an
			// un-encodable image never reaches the object storage.
			func() {
				object, err := tc.image.Encode()
				if err != nil {
					return
				}
				// Thumbnails go to the thumbnail bucket, originals to the
				// original bucket.
				bucketName := repository.bucketNameOriginal
				if tc.image.IsThumbnail {
					bucketName = repository.bucketNameThumbnail
				}
				mockRepository.EXPECT().Save(ctx, object, tc.image.Name+"."+tc.image.Format.String(), bucketName).Return(tc.saveErr)
			}()
			err := repository.Save(ctx, tc.image)
			if tc.expectedErr != nil {
				assert.EqualError(t, err, tc.expectedErr.Error())
				return
			}
			assert.NoError(t, err)
		})
	}
}
explode_data.jsonl/40502
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1195 }
[ 2830, 3393, 4624, 78746, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 77333, 2051, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 2051, 991, 18176, 2822, 77333, 4624, 1669, 7860, 64049, 7121, 11571, 1190, 5793,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDecryptConfigFile(t *testing.T) { sessionDK = nil result, err := EncryptConfigFile([]byte("test"), []byte("key")) if err != nil { t.Fatal(err) } _, err = DecryptConfigFile(result, nil) if err == nil { t.Fatal("Test failed. Expected different result") } _, err = DecryptConfigFile([]byte("test"), nil) if err == nil { t.Fatal("Test failed. Expected different result") } _, err = DecryptConfigFile([]byte("test"), []byte("AAAAAAAAAAAAAAAA")) if err == nil { t.Fatalf("Test failed. Expected %s", errAESBlockSize) } result, err = EncryptConfigFile([]byte("test"), []byte("key")) if err != nil { t.Fatal(err) } _, err = DecryptConfigFile(result, []byte("key")) if err != nil { t.Fatal(err) } }
explode_data.jsonl/11026
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 89660, 2648, 1703, 1155, 353, 8840, 836, 8, 341, 25054, 18301, 284, 2092, 271, 9559, 11, 1848, 1669, 55459, 2648, 1703, 10556, 3782, 445, 1944, 3975, 3056, 3782, 445, 792, 5455, 743, 1848, 961, 2092, 341, 197, 3244, 26133, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestInsertAssetStats verifies that InsertAssetStats accepts an empty batch,
// writes a non-empty batch of ExpAssetStat rows, and that each inserted row
// round-trips back through GetAssetStat.
func TestInsertAssetStats(t *testing.T) {
	tt := test.Start(t)
	defer tt.Finish()
	test.ResetHorizonDB(t, tt.HorizonDB)
	q := &Q{tt.HorizonSession()}
	// Inserting an empty batch must succeed (no-op).
	tt.Assert.NoError(q.InsertAssetStats([]ExpAssetStat{}, 1))
	assetStats := []ExpAssetStat{
		ExpAssetStat{
			AssetType:   xdr.AssetTypeAssetTypeCreditAlphanum4,
			AssetIssuer: "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H",
			AssetCode:   "USD",
			Amount:      "1",
			NumAccounts: 2,
		},
		ExpAssetStat{
			AssetType:   xdr.AssetTypeAssetTypeCreditAlphanum12,
			AssetIssuer: "GBRPYHIL2CI3FNQ4BXLFMNDLFJUNPU2HY3ZMFSHONUCEOASW7QC7OX2H",
			AssetCode:   "ETHER",
			Amount:      "23",
			NumAccounts: 1,
		},
	}
	tt.Assert.NoError(q.InsertAssetStats(assetStats, 1))
	// Every inserted row should be readable via its (type, code, issuer) key.
	for _, assetStat := range assetStats {
		got, err := q.GetAssetStat(assetStat.AssetType, assetStat.AssetCode, assetStat.AssetIssuer)
		tt.Assert.NoError(err)
		tt.Assert.Equal(got, assetStat)
	}
}
explode_data.jsonl/42370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 439 }
[ 2830, 3393, 13780, 16604, 16635, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 1273, 12101, 1155, 340, 16867, 17853, 991, 18176, 741, 18185, 36660, 39601, 16973, 3506, 1155, 11, 17853, 3839, 269, 16973, 3506, 340, 18534, 1669, 609, 48, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreateHandlePipe(t *testing.T) { cmd := &commandCreate{} expected := &handler.Resource{ Params: &paramsCreate{ opts: &servers.CreateOpts{ Name: "server1", }, }, } actual := &handler.Resource{ Params: &paramsCreate{ opts: &servers.CreateOpts{}, }, } err := cmd.HandlePipe(actual, "server1") th.AssertNoErr(t, err) th.AssertEquals(t, expected.Params.(*paramsCreate).opts.Name, actual.Params.(*paramsCreate).opts.Name) }
explode_data.jsonl/75790
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 4021, 6999, 34077, 1155, 353, 8840, 836, 8, 341, 25920, 1669, 609, 5631, 4021, 16094, 42400, 1669, 609, 17905, 20766, 515, 197, 197, 4870, 25, 609, 3519, 4021, 515, 298, 64734, 25, 609, 67696, 7251, 43451, 515, 571, 21297, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPlacementTiflashCheck verifies that TiFlash replicas and placement
// policies are mutually exclusive on a partitioned table, in both orders
// (replica first, then policy; policy first, then replica) and at both the
// table and partition level. After each rejected ALTER, SHOW CREATE TABLE
// must still reflect the original definition.
func TestPlacementTiflashCheck(t *testing.T) {
	store, clean := testkit.CreateMockStore(t)
	defer clean()
	tk := testkit.NewTestKit(t, store)
	// Pretend TiFlash stores exist so "set tiflash replica" is accepted.
	require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/infoschema/mockTiFlashStoreCount", `return(true)`))
	defer func() {
		err := failpoint.Disable("github.com/pingcap/tidb/infoschema/mockTiFlashStoreCount")
		require.NoError(t, err)
	}()
	tk.MustExec("use test")
	tk.MustExec("drop placement policy if exists p1")
	tk.MustExec("drop table if exists tp")
	tk.MustExec("create placement policy p1 primary_region='r1' regions='r1'")
	defer tk.MustExec("drop placement policy if exists p1")
	// Case 1: table already has a TiFlash replica; adding a placement policy
	// (table- or partition-level) must be rejected.
	tk.MustExec(`CREATE TABLE tp (id INT) PARTITION BY RANGE (id) ( PARTITION p0 VALUES LESS THAN (100), PARTITION p1 VALUES LESS THAN (1000) )`)
	defer tk.MustExec("drop table if exists tp")
	tk.MustExec("alter table tp set tiflash replica 1")
	err := tk.ExecToErr("alter table tp placement policy p1")
	require.True(t, dbterror.ErrIncompatibleTiFlashAndPlacement.Equal(err))
	err = tk.ExecToErr("alter table tp partition p0 placement policy p1")
	require.True(t, dbterror.ErrIncompatibleTiFlashAndPlacement.Equal(err))
	tk.MustQuery("show create table tp").Check(testkit.Rows("" +
		"tp CREATE TABLE `tp` (\n" +
		" `id` int(11) DEFAULT NULL\n" +
		") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin\n" +
		"PARTITION BY RANGE (`id`)\n" +
		"(PARTITION `p0` VALUES LESS THAN (100),\n" +
		" PARTITION `p1` VALUES LESS THAN (1000))"))
	tk.MustExec("drop table tp")
	// Case 2: table-level placement policy (lowercase syntax) first; adding a
	// TiFlash replica must be rejected.
	tk.MustExec(`CREATE TABLE tp (id INT) placement policy p1 PARTITION BY RANGE (id) ( PARTITION p0 VALUES LESS THAN (100), PARTITION p1 VALUES LESS THAN (1000) )`)
	err = tk.ExecToErr("alter table tp set tiflash replica 1")
	require.True(t, dbterror.ErrIncompatibleTiFlashAndPlacement.Equal(err))
	tk.MustQuery("show create table tp").Check(testkit.Rows("" +
		"tp CREATE TABLE `tp` (\n" +
		" `id` int(11) DEFAULT NULL\n" +
		") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin /*T![placement] PLACEMENT POLICY=`p1` */\n" +
		"PARTITION BY RANGE (`id`)\n" +
		"(PARTITION `p0` VALUES LESS THAN (100),\n" +
		" PARTITION `p1` VALUES LESS THAN (1000))"))
	tk.MustExec("drop table tp")
	// Case 3: partition-level placement policy (lowercase syntax) first.
	tk.MustExec(`CREATE TABLE tp (id INT) PARTITION BY RANGE (id) ( PARTITION p0 VALUES LESS THAN (100) placement policy p1 , PARTITION p1 VALUES LESS THAN (1000) )`)
	err = tk.ExecToErr("alter table tp set tiflash replica 1")
	require.True(t, dbterror.ErrIncompatibleTiFlashAndPlacement.Equal(err))
	tk.MustQuery("show create table tp").Check(testkit.Rows("" +
		"tp CREATE TABLE `tp` (\n" +
		" `id` int(11) DEFAULT NULL\n" +
		") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin\n" +
		"PARTITION BY RANGE (`id`)\n" +
		"(PARTITION `p0` VALUES LESS THAN (100) /*T![placement] PLACEMENT POLICY=`p1` */,\n" +
		" PARTITION `p1` VALUES LESS THAN (1000))"))
	tk.MustExec("drop table tp")
	// Case 4: table-level placement policy (uppercase syntax) first.
	tk.MustExec(`CREATE TABLE tp (id INT) PLACEMENT POLICY p1 PARTITION BY RANGE (id) ( PARTITION p0 VALUES LESS THAN (100), PARTITION p1 VALUES LESS THAN (1000) )`)
	err = tk.ExecToErr("alter table tp set tiflash replica 1")
	require.True(t, dbterror.ErrIncompatibleTiFlashAndPlacement.Equal(err))
	tk.MustQuery("show create table tp").Check(testkit.Rows("" +
		"tp CREATE TABLE `tp` (\n" +
		" `id` int(11) DEFAULT NULL\n" +
		") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin /*T![placement] PLACEMENT POLICY=`p1` */\n" +
		"PARTITION BY RANGE (`id`)\n" +
		"(PARTITION `p0` VALUES LESS THAN (100),\n" +
		" PARTITION `p1` VALUES LESS THAN (1000))"))
	tk.MustExec("drop table tp")
	// Case 5: partition-level placement policy (uppercase syntax) first.
	tk.MustExec(`CREATE TABLE tp (id INT) PARTITION BY RANGE (id) ( PARTITION p0 VALUES LESS THAN (100) PLACEMENT POLICY p1, PARTITION p1 VALUES LESS THAN (1000) )`)
	err = tk.ExecToErr("alter table tp set tiflash replica 1")
	require.True(t, dbterror.ErrIncompatibleTiFlashAndPlacement.Equal(err))
	tk.MustQuery("show create table tp").Check(testkit.Rows("" +
		"tp CREATE TABLE `tp` (\n" +
		" `id` int(11) DEFAULT NULL\n" +
		") ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin\n" +
		"PARTITION BY RANGE (`id`)\n" +
		"(PARTITION `p0` VALUES LESS THAN (100) /*T![placement] PLACEMENT POLICY=`p1` */,\n" +
		" PARTITION `p1` VALUES LESS THAN (1000))"))
}
explode_data.jsonl/77387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1806 }
[ 2830, 3393, 28237, 51, 333, 24686, 3973, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 741, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 17957, 35699, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReceiveLogsOp checks that StartLogsOp/EndLogsOp record one span per
// receive operation and attribute accepted vs refused log records correctly
// for both a failing and a succeeding operation, and that the aggregated
// receiver metrics match the per-span counts.
func TestReceiveLogsOp(t *testing.T) {
	tt, err := obsreporttest.SetupTelemetry()
	require.NoError(t, err)
	t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) })
	parentCtx, parentSpan := tt.TracerProvider.Tracer("test").Start(context.Background(), t.Name())
	defer parentSpan.End()
	// One failing (errFake) and one succeeding operation.
	params := []testParams{
		{items: 13, err: errFake},
		{items: 42, err: nil},
	}
	for i, param := range params {
		rec := NewReceiver(ReceiverSettings{
			ReceiverID:             receiver,
			Transport:              transport,
			ReceiverCreateSettings: tt.ToReceiverCreateSettings(),
		})
		ctx := rec.StartLogsOp(parentCtx)
		assert.NotNil(t, ctx)
		rec.EndLogsOp(ctx, format, params[i].items, param.err)
	}
	spans := tt.SpanRecorder.Ended()
	require.Equal(t, len(params), len(spans))
	var acceptedLogRecords, refusedLogRecords int
	for i, span := range spans {
		assert.Equal(t, "receiver/"+receiver.String()+"/LogsReceived", span.Name())
		switch params[i].err {
		case nil:
			// Success: all items accepted, none refused, span status unset.
			acceptedLogRecords += params[i].items
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.AcceptedLogRecordsKey, Value: attribute.Int64Value(int64(params[i].items))})
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.RefusedLogRecordsKey, Value: attribute.Int64Value(0)})
			assert.Equal(t, codes.Unset, span.Status().Code)
		case errFake:
			// Failure: all items refused and the span carries the error
			// status plus the error message as its description.
			refusedLogRecords += params[i].items
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.AcceptedLogRecordsKey, Value: attribute.Int64Value(0)})
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.RefusedLogRecordsKey, Value: attribute.Int64Value(int64(params[i].items))})
			assert.Equal(t, codes.Error, span.Status().Code)
			assert.Equal(t, params[i].err.Error(), span.Status().Description)
		default:
			t.Fatalf("unexpected param: %v", params[i])
		}
	}
	// The receiver metrics must agree with the counts accumulated above.
	require.NoError(t, obsreporttest.CheckReceiverLogs(tt, receiver, transport, int64(acceptedLogRecords), int64(refusedLogRecords)))
}
explode_data.jsonl/57967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 771 }
[ 2830, 3393, 14742, 51053, 7125, 1155, 353, 8840, 836, 8, 341, 3244, 83, 11, 1848, 1669, 7448, 11736, 1944, 39820, 6639, 35958, 741, 17957, 35699, 1155, 11, 1848, 340, 3244, 727, 60639, 18552, 368, 314, 1373, 35699, 1155, 11, 17853, 1084...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetUsersInTeam exercises the GetUsersInTeam client API: sanitization
// of returned users, etag caching, paging (including a page past the end),
// and the permission checks for logged-out users, non-members, and system
// admins.
func TestGetUsersInTeam(t *testing.T) {
	th := Setup().InitBasic().InitSystemAdmin()
	defer th.TearDown()
	Client := th.Client
	teamId := th.BasicTeam.Id
	rusers, resp := Client.GetUsersInTeam(teamId, 0, 60, "")
	CheckNoError(t, resp)
	// Returned users must have sensitive fields sanitized.
	for _, u := range rusers {
		CheckUserSanitization(t, u)
	}
	// Re-requesting with the previous response's Etag should be served from
	// the cache.
	rusers, resp = Client.GetUsersInTeam(teamId, 0, 60, resp.Etag)
	CheckEtag(t, rusers, resp)
	rusers, resp = Client.GetUsersInTeam(teamId, 0, 1, "")
	CheckNoError(t, resp)
	if len(rusers) != 1 {
		t.Fatal("should be 1 per page")
	}
	rusers, resp = Client.GetUsersInTeam(teamId, 1, 1, "")
	CheckNoError(t, resp)
	if len(rusers) != 1 {
		t.Fatal("should be 1 per page")
	}
	// A page far past the end returns an empty list, not an error.
	rusers, resp = Client.GetUsersInTeam(teamId, 10000, 100, "")
	CheckNoError(t, resp)
	if len(rusers) != 0 {
		t.Fatal("should be no users")
	}
	// Logged-out requests are rejected as unauthorized.
	Client.Logout()
	_, resp = Client.GetUsersInTeam(teamId, 0, 60, "")
	CheckUnauthorizedStatus(t, resp)
	// A freshly created user outside the team is forbidden.
	user := th.CreateUser()
	Client.Login(user.Email, user.Password)
	_, resp = Client.GetUsersInTeam(teamId, 0, 60, "")
	CheckForbiddenStatus(t, resp)
	// System admins may always list team members.
	_, resp = th.SystemAdminClient.GetUsersInTeam(teamId, 0, 60, "")
	CheckNoError(t, resp)
}
explode_data.jsonl/21539
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 472 }
[ 2830, 3393, 1949, 7137, 641, 14597, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 197, 9196, 764, 1669, 270, 48868, 14597...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestUnmarshalLocalTime(t *testing.T) { examples := []struct { name string in string out LocalTime }{ { name: "normal", in: "07:32:00", out: LocalTime{ Hour: 7, Minute: 32, Second: 0, Nanosecond: 0, }, }, { name: "with nanoseconds", in: "00:32:00.999999", out: LocalTime{ Hour: 0, Minute: 32, Second: 0, Nanosecond: 999999000, }, }, } for i, example := range examples { toml := fmt.Sprintf(`Time = %s`, example.in) t.Run(fmt.Sprintf("ToLocalTime_%d_%s", i, example.name), func(t *testing.T) { type dateStruct struct { Time LocalTime } var obj dateStruct err := Unmarshal([]byte(toml), &obj) if err != nil { t.Fatal(err) } if obj.Time != example.out { t.Errorf("expected '%s', got '%s'", example.out, obj.Time) } }) } }
explode_data.jsonl/46363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 459 }
[ 2830, 3393, 1806, 27121, 7319, 1462, 1155, 353, 8840, 836, 8, 341, 8122, 4023, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 17430, 256, 914, 198, 197, 13967, 220, 8774, 1462, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestTearDownWithoutRuntime drives the kubenet plugin's TearDownPod for
// IPv4 and IPv6 pod CIDRs (including CIDRs given with a host bit set, which
// must be canonicalized) and asserts that CNI DelNetwork is invoked even
// though no container runtime is present.
func TestTearDownWithoutRuntime(t *testing.T) {
	testCases := []struct {
		podCIDR         []string // CIDRs delivered via the pod-CIDR-change event
		expectedPodCIDR []string // canonical CIDRs the plugin should store
		ip              string   // pod IP registered for the torn-down container
	}{
		{
			podCIDR:         []string{"10.0.0.0/24"},
			expectedPodCIDR: []string{"10.0.0.0/24"},
			ip:              "10.0.0.1",
		},
		{
			podCIDR:         []string{"10.0.0.1/24"},
			expectedPodCIDR: []string{"10.0.0.0/24"},
			ip:              "10.0.0.1",
		},
		{
			podCIDR:         []string{"2001:beef::/48"},
			expectedPodCIDR: []string{"2001:beef::/48"},
			ip:              "2001:beef::1",
		},
		{
			podCIDR:         []string{"2001:beef::1/48"},
			expectedPodCIDR: []string{"2001:beef::/48"},
			ip:              "2001:beef::1",
		},
	}
	for _, tc := range testCases {
		// Build a fake host, CNI mock, and exec shim for an isolated plugin.
		fhost := nettest.NewFakeHost(nil)
		fhost.Legacy = false
		mockcni := &mockcni.MockCNI{}
		fexec := &fakeexec.FakeExec{
			CommandScript: []fakeexec.FakeCommandAction{},
			LookPathFunc: func(file string) (string, error) {
				return fmt.Sprintf("/fake-bin/%s", file), nil
			},
		}
		ips := make(map[kubecontainer.ContainerID]utilsets.String)
		kubenet := newFakeKubenetPlugin(ips, fexec, fhost)
		kubenet.loConfig = &libcni.NetworkConfig{
			Network: &types.NetConf{
				Name: "loopback-fake",
				Type: "loopback",
			},
		}
		kubenet.cniConfig = mockcni
		kubenet.iptables = ipttest.NewFake()
		// Deliver the pod CIDR the same way the kubelet does: via an event.
		details := make(map[string]interface{})
		details[network.NET_PLUGIN_EVENT_POD_CIDR_CHANGE_DETAIL_CIDR] = strings.Join(tc.podCIDR, ",")
		kubenet.Event(network.NET_PLUGIN_EVENT_POD_CIDR_CHANGE, details)
		if len(kubenet.podCIDRs) != len(tc.podCIDR) {
			t.Errorf("generated podCidr: %q, expecting: %q are not of the same length", kubenet.podCIDRs, tc.podCIDR)
			continue
		}
		for idx := range tc.podCIDR {
			if kubenet.podCIDRs[idx].String() != tc.expectedPodCIDR[idx] {
				t.Errorf("generated podCidr: %q, expecting: %q", kubenet.podCIDRs[idx].String(), tc.expectedPodCIDR[idx])
			}
		}
		// Register a known container/IP pair so TearDownPod has work to do.
		existingContainerID := kubecontainer.BuildContainerID("docker", "123")
		kubenet.podIPs[existingContainerID] = utilsets.NewString(tc.ip)
		mockcni.On("DelNetwork", mock.AnythingOfType("*context.timerCtx"), mock.AnythingOfType("*libcni.NetworkConfig"), mock.AnythingOfType("*libcni.RuntimeConf")).Return(nil)
		if err := kubenet.TearDownPod("namespace", "name", existingContainerID); err != nil {
			t.Fatalf("Unexpected error in TearDownPod: %v", err)
		}
		// Assert that the CNI DelNetwork made it through and we didn't crash
		// without a runtime.
		mockcni.AssertExpectations(t)
	}
}
explode_data.jsonl/4479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1192 }
[ 2830, 3393, 51, 682, 4454, 26040, 15123, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 3223, 347, 54146, 49, 260, 3056, 917, 198, 197, 42400, 23527, 54146, 49, 3056, 917, 198, 197, 46531, 1060, 914, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadTree(t *testing.T) { repo, cleanup := repository.TestRepository(t) defer cleanup() if rtest.BenchArchiveDirectory == "" { t.Skip("benchdir not set, skipping") } // archive a few files sn := archiver.TestSnapshot(t, repo, rtest.BenchArchiveDirectory, nil) rtest.OK(t, repo.Flush(context.Background())) _, err := repo.LoadTree(context.TODO(), *sn.Tree) rtest.OK(t, err) }
explode_data.jsonl/71938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 5879, 6533, 1155, 353, 8840, 836, 8, 341, 17200, 5368, 11, 21290, 1669, 12542, 8787, 4624, 1155, 340, 16867, 21290, 2822, 743, 435, 1944, 1785, 19762, 42502, 9310, 621, 1591, 341, 197, 3244, 57776, 445, 27024, 3741, 537, 738...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDecayConst(t *testing.T) { want := 7.302030826339803e-10 got := DecayConst(Id("Cs137")) if got != want { t.Errorf("want %v, got %v", want, got) } }
explode_data.jsonl/31993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 4900, 352, 19167, 1155, 353, 8840, 836, 8, 341, 50780, 1669, 220, 22, 13, 18, 15, 17, 15, 18, 15, 23, 17, 21, 18, 18, 24, 23, 15, 18, 68, 12, 16, 15, 198, 3174, 354, 1669, 97286, 19167, 55924, 445, 33545, 16, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestRouterPanicHandler verifies that a panic raised inside a route handler
// is intercepted by the router's PanicHandler instead of escaping the
// server's serve loop.
func TestRouterPanicHandler(t *testing.T) {
	router := New()
	panicHandled := false
	router.PanicHandler = func(ctx *fasthttp.RequestCtx, p interface{}) {
		panicHandled = true
	}
	// The route handler panics on purpose; the router must recover it.
	router.Handle("PUT", "/user/:name", func(_ *fasthttp.RequestCtx) {
		panic("oops!")
	})
	// If the panic escapes the router it would surface here in this
	// goroutine's deferred recover, failing the test.
	defer func() {
		if rcv := recover(); rcv != nil {
			t.Fatal("handling panic failed")
		}
	}()
	s := &fasthttp.Server{
		Handler: router.Handler,
	}
	// Serve one in-memory request that hits the panicking route.
	rw := &readWriter{}
	ch := make(chan error)
	rw.r.WriteString(string("PUT /user/gopher HTTP/1.1\r\n\r\n"))
	go func() {
		ch <- s.ServeConn(rw)
	}()
	// Wait for the connection to be served, with a timeout so a hung server
	// fails the test instead of blocking forever.
	select {
	case err := <-ch:
		if err != nil {
			t.Fatalf("return error %s", err)
		}
	case <-time.After(100 * time.Millisecond):
		t.Fatalf("timeout")
	}
	if !panicHandled {
		t.Fatal("simulating failed")
	}
}
explode_data.jsonl/42492
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 9523, 47, 31270, 3050, 1155, 353, 8840, 836, 8, 341, 67009, 1669, 1532, 741, 30764, 90909, 1669, 895, 271, 67009, 1069, 31270, 3050, 284, 2915, 7502, 353, 9349, 1254, 9659, 23684, 11, 281, 3749, 28875, 341, 197, 30764, 90909...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInOrderForArgs covers InOrder across artifact counts, dependency
// graphs, and concurrency limits, including build failures with and without
// dependencies and the empty-input case.
func TestInOrderForArgs(t *testing.T) {
	tests := []struct {
		description   string
		buildArtifact ArtifactBuilder
		artifactLen   int
		concurrency   int           // 0 means unbounded
		dependency    map[int][]int // artifact index -> indices it depends on
		expected      []Artifact
		err           error
	}{
		{
			description: "runs in parallel for 2 artifacts with no dependency",
			buildArtifact: func(_ context.Context, _ io.Writer, _ *latest.Artifact, tag string) (string, error) {
				return tag, nil
			},
			artifactLen: 2,
			expected: []Artifact{
				{ImageName: "artifact1", Tag: "artifact1@tag1"},
				{ImageName: "artifact2", Tag: "artifact2@tag2"},
			},
		},
		{
			description: "runs in parallel for 5 artifacts with dependencies",
			buildArtifact: func(_ context.Context, _ io.Writer, _ *latest.Artifact, tag string) (string, error) {
				return tag, nil
			},
			dependency: map[int][]int{
				0: {2, 3},
				1: {3},
				2: {1},
				3: {4},
			},
			artifactLen: 5,
			expected: []Artifact{
				{ImageName: "artifact1", Tag: "artifact1@tag1"},
				{ImageName: "artifact2", Tag: "artifact2@tag2"},
				{ImageName: "artifact3", Tag: "artifact3@tag3"},
				{ImageName: "artifact4", Tag: "artifact4@tag4"},
				{ImageName: "artifact5", Tag: "artifact5@tag5"},
			},
		},
		{
			description: "runs with max concurrency of 2 for 5 artifacts with dependencies",
			buildArtifact: func(_ context.Context, _ io.Writer, _ *latest.Artifact, tag string) (string, error) {
				return tag, nil
			},
			dependency: map[int][]int{
				0: {2, 3},
				1: {3},
				2: {1},
				3: {4},
			},
			artifactLen: 5,
			concurrency: 2,
			expected: []Artifact{
				{ImageName: "artifact1", Tag: "artifact1@tag1"},
				{ImageName: "artifact2", Tag: "artifact2@tag2"},
				{ImageName: "artifact3", Tag: "artifact3@tag3"},
				{ImageName: "artifact4", Tag: "artifact4@tag4"},
				{ImageName: "artifact5", Tag: "artifact5@tag5"},
			},
		},
		{
			description: "runs in parallel should return for 0 artifacts",
			artifactLen: 0,
			expected:    nil,
		},
		{
			// artifact2 fails; the others block on ctx cancellation or a
			// long timer so the failure must cancel the whole build.
			description: "build fails for artifacts without dependencies",
			buildArtifact: func(c context.Context, _ io.Writer, a *latest.Artifact, tag string) (string, error) {
				if a.ImageName == "artifact2" {
					return "", fmt.Errorf(`some error occurred while building "artifact2"`)
				}
				select {
				case <-c.Done():
					return "", c.Err()
				case <-time.After(5 * time.Second):
					return tag, nil
				}
			},
			artifactLen: 5,
			expected:    nil,
			err:         fmt.Errorf(`some error occurred while building "artifact2"`),
		},
		{
			description: "build fails for artifacts with dependencies",
			buildArtifact: func(_ context.Context, _ io.Writer, a *latest.Artifact, tag string) (string, error) {
				if a.ImageName == "artifact2" {
					return "", fmt.Errorf(`some error occurred while building "artifact2"`)
				}
				return tag, nil
			},
			dependency: map[int][]int{
				0: {1},
				1: {2},
				2: {3},
				3: {4},
			},
			artifactLen: 5,
			expected:    nil,
			err:         fmt.Errorf(`some error occurred while building "artifact2"`),
		},
	}
	for _, test := range tests {
		testutil.Run(t, test.description, func(t *testutil.T) {
			// Synthesize artifactLen artifacts named artifact1..N with
			// matching tags artifactN@tagN.
			artifacts := make([]*latest.Artifact, test.artifactLen)
			tags := tag.ImageTags{}
			for i := 0; i < test.artifactLen; i++ {
				a := fmt.Sprintf("artifact%d", i+1)
				artifacts[i] = &latest.Artifact{ImageName: a}
				tags[a] = fmt.Sprintf("%s@tag%d", a, i+1)
			}
			setDependencies(artifacts, test.dependency)
			initializeEvents()
			actual, err := InOrder(context.Background(), ioutil.Discard, tags, artifacts, test.buildArtifact, test.concurrency, NewArtifactStore())
			t.CheckDeepEqual(test.expected, actual)
			t.CheckDeepEqual(test.err, err, cmp.Comparer(errorsComparer))
		})
	}
}
explode_data.jsonl/40729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1608 }
[ 2830, 3393, 641, 4431, 2461, 4117, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 256, 914, 198, 197, 69371, 85578, 86082, 3297, 198, 197, 197, 63722, 11271, 256, 526, 198, 197, 37203, 15973, 256, 526, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewUser(t *testing.T) { var ( user *u.User ) user, err := u.NewUser( "test_endpoint", "test_remote_ip", "test_key_auth", "test_key_p256_dh") assert.NoError(t, err) assert.Equal(t, "test_endpoint", user.Endpoint) assert.Equal(t, "test_remote_ip", user.RemoteIP) assert.Equal(t, "test_key_auth", user.KeyAuth) assert.Equal(t, "test_key_p256_dh", user.KeyP256DH) assert.Equal(t, false, user.CreatedAt().IsZero()) }
explode_data.jsonl/82651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 3564, 1474, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 19060, 353, 84, 7344, 198, 197, 692, 19060, 11, 1848, 1669, 575, 7121, 1474, 1006, 197, 197, 1, 1944, 36699, 756, 197, 197, 1, 1944, 36425, 10385, 756, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExecuteFailOnDuplicateNamesInDifferentProjects(t *testing.T) { environment := environment.NewEnvironment("dev", "Dev", "", "https://url/to/dev/environment", "DEV") apis := testGetExecuteApis() path := util.ReplacePathSeparators("../../cmd/monaco/test-resources/duplicate-name-test") projects, err := project.LoadProjectsToDeploy("project1, project2", apis, path, util.NewFileReader()) assert.NilError(t, err) errors := execute(environment, projects, true, "", false) assert.ErrorContains(t, errors[0], "duplicate UID 'calculated-metrics-log/metric' found in") }
explode_data.jsonl/80404
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 17174, 19524, 1925, 53979, 7980, 641, 69123, 29958, 1155, 353, 8840, 836, 8, 341, 197, 23294, 1669, 4573, 7121, 12723, 445, 3583, 497, 330, 14592, 497, 7342, 330, 2428, 1110, 1085, 32429, 35061, 54482, 497, 330, 31819, 5130, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInit(t *testing.T) { var ( calls = make(Set) taskMessages = make(Set) taskTypes = make(Set) home = mkTestDirectory(t) ) defer osRemoveAll(home) func() { denyAllDevmapper() DmSetDevDir = func(dir string) int { calls["DmSetDevDir"] = true expectedDir := "/dev" if dir != expectedDir { t.Fatalf("Wrong libdevmapper call\nExpected: DmSetDevDir(%v)\nReceived: DmSetDevDir(%v)\n", expectedDir, dir) } return 0 } LogWithErrnoInit = func() { calls["DmLogWithErrnoInit"] = true } var task1 CDmTask DmTaskCreate = func(taskType int) *CDmTask { calls["DmTaskCreate"] = true taskTypes[fmt.Sprintf("%d", taskType)] = true return &task1 } DmTaskSetName = func(task *CDmTask, name string) int { calls["DmTaskSetName"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskSetName(%v)\nReceived: DmTaskSetName(%v)\n", expectedTask, task) } // FIXME: use Set.AssertRegexp() if !strings.HasPrefix(name, "docker-") && !strings.HasPrefix(name, "/dev/mapper/docker-") || !strings.HasSuffix(name, "-pool") && !strings.HasSuffix(name, "-base") { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskSetName(%v)\nReceived: DmTaskSetName(%v)\n", "docker-...-pool", name) } return 1 } DmTaskRun = func(task *CDmTask) int { calls["DmTaskRun"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskRun(%v)\nReceived: DmTaskRun(%v)\n", expectedTask, task) } return 1 } DmTaskGetInfo = func(task *CDmTask, info *Info) int { calls["DmTaskGetInfo"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskGetInfo(%v)\nReceived: DmTaskGetInfo(%v)\n", expectedTask, task) } // This will crash if info is not dereferenceable info.Exists = 0 return 1 } DmTaskSetSector = func(task *CDmTask, sector uint64) int { calls["DmTaskSetSector"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskSetSector(%v)\nReceived: 
DmTaskSetSector(%v)\n", expectedTask, task) } if expectedSector := uint64(0); sector != expectedSector { t.Fatalf("Wrong libdevmapper call to DmTaskSetSector\nExpected: %v\nReceived: %v\n", expectedSector, sector) } return 1 } DmTaskSetMessage = func(task *CDmTask, message string) int { calls["DmTaskSetMessage"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskSetSector(%v)\nReceived: DmTaskSetSector(%v)\n", expectedTask, task) } taskMessages[message] = true return 1 } DmTaskDestroy = func(task *CDmTask) { calls["DmTaskDestroy"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskDestroy(%v)\nReceived: DmTaskDestroy(%v)\n", expectedTask, task) } } DmTaskAddTarget = func(task *CDmTask, start, size uint64, ttype, params string) int { calls["DmTaskSetTarget"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskDestroy(%v)\nReceived: DmTaskDestroy(%v)\n", expectedTask, task) } if start != 0 { t.Fatalf("Wrong start: %d != %d", start, 0) } if ttype != "thin" && ttype != "thin-pool" { t.Fatalf("Wrong ttype: %s", ttype) } // Quick smoke test if params == "" { t.Fatalf("Params should not be empty") } return 1 } fakeCookie := uint(4321) DmTaskSetCookie = func(task *CDmTask, cookie *uint, flags uint16) int { calls["DmTaskSetCookie"] = true expectedTask := &task1 if task != expectedTask { t.Fatalf("Wrong libdevmapper call\nExpected: DmTaskDestroy(%v)\nReceived: DmTaskDestroy(%v)\n", expectedTask, task) } if flags != 0 { t.Fatalf("Cookie flags should be 0 (not %x)", flags) } *cookie = fakeCookie return 1 } DmUdevWait = func(cookie uint) int { calls["DmUdevWait"] = true if cookie != fakeCookie { t.Fatalf("Wrong cookie: %d != %d", cookie, fakeCookie) } return 1 } DmTaskSetAddNode = func(task *CDmTask, addNode AddNodeType) int { if addNode != AddNodeOnCreate { t.Fatalf("Wrong AddNoteType: %v (expected %v)", addNode, 
AddNodeOnCreate) } calls["DmTaskSetAddNode"] = true return 1 } execRun = func(name string, args ...string) error { calls["execRun"] = true if name != "mkfs.ext4" { t.Fatalf("Expected %s to be executed, not %s", "mkfs.ext4", name) } return nil } driver, err := Init(home) if err != nil { t.Fatal(err) } defer func() { if err := driver.Cleanup(); err != nil { t.Fatal(err) } }() }() // Put all tests in a funciton to make sure the garbage collection will // occur. // Call GC to cleanup runtime.Finalizers runtime.GC() calls.Assert(t, "DmSetDevDir", "DmLogWithErrnoInit", "DmTaskSetName", "DmTaskRun", "DmTaskGetInfo", "DmTaskDestroy", "execRun", "DmTaskCreate", "DmTaskSetTarget", "DmTaskSetCookie", "DmUdevWait", "DmTaskSetSector", "DmTaskSetMessage", "DmTaskSetAddNode", ) taskTypes.Assert(t, "0", "6", "17") taskMessages.Assert(t, "create_thin 0", "set_transaction_id 0 1") }
explode_data.jsonl/45476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2351 }
[ 2830, 3393, 3803, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 5583, 286, 284, 1281, 52474, 340, 197, 49115, 15820, 284, 1281, 52474, 340, 197, 49115, 4173, 262, 284, 1281, 52474, 340, 197, 197, 5117, 260, 284, 23789, 2271, 93...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSelect_CFTask(t *testing.T) { taskPrompt := "TASK PLX" taskHelp := "NO" testTasks := []string{"abc", "db-migrate"} testDefaultTask := "db-migrate" testCases := map[string]struct { inDefaultCluster string inOpts []GetDeployedTaskOpts mockStore func(*mocks.MockConfigLister) mockPrompt func(*mocks.MockPrompter) mockCF func(*mocks.MockTaskStackDescriber) wantedErr error wantedTask string }{ "choose an existing task": { inOpts: []GetDeployedTaskOpts{ TaskWithAppEnv("phonetool", "prod-iad"), }, mockStore: func(m *mocks.MockConfigLister) {}, mockCF: func(m *mocks.MockTaskStackDescriber) { m.EXPECT().ListTaskStacks("phonetool", "prod-iad").Return([]deploy.TaskStackInfo{ { StackName: "copilot-abc", App: "phonetool", Env: "prod-iad", }, { StackName: "copilot-db-migrate", App: "phonetool", Env: "prod-iad", }, }, nil) }, mockPrompt: func(m *mocks.MockPrompter) { m.EXPECT().SelectOne( gomock.Any(), gomock.Any(), []string{ "abc", "db-migrate", }, ).Return("abc", nil) }, wantedErr: nil, wantedTask: testTasks[0], }, "error when retrieving stacks": { inOpts: []GetDeployedTaskOpts{ TaskWithAppEnv("phonetool", "prod-iad"), }, mockStore: func(m *mocks.MockConfigLister) {}, mockCF: func(m *mocks.MockTaskStackDescriber) { m.EXPECT().ListTaskStacks("phonetool", "prod-iad").Return(nil, errors.New("some error")) }, mockPrompt: func(m *mocks.MockPrompter) {}, wantedErr: errors.New("get tasks in environment prod-iad: some error"), }, "with default cluster task": { inOpts: []GetDeployedTaskOpts{ TaskWithDefaultCluster(), }, mockStore: func(m *mocks.MockConfigLister) {}, mockCF: func(m *mocks.MockTaskStackDescriber) { m.EXPECT().ListDefaultTaskStacks().Return([]deploy.TaskStackInfo{ { StackName: "task-oneoff", }, { StackName: "copilot-db-migrate", }, }, nil) }, mockPrompt: func(m *mocks.MockPrompter) { m.EXPECT().SelectOne( gomock.Any(), gomock.Any(), []string{ "oneoff", "db-migrate", }, ).Return("db-migrate", nil) }, wantedErr: nil, wantedTask: testDefaultTask, }, "with error getting 
default cluster tasks": { inOpts: []GetDeployedTaskOpts{ TaskWithDefaultCluster(), }, mockStore: func(m *mocks.MockConfigLister) {}, mockCF: func(m *mocks.MockTaskStackDescriber) { m.EXPECT().ListDefaultTaskStacks().Return(nil, errors.New("some error")) }, mockPrompt: func(m *mocks.MockPrompter) {}, wantedErr: errors.New("get tasks in default cluster: some error"), }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { // GIVEN ctrl := gomock.NewController(t) defer ctrl.Finish() p := mocks.NewMockPrompter(ctrl) s := mocks.NewMockConfigLister(ctrl) cf := mocks.NewMockTaskStackDescriber(ctrl) tc.mockPrompt(p) tc.mockCF(cf) tc.mockStore(s) sel := CFTaskSelect{ Select: &Select{ prompt: p, config: s, }, cfStore: cf, } // WHEN choice, err := sel.Task(taskPrompt, taskHelp, tc.inOpts...) // THEN if tc.wantedErr != nil { require.EqualError(t, err, tc.wantedErr.Error()) } else { require.Equal(t, tc.wantedTask, choice) } }) } }
explode_data.jsonl/44900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1654 }
[ 2830, 3393, 3379, 920, 3994, 1073, 1155, 353, 8840, 836, 8, 341, 49115, 54615, 1669, 330, 65813, 10315, 55, 698, 49115, 12689, 1669, 330, 8996, 698, 18185, 25449, 1669, 3056, 917, 4913, 13683, 497, 330, 1999, 1448, 34479, 16707, 18185, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildGetIncidentRuleRequest_Validate(t *testing.T) { var err error request := &GetIncidentRulesRequest{} err = request.Validate() assert.Equal(t, err.Error(), errors.New("Service Id cannot be empty.").Error()) request.ServiceId = "id" err = request.Validate() assert.Nil(t, err) assert.Equal(t, request.ResourcePath(), "/v1/services/id/incident-rules") assert.Equal(t, request.Method(), http.MethodGet) }
explode_data.jsonl/55217
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 11066, 1949, 39245, 1713, 11337, 1900, 62, 17926, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 23555, 1669, 609, 1949, 39245, 1713, 26008, 1900, 16094, 9859, 284, 1681, 47667, 741, 6948, 12808, 1155, 11, 1848, 6141, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWalletBalance(t *testing.T) { app, mockApi, buffer, done := NewMockAppWithFullAPI(t, WithCategory("wallet", walletBalance)) defer done() ctx, cancel := context.WithCancel(context.Background()) defer cancel() addr, err := address.NewIDAddress(1234) assert.NoError(t, err) balance := big.NewInt(1234) mockApi.EXPECT().WalletBalance(ctx, addr).Return(balance, nil) //stm: @CLI_WALLET_BALANCE_001 err = app.Run([]string{"wallet", "balance", "f01234"}) assert.NoError(t, err) assert.Contains(t, buffer.String(), balance.String()) }
explode_data.jsonl/8252
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 38259, 21190, 1155, 353, 8840, 836, 8, 341, 28236, 11, 7860, 6563, 11, 4147, 11, 2814, 1669, 1532, 11571, 2164, 2354, 9432, 7082, 1155, 11, 3085, 6746, 445, 35735, 497, 15085, 21190, 1171, 16867, 2814, 2822, 20985, 11, 9121,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateDomainClaims(t *testing.T) { refATSIng := &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-ats-ingress-ref", Namespace: "test-ns-ref", Annotations: map[string]string{ string(DefaultDomain): "test-ats-ref1.company.com", string(Aliases): "test-ats-ref2.company.com", string(Ports): "80", }, }, } refIstioIng := &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-istio-ingress-ref", Namespace: "test-ns-ref", Annotations: map[string]string{ string(IngressClass): Istio, }, }, Spec: v1beta1.IngressSpec{ Rules: []v1beta1.IngressRule{ { Host: "test-istio-ref1.company.com", }, { Host: "test-istio-ref2.company.com", }, }, }, } helper.SetIndexer(cache.NewIndexer( cache.DeletionHandlingMetaNamespaceKeyFunc, cache.Indexers{ ATS: helper.GetProviderByName(ATS).DomainsIndexFunc, Istio: helper.GetProviderByName(Istio).DomainsIndexFunc, })) helper.indexer.Add(refATSIng) helper.indexer.Add(refIstioIng) tests := []struct { name string input *v1beta1.Ingress expected error }{ { "should pass for an empty ingress spec", &v1beta1.Ingress{}, nil, }, { "should pass for an ATS ingress with no duplicate domains", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-ingress", Namespace: "test-namespace", Annotations: map[string]string{ string(DefaultDomain): "test1.company.com", string(Aliases): "test2.company.com,test3.company.com", string(Ports): "80", }, }, Spec: v1beta1.IngressSpec{ Backend: &v1beta1.IngressBackend{ ServiceName: "test2-svc", ServicePort: intstr.FromInt(80), }, }, }, nil, }, { "should pass for an istio ingress with no duplicate domains", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-ingress", Namespace: "test-namespace", Annotations: map[string]string{ string(IngressClass): Istio, }, }, Spec: v1beta1.IngressSpec{ Rules: []v1beta1.IngressRule{ { Host: "test1.company.com", }, { Host: "test2.company.com", }, }, }, }, nil, }, { "should pass for an ATS ingress update on same object", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: 
"test-ats-ingress-ref", Namespace: "test-ns-ref", Annotations: map[string]string{ string(DefaultDomain): "test-ats-ref1.company.com", string(Aliases): "test-ats-ref2.company.com, test-ats-ref3.company.com", string(Ports): "80", }, }, Spec: v1beta1.IngressSpec{ Backend: &v1beta1.IngressBackend{ ServiceName: "test2-svc", ServicePort: intstr.FromInt(80), }, }, }, nil, }, { "should pass for an istio ingress update on same object", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-istio-ingress-ref", Namespace: "test-ns-ref", Annotations: map[string]string{ string(IngressClass): Istio, }, }, Spec: v1beta1.IngressSpec{ Rules: []v1beta1.IngressRule{ { Host: "test-istio-ref1.company.com", }, { Host: "test-istio-ref2.company.com", }, { Host: "test-istio-ref3.company.com", }, }, }, }, nil, }, { "should fail for an ATS ingress with duplicate domains", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-ingress", Namespace: "test-namespace", Annotations: map[string]string{ string(DefaultDomain): "test1.company.com", string(Aliases): "test2.company.com,test-ats-ref2.company.com", string(Ports): "80", }, }, Spec: v1beta1.IngressSpec{ Backend: &v1beta1.IngressBackend{ ServiceName: "test2-svc", ServicePort: intstr.FromInt(80), }, }, }, errors.New("Domain test-ats-ref2.company.com already exists. Ingress test-ats-ingress-ref in " + "namespace test-ns-ref owns this domain."), }, { "should fail for an istio ingress with duplicate domains", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-ingress", Namespace: "test-namespace", Annotations: map[string]string{ string(IngressClass): Istio, }, }, Spec: v1beta1.IngressSpec{ Rules: []v1beta1.IngressRule{ { Host: "test1.company.com", }, { Host: "test-istio-ref2.company.com", }, }, }, }, errors.New("Domain test-istio-ref2.company.com already exists. 
Ingress test-istio-ingress-ref " + "in namespace test-ns-ref owns this domain."), }, { "should pass for an ATS ingress with hosts same as Istio hosts", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-ingress2", Namespace: "test-ns2", Annotations: map[string]string{ string(DefaultDomain): "test-istio-ref1.company.com", string(Aliases): "test-istio-ref2.company.com", string(Ports): "80", }, }, Spec: v1beta1.IngressSpec{ Backend: &v1beta1.IngressBackend{ ServiceName: "test2-svc", ServicePort: intstr.FromInt(80), }, }, }, nil, }, { "should pass for an istio ingress with hosts same as ATS domains", &v1beta1.Ingress{ ObjectMeta: v1.ObjectMeta{ Name: "test-ingress2", Namespace: "test-ns2", Annotations: map[string]string{ string(IngressClass): Istio, }, }, Spec: v1beta1.IngressSpec{ Rules: []v1beta1.IngressRule{ { Host: "test-ats-ref1.company.com", }, { Host: "test-ats-ref2.company.com", }, }, }, }, nil, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { err := helper.validateDomainClaims(test.input, helper.GetProvider(test.input).GetDomains(test.input)) if test.expected == nil { assert.Nil(t, err, test.name) } else if assert.NotNil(t, err, test.name) { assert.Equal(t, test.expected.Error(), err.Error(), test.name) } }) } helper.indexer.Delete(refIstioIng) helper.indexer.Delete(refATSIng) }
explode_data.jsonl/15646
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3159 }
[ 2830, 3393, 17926, 13636, 51133, 1155, 353, 8840, 836, 8, 1476, 59504, 49107, 25416, 1669, 609, 85, 16, 19127, 16, 5337, 2483, 515, 197, 23816, 12175, 25, 348, 16, 80222, 515, 298, 21297, 25, 414, 330, 1944, 12, 1862, 83905, 673, 4300...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDeleteDevice(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() url := "/API/SoEntryFGHA" th.Mux.HandleFunc(url, func(w http.ResponseWriter, r *http.Request) { th.TestMethod(t, r, "POST") th.TestHeader(t, r, "X-Auth-Token", fakeclient.TokenID) th.TestJSONRequest(t, r, deleteRequest) w.WriteHeader(http.StatusOK) w.Header().Add("Content-Type", "application/json") fmt.Fprintf(w, deleteResponse) }) gtHost1 := security.GtHostInDelete{ HostName: "CES11811", } gtHost2 := security.GtHostInDelete{ HostName: "CES11812", } deleteOpts := security.DeleteOpts{ SOKind: "DH", TenantID: "9ee80f2a926c49f88f166af47df4e9f5", GtHost: [2]security.GtHostInDelete{gtHost1, gtHost2}, } actual, err := security.Delete(fakeclient.ServiceClient(), "CES11811", deleteOpts).Extract() th.AssertNoErr(t, err) th.CheckDeepEquals(t, &deleteResult, actual) }
explode_data.jsonl/45987
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 6435, 6985, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 2822, 19320, 1669, 3521, 7082, 14, 4416, 5874, 12001, 17020, 698, 70479, 1321, 2200, 63623, 6522, 11, 2915, 3622, 1758, 37508, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLinkNeighExists(t *testing.T) { mockNetLinkOps := new(mocks.NetLinkOps) mockLink := new(netlink_mocks.Link) // below is defined in net_linux.go netLinkOps = mockNetLinkOps tests := []struct { desc string inputLink netlink.Link inputNeigIP net.IP inputMacAddr net.HardwareAddr errExp bool outBoolFlag bool onRetArgsNetLinkLibOpers []ovntest.TestifyMockHelper onRetArgsLinkIfaceOpers []ovntest.TestifyMockHelper }{ { desc: "test path when NeighList() returns error", inputLink: mockLink, errExp: true, outBoolFlag: false, onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "NeighList", OnCallMethodArgType: []string{"int", "int"}, RetArgList: []interface{}{[]netlink.Neigh{}, fmt.Errorf("mock error")}}, }, onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}}, {OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}}, }, }, { desc: "test path when NeighList() returns empty list and no error", inputLink: mockLink, outBoolFlag: false, onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "NeighList", OnCallMethodArgType: []string{"int", "int"}, RetArgList: []interface{}{[]netlink.Neigh{}, nil}}, }, onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}}, }, }, { desc: "test path where MAC/IP binding is established", inputLink: mockLink, inputNeigIP: ovntest.MustParseIP("192.169.1.12"), inputMacAddr: ovntest.MustParseMAC("0A:58:FD:98:00:01"), outBoolFlag: true, onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "NeighList", OnCallMethodArgType: []string{"int", "int"}, RetArgList: []interface{}{ []netlink.Neigh{ {IP: 
ovntest.MustParseIP("192.169.1.12"), HardwareAddr: ovntest.MustParseMAC("0A:58:FD:98:00:01"), State: netlink.NUD_PERMANENT}, }, nil, }, }, }, onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}}, }, }, { desc: "test path where MAC/IP bindings DOES NOT exist", inputLink: mockLink, inputNeigIP: ovntest.MustParseIP("192.169.1.15"), inputMacAddr: ovntest.MustParseMAC("0A:58:FD:98:00:01"), onRetArgsNetLinkLibOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "NeighList", OnCallMethodArgType: []string{"int", "int"}, RetArgList: []interface{}{ []netlink.Neigh{ {IP: ovntest.MustParseIP("192.169.1.12"), HardwareAddr: ovntest.MustParseMAC("0A:58:FD:98:00:01"), State: netlink.NUD_PERMANENT}, }, nil, }, }, }, onRetArgsLinkIfaceOpers: []ovntest.TestifyMockHelper{ {OnCallMethodName: "Attrs", OnCallMethodArgType: []string{}, RetArgList: []interface{}{&netlink.LinkAttrs{Name: "testIfaceName", Index: 1}}}, }, }, } for i, tc := range tests { t.Run(fmt.Sprintf("%d:%s", i, tc.desc), func(t *testing.T) { ovntest.ProcessMockFnList(&mockNetLinkOps.Mock, tc.onRetArgsNetLinkLibOpers) ovntest.ProcessMockFnList(&mockLink.Mock, tc.onRetArgsLinkIfaceOpers) flag, err := LinkNeighExists(tc.inputLink, tc.inputNeigIP, tc.inputMacAddr) t.Log(flag, err) if tc.errExp { assert.Error(t, err) } else { assert.Nil(t, err) } if tc.outBoolFlag { assert.True(t, flag) } else { assert.False(t, flag) } mockNetLinkOps.AssertExpectations(t) mockLink.AssertExpectations(t) }) } }
explode_data.jsonl/30960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1819 }
[ 2830, 3393, 3939, 8813, 1090, 15575, 1155, 353, 8840, 836, 8, 341, 77333, 6954, 3939, 38904, 1669, 501, 1255, 25183, 16993, 3939, 38904, 340, 77333, 3939, 1669, 501, 30723, 2080, 717, 25183, 22534, 340, 197, 322, 3685, 374, 4512, 304, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAccAWSENI_ignoreExternalAttachment(t *testing.T) { var conf ec2.NetworkInterface resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, IDRefreshName: "aws_network_interface.bar", Providers: testAccProviders, CheckDestroy: testAccCheckAWSENIDestroy, Steps: []resource.TestStep{ resource.TestStep{ Config: testAccAWSENIConfigExternalAttachment, Check: resource.ComposeTestCheckFunc( testAccCheckAWSENIExists("aws_network_interface.bar", &conf), testAccCheckAWSENIAttributes(&conf), testAccCheckAWSENIMakeExternalAttachment("aws_instance.foo", &conf), ), }, }, }) }
explode_data.jsonl/30825
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 14603, 14419, 925, 14912, 58493, 25913, 33569, 1155, 353, 8840, 836, 8, 341, 2405, 2335, 11942, 17, 30149, 5051, 271, 50346, 8787, 1155, 11, 5101, 31363, 515, 197, 197, 4703, 3973, 25, 414, 2915, 368, 314, 1273, 14603, 4703,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStartExecutionSpanWithoutPayload(t *testing.T) { defer reset() startTime := time.Now() startExecutionSpan(startTime, "", LambdaInvokeEventHeaders{}) assert.Equal(t, startTime, currentExecutionInfo.startTime) assert.NotEqual(t, 0, currentExecutionInfo.traceID) assert.NotEqual(t, 0, currentExecutionInfo.spanID) }
explode_data.jsonl/2344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 3479, 20294, 12485, 26040, 29683, 1155, 353, 8840, 836, 8, 341, 16867, 7585, 741, 21375, 1462, 1669, 882, 13244, 741, 21375, 20294, 12485, 88090, 11, 7342, 44521, 17604, 1556, 10574, 37790, 6948, 12808, 1155, 11, 22858, 11, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListDaemonSets(t *testing.T) { tests := []struct { description string namespace string query *query.Query expected *api.ListResult expectedErr error }{ { "test name filter", "bar", &query.Query{ Pagination: &query.Pagination{ Limit: 1, Offset: 0, }, SortBy: query.FieldName, Ascending: false, Filters: map[query.Field]query.Value{query.FieldName: query.Value("foo2")}, }, &api.ListResult{ Items: []interface{}{ foo2, }, TotalItems: 1, }, nil, }, } getter := prepare() for _, test := range tests { t.Run(test.description, func(t *testing.T) { got, err := getter.List(test.namespace, test.query) if test.expectedErr != nil && err != test.expectedErr { t.Errorf("expected error, got nothing") } else if err != nil { t.Fatal(err) } if diff := cmp.Diff(got, test.expected); diff != "" { t.Errorf("%T differ (-got, +want): %s", test.expected, diff) } }) } }
explode_data.jsonl/55029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 852, 89177, 30175, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 56623, 256, 914, 198, 197, 27274, 981, 353, 1631, 15685, 198, 197, 42400, 262, 353, 2068, 5814, 2077, 198, 197, 4240...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestHandlerFlushHandler(t *testing.T) { conf := internal.NewConfigurationWithDefaults() viper.Set(configuration.ViperKeyScopeStrategy, "DEPRECATED_HIERARCHICAL_SCOPE_STRATEGY") viper.Set(configuration.ViperKeyIssuerURL, "http://hydra.localhost") reg := internal.NewRegistryMemory(t, conf) cl := reg.ClientManager() store := reg.OAuth2Storage() h := oauth2.NewHandler(reg, conf) for _, r := range flushRequests { _ = cl.CreateClient(context.Background(), r.Client.(*client.Client)) require.NoError(t, store.CreateAccessTokenSession(context.Background(), r.ID, r)) } r := x.NewRouterAdmin() h.SetRoutes(r, r.RouterPublic(), func(h http.Handler) http.Handler { return h }) ts := httptest.NewServer(r) defer ts.Close() c := hydra.NewHTTPClientWithConfig(nil, &hydra.TransportConfig{Schemes: []string{"http"}, Host: urlx.ParseOrPanic(ts.URL).Host}) ds := new(oauth2.Session) ctx := context.Background() _, err := c.Admin.FlushInactiveOAuth2Tokens(admin.NewFlushInactiveOAuth2TokensParams().WithBody(&models.FlushInactiveOAuth2TokensRequest{NotAfter: strfmt.DateTime(time.Now().Add(-time.Hour * 24))})) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-1", ds) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-2", ds) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-3", ds) require.NoError(t, err) _, err = c.Admin.FlushInactiveOAuth2Tokens(admin.NewFlushInactiveOAuth2TokensParams().WithBody(&models.FlushInactiveOAuth2TokensRequest{NotAfter: strfmt.DateTime(time.Now().Add(-(lifespan + time.Hour/2)))})) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-1", ds) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-2", ds) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-3", ds) require.Error(t, err) _, err = c.Admin.FlushInactiveOAuth2Tokens(admin.NewFlushInactiveOAuth2TokensParams().WithBody(&models.FlushInactiveOAuth2TokensRequest{NotAfter: 
strfmt.DateTime(time.Now())})) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-1", ds) require.NoError(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-2", ds) require.Error(t, err) _, err = store.GetAccessTokenSession(ctx, "flush-3", ds) require.Error(t, err) }
explode_data.jsonl/12555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 886 }
[ 2830, 3393, 3050, 46874, 3050, 1155, 353, 8840, 836, 8, 341, 67850, 1669, 5306, 7121, 7688, 2354, 16273, 741, 5195, 12858, 4202, 48724, 5058, 12858, 1592, 10803, 19816, 11, 330, 1150, 57713, 2039, 16289, 10790, 15571, 61725, 7159, 97221, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogLevels(t *testing.T) { mlevel := ModuleLevels{} mlevel.SetLevel("module-xyz-info", api.INFO) mlevel.SetLevel("module-xyz-debug", api.DEBUG) mlevel.SetLevel("module-xyz-error", api.ERROR) mlevel.SetLevel("module-xyz-warning", api.WARNING) //Run info level checks assert.True(t, mlevel.IsEnabledFor("module-xyz-info", api.INFO)) assert.False(t, mlevel.IsEnabledFor("module-xyz-info", api.DEBUG)) assert.True(t, mlevel.IsEnabledFor("module-xyz-info", api.ERROR)) assert.True(t, mlevel.IsEnabledFor("module-xyz-info", api.WARNING)) //Run debug level checks assert.True(t, mlevel.IsEnabledFor("module-xyz-debug", api.INFO)) assert.True(t, mlevel.IsEnabledFor("module-xyz-debug", api.DEBUG)) assert.True(t, mlevel.IsEnabledFor("module-xyz-debug", api.ERROR)) assert.True(t, mlevel.IsEnabledFor("module-xyz-debug", api.WARNING)) //Run info level checks assert.False(t, mlevel.IsEnabledFor("module-xyz-error", api.INFO)) assert.False(t, mlevel.IsEnabledFor("module-xyz-error", api.DEBUG)) assert.True(t, mlevel.IsEnabledFor("module-xyz-error", api.ERROR)) assert.False(t, mlevel.IsEnabledFor("module-xyz-error", api.WARNING)) //Run info level checks assert.False(t, mlevel.IsEnabledFor("module-xyz-warning", api.INFO)) assert.False(t, mlevel.IsEnabledFor("module-xyz-warning", api.DEBUG)) assert.True(t, mlevel.IsEnabledFor("module-xyz-warning", api.ERROR)) assert.True(t, mlevel.IsEnabledFor("module-xyz-warning", api.WARNING)) //Run default log level check --> which is info currently assert.True(t, mlevel.IsEnabledFor("module-xyz-random-module", api.INFO)) assert.False(t, mlevel.IsEnabledFor("module-xyz-random-module", api.DEBUG)) assert.True(t, mlevel.IsEnabledFor("module-xyz-random-module", api.ERROR)) assert.True(t, mlevel.IsEnabledFor("module-xyz-random-module", api.WARNING)) }
explode_data.jsonl/46791
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 671 }
[ 2830, 3393, 2201, 46991, 1155, 353, 8840, 836, 8, 1476, 2109, 3294, 1669, 13711, 46991, 31483, 2109, 3294, 4202, 4449, 445, 4352, 12, 28854, 12505, 497, 6330, 38317, 340, 2109, 3294, 4202, 4449, 445, 4352, 12, 28854, 71051, 497, 6330, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_ObjectTracker_TryCancelExpect_InfiniteRetries(t *testing.T) { g := gomega.NewWithT(t) ot := newObjTracker(schema.GroupVersionKind{}, func() objData { return objData{retries: -1} }) const count = 10 ct := makeCTSlice("ct-", count) for i := 0; i < len(ct); i++ { ot.Expect(ct[i]) } g.Expect(ot.Satisfied()).NotTo(gomega.BeTrue(), "should not be satisfied before ExpectationsDone") ot.ExpectationsDone() // Skip the first one for i := 1; i < len(ct); i++ { g.Expect(ot.Satisfied()).NotTo(gomega.BeTrue(), "should not be satisfied before observations are done") ot.Observe(ct[i]) } g.Expect(ot.Satisfied()).NotTo(gomega.BeTrue(), "one expectation should remain after two retries") for i := 0; i < 20; i++ { ot.TryCancelExpect(ct[0]) g.Expect(ot.Satisfied()).NotTo(gomega.BeTrue(), "expectation should remain due to infinite retries") } }
explode_data.jsonl/52325
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 27839, 31133, 1139, 884, 9269, 17536, 25972, 25722, 12020, 4019, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 2354, 51, 1155, 340, 197, 354, 1669, 74259, 31133, 42735, 5407, 5637, 10629, 22655, 2915, 368, 2839, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateListenBacklog(t *testing.T) { c := context.New(t, defaultMTU) defer c.Cleanup() // Create listener. var wq waiter.Queue ep, err := c.Stack().NewEndpoint(tcp.ProtocolNumber, ipv4.ProtocolNumber, &wq) if err != nil { t.Fatalf("NewEndpoint failed: %v", err) } if err := ep.Bind(tcpip.FullAddress{}); err != nil { t.Fatalf("Bind failed: %v", err) } if err := ep.Listen(10); err != nil { t.Fatalf("Listen failed: %v", err) } // Update the backlog with another Listen() on the same endpoint. if err := ep.Listen(20); err != nil { t.Fatalf("Listen failed to update backlog: %v", err) } ep.Close() }
explode_data.jsonl/22315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 4289, 38714, 3707, 839, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2266, 7121, 1155, 11, 1638, 8505, 52, 340, 16867, 272, 727, 60639, 2822, 197, 322, 4230, 11446, 624, 2405, 289, 80, 67169, 50251, 198, 96626, 11, 1848, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPropertyDelete(t *testing.T) { dummy := newObject(ObjectType) cases := []invokeTestCase{ {args: wrapArgs(newProperty(nil, nil, wrapFuncForTest(func(f *Frame, o *Object) (*Object, *BaseException) { return None, nil })), dummy), want: None}, {args: wrapArgs(newProperty(nil, nil, wrapFuncForTest(func(f *Frame, o *Object) (*Object, *BaseException) { return nil, f.RaiseType(ValueErrorType, "bar") })), dummy), wantExc: mustCreateException(ValueErrorType, "bar")}, {args: wrapArgs(newProperty(nil, nil, nil), dummy), wantExc: mustCreateException(AttributeErrorType, "can't delete attribute")}, } for _, cas := range cases { if err := runInvokeMethodTestCase(PropertyType, "__delete__", &cas); err != "" { t.Error(err) } } }
explode_data.jsonl/79890
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 3052, 6435, 1155, 353, 8840, 836, 8, 341, 2698, 8574, 1669, 501, 1190, 12526, 929, 340, 1444, 2264, 1669, 3056, 22430, 16458, 515, 197, 197, 90, 2116, 25, 15061, 4117, 1755, 3052, 27907, 11, 2092, 11, 15061, 9626, 2461, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDeltaAnalyzer verifies Delta(previous, next) across four scenarios:
// no previous index (everything is added), identical indexes (no-op),
// additions only, and mixed additions plus deletions. Countable items are
// compared by Data; TermFrequency rides along in the result.
func TestDeltaAnalyzer(t *testing.T) {
	t.Run("without previous indexing", func(t *testing.T) {
		// A nil previous index means the whole next set must be added.
		previous := []Property(nil)
		next := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value1"), TermFrequency: 7},
				{Data: []byte("value2"), TermFrequency: 3},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value3"), TermFrequency: 7},
				{Data: []byte("value4"), TermFrequency: 3},
			}},
		}
		res := Delta(previous, next)
		assert.Equal(t, next, res.ToAdd)
		assert.Len(t, res.ToDelete, 0)
	})

	t.Run("with previous indexing and no changes", func(t *testing.T) {
		// Identical previous and next => Delta is empty in both directions.
		previous := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value1"), TermFrequency: 7},
				{Data: []byte("value2"), TermFrequency: 3},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value3"), TermFrequency: 7},
				{Data: []byte("value4"), TermFrequency: 3},
			}},
		}
		next := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value1"), TermFrequency: 7},
				{Data: []byte("value2"), TermFrequency: 3},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value3"), TermFrequency: 7},
				{Data: []byte("value4"), TermFrequency: 3},
			}},
		}
		res := Delta(previous, next)
		assert.Len(t, res.ToDelete, 0)
		assert.Len(t, res.ToAdd, 0)
	})

	t.Run("with previous indexing - only additions", func(t *testing.T) {
		// previous is a strict subset of next => only ToAdd is populated,
		// and only with the items missing from previous.
		previous := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value2"), TermFrequency: 3},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value4"), TermFrequency: 3},
			}},
		}
		next := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value1"), TermFrequency: 7},
				{Data: []byte("value2"), TermFrequency: 3},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value3"), TermFrequency: 7},
				{Data: []byte("value4"), TermFrequency: 3},
			}},
		}
		expectedAdd := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value1"), TermFrequency: 7},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value3"), TermFrequency: 7},
			}},
		}
		res := Delta(previous, next)
		assert.Equal(t, expectedAdd, res.ToAdd)
		assert.Len(t, res.ToDelete, 0)
	})

	t.Run("with previous indexing - both additions and deletions", func(t *testing.T) {
		// prop1 swaps value2 for value1 (one add + one delete); prop2 keeps
		// value4 and gains value3 (add only). Note only properties with
		// removed items appear in ToDelete.
		previous := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value2"), TermFrequency: 3},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value4"), TermFrequency: 3},
			}},
		}
		next := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value1"), TermFrequency: 7},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value3"), TermFrequency: 7},
				{Data: []byte("value4"), TermFrequency: 3},
			}},
		}
		expectedAdd := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value1"), TermFrequency: 7},
			}},
			{Name: "prop2", Items: []Countable{
				{Data: []byte("value3"), TermFrequency: 7},
			}},
		}
		expectedDelete := []Property{
			{Name: "prop1", Items: []Countable{
				{Data: []byte("value2"), TermFrequency: 3},
			}},
		}
		res := Delta(previous, next)
		assert.Equal(t, expectedAdd, res.ToAdd)
		assert.Equal(t, expectedDelete, res.ToDelete)
	})
}
explode_data.jsonl/35908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2589 }
[ 2830, 3393, 20277, 54911, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 28996, 3681, 51980, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 19702, 1669, 3056, 3052, 27907, 340, 197, 28144, 1669, 3056, 3052, 515, 298, 197, 515, 57...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBase64DecodeString(t *testing.T) { for _, tt := range []struct { name string input ByteMatch want string }{ { name: "base64_decode bare", input: ByteMatch{ Kind: b64Decode, }, want: `base64_decode;`, }, { name: "base64_decode some options", input: ByteMatch{ Kind: b64Decode, NumBytes: "1", Options: []string{"relative"}, }, want: `base64_decode:bytes 1,relative;`, }, { name: "base64_decode all options", input: ByteMatch{ Kind: b64Decode, NumBytes: "1", Offset: 2, Options: []string{"relative"}, }, want: `base64_decode:bytes 1,offset 2,relative;`, }, } { got := tt.input.base64DecodeString() if got != tt.want { t.Fatalf("%s: got %v -- expected %v", tt.name, got, tt.want) } } }
explode_data.jsonl/59684
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 388 }
[ 2830, 3393, 3978, 21, 19, 32564, 703, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 22427, 10906, 8331, 198, 197, 50780, 220, 914, 198, 197, 59403, 197, 197, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBackendLoadWriteTo(t *testing.T) { env, cleanup := withTestEnvironment(t) defer cleanup() // setup backend which only works if it's WriteTo method is correctly propagated upwards env.gopts.backendInnerTestHook = func(r restic.Backend) (restic.Backend, error) { return &onlyLoadWithWriteToBackend{Backend: r}, nil } testSetupBackupData(t, env) // add some data, but make sure that it isn't cached during upload opts := BackupOptions{} env.gopts.NoCache = true testRunBackup(t, "", []string{filepath.Join(env.testdata, "0", "0", "9")}, opts, env.gopts) // loading snapshots must still work env.gopts.NoCache = false firstSnapshot := testRunList(t, "snapshots", env.gopts) rtest.Assert(t, len(firstSnapshot) == 1, "expected one snapshot, got %v", firstSnapshot) }
explode_data.jsonl/43577
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 29699, 5879, 7985, 1249, 1155, 353, 8840, 836, 8, 341, 57538, 11, 21290, 1669, 448, 2271, 12723, 1155, 340, 16867, 21290, 2822, 197, 322, 6505, 19163, 892, 1172, 4278, 421, 432, 594, 9645, 1249, 1714, 374, 12440, 85050, 4395...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoaringPostingsListInsert(t *testing.T) { d := NewPostingsList() d.Insert(1) require.True(t, d.Contains(1)) require.Equal(t, 1, d.Len()) // Idempotency of inserts. d.Insert(1) require.Equal(t, 1, d.Len()) require.True(t, d.Contains(1)) }
explode_data.jsonl/64012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 38872, 3249, 4133, 819, 852, 13780, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 1532, 4133, 819, 852, 741, 2698, 23142, 7, 16, 340, 17957, 32443, 1155, 11, 294, 11545, 7, 16, 1171, 17957, 12808, 1155, 11, 220, 16, 11, 294,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetEmojiList(t *testing.T) { th := Setup().InitBasic() defer th.TearDown() Client := th.Client EnableCustomEmoji := *th.App.Config().ServiceSettings.EnableCustomEmoji defer func() { th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableCustomEmoji = EnableCustomEmoji }) }() th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableCustomEmoji = true }) emojis := []*model.Emoji{ { CreatorId: th.BasicUser.Id, Name: model.NewId(), }, { CreatorId: th.BasicUser.Id, Name: model.NewId(), }, { CreatorId: th.BasicUser.Id, Name: model.NewId(), }, } for idx, emoji := range emojis { newEmoji, resp := Client.CreateEmoji(emoji, utils.CreateTestGif(t, 10, 10), "image.gif") CheckNoError(t, resp) emojis[idx] = newEmoji } listEmoji, resp := Client.GetEmojiList(0, 100) CheckNoError(t, resp) for _, emoji := range emojis { found := false for _, savedEmoji := range listEmoji { if emoji.Id == savedEmoji.Id { found = true break } } if !found { t.Fatalf("failed to get emoji with id %v, %v", emoji.Id, len(listEmoji)) } } _, resp = Client.DeleteEmoji(emojis[0].Id) CheckNoError(t, resp) listEmoji, resp = Client.GetEmojiList(0, 100) CheckNoError(t, resp) found := false for _, savedEmoji := range listEmoji { if savedEmoji.Id == emojis[0].Id { found = true break } if found { t.Fatalf("should not get a deleted emoji %v", emojis[0].Id) } } listEmoji, resp = Client.GetEmojiList(0, 1) CheckNoError(t, resp) if len(listEmoji) != 1 { t.Fatal("should only return 1") } listEmoji, resp = Client.GetSortedEmojiList(0, 100, model.EMOJI_SORT_BY_NAME) CheckNoError(t, resp) if len(listEmoji) == 0 { t.Fatal("should return more than 0") } }
explode_data.jsonl/76082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 806 }
[ 2830, 3393, 1949, 92731, 852, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 197, 11084, 10268, 92731, 1669, 353, 339, 5105, 10753, 1005, 1860, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEth_protocolVersion(t *testing.T) { expectedRes := hexutil.Uint(ethermint.ProtocolVersion) rpcRes := call(t, "eth_protocolVersion", []string{}) var res hexutil.Uint err := res.UnmarshalJSON(rpcRes.Result) require.NoError(t, err) t.Logf("Got protocol version: %s\n", res.String()) require.Equal(t, expectedRes, res, "expected: %s got: %s\n", expectedRes.String(), rpcRes.Result) }
explode_data.jsonl/840
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 65390, 34880, 5637, 1155, 353, 8840, 836, 8, 341, 42400, 1061, 1669, 12371, 1314, 71869, 7, 2723, 67791, 54096, 5637, 692, 7000, 3992, 1061, 1669, 1618, 1155, 11, 330, 769, 34880, 5637, 497, 3056, 917, 6257, 692, 2405, 592, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPushExporterServicer_Submit_Histogram(t *testing.T) { srv, exp := makeTestCustomPushExporter(t) err := submitNewMetric(exp, prometheus_models.MetricType_HISTOGRAM, sampleGatewayContext) assert.NoError(t, err) assert.Equal(t, 5, totalMetricCount(srv)) err = submitNewMetric(exp, prometheus_models.MetricType_HISTOGRAM, sampleGatewayContext) assert.NoError(t, err) assert.Equal(t, 10, totalMetricCount(srv)) assert.Equal(t, len(srv.FamiliesByName), 3) for name, fam := range srv.FamiliesByName { assert.Equal(t, prometheus_models.MetricType_GAUGE, *fam.Type) for _, metric := range fam.Metric { assert.True(t, tests.HasLabel(metric.Label, "testLabel", "testValue")) if strings.HasSuffix(name, bucketPostfix) { assert.True(t, tests.HasLabelName(metric.Label, histogramBucketLabelName)) } } } }
explode_data.jsonl/61834
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 16644, 88025, 39159, 12999, 36359, 1763, 2039, 28499, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 11, 1343, 1669, 1281, 2271, 10268, 16644, 88025, 1155, 340, 9859, 1669, 9318, 3564, 54310, 25865, 11, 2706, 39705, 30792, 1321, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRunsVschemaMigrations(t *testing.T) { cluster, err := startCluster() defer cluster.TearDown() args := os.Args defer resetFlags(args) assert.NoError(t, err) assertColumnVindex(t, cluster, columnVindex{keyspace: "test_keyspace", table: "test_table", vindex: "my_vdx", vindexType: "hash", column: "id"}) assertColumnVindex(t, cluster, columnVindex{keyspace: "app_customer", table: "customers", vindex: "hash", vindexType: "hash", column: "id"}) // Add Hash vindex via vtgate execution on table err = addColumnVindex(cluster, "test_keyspace", "alter vschema on test_table1 add vindex my_vdx (id)") assert.NoError(t, err) assertColumnVindex(t, cluster, columnVindex{keyspace: "test_keyspace", table: "test_table1", vindex: "my_vdx", vindexType: "hash", column: "id"}) }
explode_data.jsonl/58891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 73920, 53, 17349, 44, 17824, 1155, 353, 8840, 836, 8, 341, 197, 18855, 11, 1848, 1669, 1191, 28678, 741, 16867, 10652, 836, 682, 4454, 741, 31215, 1669, 2643, 51015, 198, 16867, 7585, 9195, 7356, 692, 6948, 35699, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIteratorMapper(t *testing.T) { cur := query.RowCursor([]query.Row{ { Time: 0, Series: query.Series{ Name: "cpu", Tags: ParseTags("host=A"), }, Values: []interface{}{float64(1), "a"}, }, { Time: 5, Series: query.Series{ Name: "cpu", Tags: ParseTags("host=A"), }, Values: []interface{}{float64(3), "c"}, }, { Time: 2, Series: query.Series{ Name: "cpu", Tags: ParseTags("host=B"), }, Values: []interface{}{float64(2), "b"}, }, { Time: 8, Series: query.Series{ Name: "cpu", Tags: ParseTags("host=B"), }, Values: []interface{}{float64(8), "h"}, }, }, []influxql.VarRef{ {Val: "val1", Type: influxql.Float}, {Val: "val2", Type: influxql.String}, }) opt := query.IteratorOptions{ Ascending: true, Aux: []influxql.VarRef{ {Val: "val1", Type: influxql.Float}, {Val: "val2", Type: influxql.String}, }, } itr := query.NewIteratorMapper(cur, nil, []query.IteratorMap{ query.FieldMap{Index: 0}, query.FieldMap{Index: 1}, query.TagMap("host"), }, opt) if a, err := Iterators([]query.Iterator{itr}).ReadAll(); err != nil { t.Fatalf("unexpected error: %s", err) } else if !deep.Equal(a, [][]query.Point{ {&query.FloatPoint{Name: "cpu", Tags: ParseTags("host=A"), Time: 0, Aux: []interface{}{float64(1), "a", "A"}}}, {&query.FloatPoint{Name: "cpu", Tags: ParseTags("host=A"), Time: 5, Aux: []interface{}{float64(3), "c", "A"}}}, {&query.FloatPoint{Name: "cpu", Tags: ParseTags("host=B"), Time: 2, Aux: []interface{}{float64(2), "b", "B"}}}, {&query.FloatPoint{Name: "cpu", Tags: ParseTags("host=B"), Time: 8, Aux: []interface{}{float64(8), "h", "B"}}}, }) { t.Errorf("unexpected points: %s", spew.Sdump(a)) } }
explode_data.jsonl/61196
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 794 }
[ 2830, 3393, 11951, 10989, 1155, 353, 8840, 836, 8, 341, 33209, 1669, 3239, 14657, 14543, 10556, 1631, 14657, 515, 197, 197, 515, 298, 67567, 25, 220, 15, 345, 298, 7568, 4699, 25, 3239, 37128, 515, 571, 21297, 25, 330, 16475, 756, 571...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestContains(t *testing.T) { committee, signers := GenerateTestCommittee() nonExist, _, _ := crypto.GenerateTestKeyPair() assert.True(t, committee.Contains(signers[0].Address())) assert.False(t, committee.Contains(nonExist)) }
explode_data.jsonl/35316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 23805, 1155, 353, 8840, 836, 8, 341, 197, 56785, 11, 1841, 388, 1669, 19813, 2271, 33441, 6547, 741, 197, 6280, 25613, 11, 8358, 716, 1669, 19028, 57582, 2271, 1592, 12443, 2822, 6948, 32443, 1155, 11, 12801, 11545, 40046, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestCommitInFailedTransaction(t *testing.T) { db := openTestConn(t) defer db.Close() txn, err := db.Begin() if err != nil { t.Fatal(err) } rows, err := txn.Query("SELECT error") if err == nil { rows.Close() t.Fatal("expected failure") } err = txn.Commit() if err != ErrInFailedTransaction { t.Fatalf("expected ErrInFailedTransaction; got %#v", err) } }
explode_data.jsonl/63418
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 33441, 641, 9408, 8070, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 1787, 2271, 9701, 1155, 340, 16867, 2927, 10421, 2822, 3244, 42967, 11, 1848, 1669, 2927, 28467, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGitPermissions_CreateGitTokenWithBranch(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() clients := &client.AggregatedClient{ Ctx: context.Background(), } var d *schema.ResourceData var token string var err error d = getGitPermissionsResource(t, gitProjectID, "", gitBranchNameValid) token, err = createGitToken(d, clients) assert.Empty(t, token) assert.NotNil(t, err) d = getGitPermissionsResource(t, gitProjectID, gitRepositoryID, gitBranchNameValid) token, err = createGitToken(d, clients) assert.NotEmpty(t, token) assert.Nil(t, err) assert.Equal(t, gitTokenBranch, token) }
explode_data.jsonl/33475
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 46562, 23851, 34325, 46562, 3323, 2354, 18197, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 31869, 1669, 609, 2972, 49850, 93040, 2959, 515, 197, 6258, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadData(t *testing.T) { validSQL := []string{ "load data from s3 'x.txt'", "load data from s3 manifest 'x.txt'", "load data from s3 file 'x.txt'", "load data infile 'x.txt' into table 'c'", "load data from s3 'x.txt' into table x"} for _, tcase := range validSQL { _, err := Parse(tcase) require.NoError(t, err) } }
explode_data.jsonl/27187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 5879, 1043, 1155, 353, 8840, 836, 8, 341, 56322, 6688, 1669, 3056, 917, 515, 197, 197, 1, 1078, 821, 504, 274, 18, 364, 87, 3909, 38330, 197, 197, 1, 1078, 821, 504, 274, 18, 14455, 364, 87, 3909, 38330, 197, 197, 1, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_databasesNextView(t *testing.T) { testcases := []struct { current string want string }{ {current: "databases_general", want: "databases_sessions"}, {current: "databases_sessions", want: "databases_general"}, {current: "unknown", want: "databases_general"}, } for _, tc := range testcases { assert.Equal(t, tc.want, databasesNextView(tc.current)) } }
explode_data.jsonl/69229
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 814, 23822, 5847, 851, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 20121, 914, 198, 197, 50780, 262, 914, 198, 197, 59403, 197, 197, 90, 3231, 25, 330, 67, 23822, 39177, 497, 1366, 25, 330, 67...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAwsAwsSdkGoV2Connect(t *testing.T) { t.Parallel() resolver := aws.EndpointResolverFunc(func(service, region string) (aws.Endpoint, error) { return aws.Endpoint{URL: ConnectUrl()}, nil }) cfg, err := config.LoadDefaultConfig(context.TODO(), config.WithCredentialsProvider(aws.AnonymousCredentials{}), config.WithEndpointResolver(resolver), config.WithHTTPClient(&http.Client{Timeout: time.Second}), config.WithRetryer(func() aws.Retryer { return aws.NopRetryer{} }), ) if err != nil { panic(err) } client := s3.NewFromConfig(cfg) start := time.Now() _, err = client.ListBuckets(context.TODO(), &s3.ListBucketsInput{}) assertTimeout(t, start, err, "Client.Timeout exceeded") }
explode_data.jsonl/46940
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 272 }
[ 2830, 3393, 47359, 47359, 57175, 10850, 53, 17, 14611, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 10202, 7921, 1669, 31521, 90409, 18190, 9626, 18552, 21656, 11, 5537, 914, 8, 320, 8635, 90409, 11, 1465, 8, 341, 197, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSortedISOCtrlStrEmpty(t *testing.T) { // given attrs := make(map[string]*sysl.Attribute) // when actual := getSortedISOCtrlStr(attrs) // then assert.Equal(t, "", actual) }
explode_data.jsonl/58734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 51051, 1637, 7612, 9599, 2580, 3522, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 197, 20468, 1669, 1281, 9147, 14032, 8465, 7791, 75, 33775, 692, 197, 322, 979, 198, 88814, 1669, 633, 51051, 1637, 7612, 9599, 2580, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestTerminator(t *testing.T) { c := NewClient().(*client) n := newTestTerminatorConn() conn := c.newConnection(n) conn.w.Add(1) go conn.terminator() close(conn.t) conn.w.Wait() assert.False(t, n.c) }
explode_data.jsonl/17384
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 21209, 1065, 850, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 2959, 1005, 4071, 2972, 692, 9038, 1669, 501, 2271, 21209, 1065, 850, 9701, 741, 32917, 1669, 272, 4618, 4526, 1445, 692, 32917, 1418, 1904, 7, 16, 340, 306...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestEC2CredentialsBuildCanonicalHeadersV4(t *testing.T) { headers := map[string]string{ "Foo": "bar", "Baz": "qux", } signedHeaders := "foo;baz" expected := "foo:bar\nbaz:qux\n" testhelper.CheckEquals(t, expected, ec2tokens.EC2CredentialsBuildCanonicalHeadersV4(headers, signedHeaders)) }
explode_data.jsonl/68587
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 7498, 17, 27025, 11066, 70914, 10574, 53, 19, 1155, 353, 8840, 836, 8, 341, 67378, 1669, 2415, 14032, 30953, 515, 197, 197, 1, 40923, 788, 330, 2257, 756, 197, 197, 63590, 1370, 788, 330, 446, 87, 756, 197, 532, 1903, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshaling(t *testing.T) { for _, tt := range unmarshalingTests { // Make a new instance of the type of our expected object. p := reflect.New(reflect.TypeOf(tt.pb).Elem()).Interface().(proto.Message) err := tt.unmarshaler.Unmarshal(strings.NewReader(tt.json), p) if err != nil { t.Errorf("unmarshaling %s: %v", tt.desc, err) continue } // For easier diffs, compare text strings of the protos. exp := proto.MarshalTextString(tt.pb) act := proto.MarshalTextString(p) if string(exp) != string(act) { t.Errorf("%s: got [%s] want [%s]", tt.desc, act, exp) } } }
explode_data.jsonl/63120
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 1806, 36239, 6132, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 650, 36239, 6132, 18200, 341, 197, 197, 322, 7405, 264, 501, 2867, 315, 279, 943, 315, 1039, 3601, 1633, 624, 197, 3223, 1669, 8708, 7121, 133...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetSwapOrderHistory(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() || !canManipulateRealOrders { t.Skip("skipping test, either api keys or manipulaterealorders isnt set correctly") } _, err := c.GetSwapOrderHistory("", "", swapTestPair, 1, 10, "", "") if err != nil { t.Error(err) } }
explode_data.jsonl/42949
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 1949, 46179, 4431, 13424, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 1369, 753, 4814, 92876, 6334, 12768, 24898, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 11, 2987, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEvalInitProvider(t *testing.T) { n := &EvalInitProvider{ Addr: addrs.ProviderConfig{Type: "foo"}, } provider := &MockProvider{} ctx := &MockEvalContext{InitProviderProvider: provider} if _, err := n.Eval(ctx); err != nil { t.Fatalf("err: %s", err) } if !ctx.InitProviderCalled { t.Fatal("should be called") } if ctx.InitProviderAddr.String() != "provider.foo" { t.Fatalf("wrong provider address %s", ctx.InitProviderAddr) } }
explode_data.jsonl/3213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 54469, 3803, 5179, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 609, 54469, 3803, 5179, 515, 197, 197, 13986, 25, 912, 5428, 36208, 2648, 90, 929, 25, 330, 7975, 7115, 197, 532, 197, 19979, 1669, 609, 11571, 5179, 16094, 2098...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestVerifyToken(t *testing.T) { tokenIsValidWindow, errTokenPayload := VerifyToken(tokenTest, &testLocalSessions, nil) if !tokenIsValidWindow { t.Fail() t.Logf("token window is not valid") } if errTokenPayload != nil { t.Fail() t.Logf(errTokenPayload.Error()) } }
explode_data.jsonl/11053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 32627, 3323, 1155, 353, 8840, 836, 8, 341, 43947, 55470, 4267, 11, 1848, 3323, 29683, 1669, 25429, 3323, 13274, 2271, 11, 609, 1944, 7319, 59062, 11, 2092, 340, 743, 753, 5839, 55470, 4267, 341, 197, 3244, 57243, 741, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestApkMetadata_FileOwner(t *testing.T) { tests := []struct { metadata ApkMetadata expected []string }{ { metadata: ApkMetadata{ Files: []ApkFileRecord{ {Path: "/somewhere"}, {Path: "/else"}, }, }, expected: []string{ "/else", "/somewhere", }, }, { metadata: ApkMetadata{ Files: []ApkFileRecord{ {Path: "/somewhere"}, {Path: ""}, }, }, expected: []string{ "/somewhere", }, }, } for _, test := range tests { t.Run(strings.Join(test.expected, ","), func(t *testing.T) { var i interface{} i = test.metadata actual := i.(FileOwner).OwnedFiles() for _, d := range deep.Equal(test.expected, actual) { t.Errorf("diff: %+v", d) } }) } }
explode_data.jsonl/41180
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 10611, 74, 14610, 34061, 13801, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 2109, 7603, 5232, 74, 14610, 198, 197, 42400, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 2109, 7603, 25, 5232, 74, 14610...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHostHeaderReplacedUsingForward(t *testing.T) { var requestHost string backend := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { requestHost = r.Host w.Write([]byte("Hello, client")) })) defer backend.Close() upstream := newFakeUpstream(backend.URL, false, 30*time.Second) proxyHostHeader := "test2.com" upstream.host.UpstreamHeaders = http.Header{"Host": []string{proxyHostHeader}} // set up proxy p := &Proxy{ Next: httpserver.EmptyNext, // prevents panic in some cases when test fails Upstreams: []Upstream{upstream}, } r := httptest.NewRequest("GET", "/", nil) r.Host = "test.com" w := httptest.NewRecorder() p.ServeHTTP(w, r) if proxyHostHeader != requestHost { t.Fatalf("Expected %s as a Host header got %s\n", proxyHostHeader, requestHost) } }
explode_data.jsonl/64241
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 9296, 4047, 693, 36369, 16429, 25925, 1155, 353, 8840, 836, 8, 341, 2405, 1681, 9296, 914, 198, 197, 20942, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 23555...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseVersion(t *testing.T) { testcases := []struct { version string want parsedVersion }{{ version: "1.6", want: parsedVersion{ Major: 1, Minor: 6, }, }, { version: "1.7", want: parsedVersion{ Major: 1, Minor: 7, }, }, { version: "nightly", want: parsedVersion{ IsNightly: true, }, }, { version: "nightly-20180205", want: parsedVersion{ IsNightly: true, Modifier: "-20180205", }, }, { version: "unknown", want: parsedVersion{ Modifier: "unknown", }, }, { version: "1.7-RC3", want: parsedVersion{ Major: 1, Minor: 7, Modifier: "-RC3", }, }, { version: "1.7.2-RC0", want: parsedVersion{ Major: 1, Minor: 7, Modifier: ".2-RC0", }, }, { version: "test_version", want: parsedVersion{ Modifier: "test_version", }, }} for _, testcase := range testcases { got, err := parseVersion(testcase.version) if err != nil { t.Errorf("parseVersion(%q) = %#v", testcase.version, err) } if !cmp.Equal(got, testcase.want) { t.Errorf("parseVersion(%q) = %#v, want: %#v", testcase.version, got, testcase.want) } } }
explode_data.jsonl/35510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 540 }
[ 2830, 3393, 14463, 5637, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 74954, 914, 198, 197, 50780, 262, 15676, 5637, 198, 197, 15170, 515, 197, 74954, 25, 330, 16, 13, 21, 756, 197, 50780, 25, 15676, 5637, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAESSIVPrimitive(t *testing.T) { km, err := registry.GetKeyManager(testutil.AESSIVTypeURL) if err != nil { t.Fatalf("cannot obtain AESSIV key manager: %s", err) } m, err := km.NewKey(nil) if err != nil { t.Errorf("km.NewKey(nil) = _, %v; want _, nil", err) } key, _ := m.(*aspb.AesSivKey) serializedKey, _ := proto.Marshal(key) p, err := km.Primitive(serializedKey) if err != nil { t.Errorf("km.Primitive(%v) = %v; want nil", serializedKey, err) } if err := validateAESSIVPrimitive(p, key); err != nil { t.Errorf("validateAESSIVPrimitive(p, key) = %v; want nil", err) } }
explode_data.jsonl/58683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 32, 9996, 3090, 33313, 1155, 353, 8840, 836, 8, 341, 197, 16017, 11, 1848, 1669, 19424, 51723, 2043, 8623, 1314, 875, 9996, 3090, 929, 3144, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 33260, 6851, 362, 9996, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDefaultLemParsRemove(t *testing.T) { keeper, ctx := keepertest.PmKeeper(t) items := createNDefaultLemPars(keeper, ctx, 10) for _, item := range items { keeper.RemoveDefaultLemPars(ctx, item.Index, ) _, found := keeper.GetDefaultLemPars(ctx, item.Index, ) require.False(t, found) } }
explode_data.jsonl/71393
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 3675, 43, 336, 47, 1561, 13021, 1155, 353, 8840, 836, 8, 341, 197, 18861, 11, 5635, 1669, 2506, 83386, 1069, 76, 77233, 1155, 340, 46413, 1669, 1855, 45, 3675, 43, 336, 47, 1561, 7, 18861, 11, 5635, 11, 220, 16, 15, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2