text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestNewSecureGRPCServer(t *testing.T) { t.Parallel() testAddress := "localhost:9055" srv, err := comm.NewGRPCServer(testAddress, comm.ServerConfig{ ConnectionTimeout: 250 * time.Millisecond, SecOpts: &comm.SecureOptions{ UseTLS: true, Certificate: []byte(selfSignedCertPEM), Key: []byte(selfSignedKeyPEM)}}) //check for error if err != nil { t.Fatalf("Failed to return new GRPC server: %v", err) } //make sure our properties are as expected //resolve the address addr, err := net.ResolveTCPAddr("tcp", testAddress) assert.Equal(t, srv.Address(), addr.String()) assert.Equal(t, srv.Listener().Addr().String(), addr.String()) //check the server certificate cert, _ := tls.X509KeyPair([]byte(selfSignedCertPEM), []byte(selfSignedKeyPEM)) assert.Equal(t, srv.ServerCertificate(), cert) //TLSEnabled should be true assert.Equal(t, srv.TLSEnabled(), true) //MutualTLSRequired should be false assert.Equal(t, srv.MutualTLSRequired(), false) //register the GRPC test server testpb.RegisterTestServiceServer(srv.Server(), &testServiceServer{}) //start the server go srv.Start() defer srv.Stop() //should not be needed time.Sleep(10 * time.Millisecond) //create the client credentials certPool := x509.NewCertPool() if !certPool.AppendCertsFromPEM([]byte(selfSignedCertPEM)) { t.Fatal("Failed to append certificate to client credentials") } creds := credentials.NewClientTLSFromCert(certPool, "") //GRPC client options var dialOptions []grpc.DialOption dialOptions = append(dialOptions, grpc.WithTransportCredentials(creds)) //invoke the EmptyCall service _, err = invokeEmptyCall(testAddress, dialOptions) if err != nil { t.Fatalf("GRPC client failed to invoke the EmptyCall service on %s: %v", testAddress, err) } else { t.Log("GRPC client successfully invoked the EmptyCall service: " + testAddress) } tlsVersions := []string{"SSL30", "TLS10", "TLS11"} for counter, tlsVersion := range []uint16{tls.VersionSSL30, tls.VersionTLS10, tls.VersionTLS11} { tlsVersion := tlsVersion 
t.Run(tlsVersions[counter], func(t *testing.T) { t.Parallel() _, err := invokeEmptyCall(testAddress, []grpc.DialOption{grpc.WithTransportCredentials( credentials.NewTLS(&tls.Config{ RootCAs: certPool, MinVersion: tlsVersion, MaxVersion: tlsVersion, })), grpc.WithBlock()}) t.Logf("TLSVersion [%d] failed with [%s]", tlsVersion, err) assert.Error(t, err, "Should not have been able to connect with TLS version < 1.2") assert.Contains(t, err.Error(), "context deadline exceeded") }) } }
explode_data.jsonl/38622
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 980 }
[ 2830, 3393, 3564, 49813, 8626, 4872, 5475, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 741, 18185, 4286, 1669, 330, 8301, 25, 24, 15, 20, 20, 698, 1903, 10553, 11, 1848, 1669, 1063, 7121, 8626, 4872, 5475, 8623, 4286, 11, 1063, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScoreClusterWithAddOn(t *testing.T) { cases := []struct { name string placement *clusterapivbeta1.Placement clusters []*clusterapiv1.ManagedCluster existingAddOnScores []runtime.Object expectedScores map[string]int64 }{ { name: "no addon scores", placement: testinghelpers.NewPlacement("test", "test").WithScoreCoordinateAddOn("test", "score1", 1).Build(), clusters: []*clusterapiv1.ManagedCluster{ testinghelpers.NewManagedCluster("cluster1").Build(), testinghelpers.NewManagedCluster("cluster2").Build(), testinghelpers.NewManagedCluster("cluster3").Build(), }, existingAddOnScores: []runtime.Object{}, expectedScores: map[string]int64{"cluster1": 0, "cluster2": 0, "cluster3": 0}, }, { name: "part of addon scores generated", placement: testinghelpers.NewPlacement("test", "test").WithScoreCoordinateAddOn("test", "score1", 1).Build(), clusters: []*clusterapiv1.ManagedCluster{ testinghelpers.NewManagedCluster("cluster1").Build(), testinghelpers.NewManagedCluster("cluster2").Build(), testinghelpers.NewManagedCluster("cluster3").Build(), }, existingAddOnScores: []runtime.Object{ testinghelpers.NewAddOnPlacementScore("cluster1", "test").WithScore("score1", 30).Build(), }, expectedScores: map[string]int64{"cluster1": 30, "cluster2": 0, "cluster3": 0}, }, { name: "part of addon scores expired", placement: testinghelpers.NewPlacement("test", "test").WithScoreCoordinateAddOn("test", "score1", 1).Build(), clusters: []*clusterapiv1.ManagedCluster{ testinghelpers.NewManagedCluster("cluster1").Build(), testinghelpers.NewManagedCluster("cluster2").Build(), testinghelpers.NewManagedCluster("cluster3").Build(), }, existingAddOnScores: []runtime.Object{ testinghelpers.NewAddOnPlacementScore("cluster1", "test").WithScore("score1", 30).WithValidUntil(expiredTime).Build(), testinghelpers.NewAddOnPlacementScore("cluster2", "test").WithScore("score1", 40).Build(), testinghelpers.NewAddOnPlacementScore("cluster3", "test").WithScore("score1", 50).Build(), }, expectedScores: 
map[string]int64{"cluster1": 0, "cluster2": 40, "cluster3": 50}, }, { name: "all the addon scores generated", placement: testinghelpers.NewPlacement("test", "test").WithScoreCoordinateAddOn("test", "score1", 1).Build(), clusters: []*clusterapiv1.ManagedCluster{ testinghelpers.NewManagedCluster("cluster1").Build(), testinghelpers.NewManagedCluster("cluster2").Build(), testinghelpers.NewManagedCluster("cluster3").Build(), }, existingAddOnScores: []runtime.Object{ testinghelpers.NewAddOnPlacementScore("cluster1", "test").WithScore("score1", 30).Build(), testinghelpers.NewAddOnPlacementScore("cluster2", "test").WithScore("score1", 40).Build(), testinghelpers.NewAddOnPlacementScore("cluster3", "test").WithScore("score1", 50).Build(), }, expectedScores: map[string]int64{"cluster1": 30, "cluster2": 40, "cluster3": 50}, }, } AddOnClock = testingclock.NewFakeClock(fakeTime) for _, c := range cases { t.Run(c.name, func(t *testing.T) { addon := &AddOn{ handle: testinghelpers.NewFakePluginHandle(t, nil, c.existingAddOnScores...), prioritizerName: "AddOn/test/score1", resourceName: "test", scoreName: "score1", } scoreResult := addon.Score(context.TODO(), c.placement, c.clusters) scores := scoreResult.Scores err := scoreResult.Err if err != nil { t.Errorf("Expect no error, but got %v", err) } if !apiequality.Semantic.DeepEqual(scores, c.expectedScores) { t.Errorf("Expect score %v, but got %v", c.expectedScores, scores) } }) } }
explode_data.jsonl/73327
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1500 }
[ 2830, 3393, 10570, 28678, 2354, 2212, 1925, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 394, 914, 198, 197, 197, 16101, 1843, 353, 18855, 391, 344, 19127, 16, 21368, 6346, 198, 197, 39407, 14605, 310, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMainFatal(t *testing.T) { assert := assert.New(t) tmpdir, err := ioutil.TempDir(testDir, "") assert.NoError(err) defer os.RemoveAll(tmpdir) var exitStatus int savedExitFunc := exitFunc exitFunc = func(status int) { exitStatus = status } savedErrorFile := defaultErrorFile output := filepath.Join(tmpdir, "output") f, err := os.OpenFile(output, os.O_CREATE|os.O_WRONLY|os.O_SYNC, testFileMode) assert.NoError(err) defaultErrorFile = f defer func() { f.Close() defaultErrorFile = savedErrorFile exitFunc = savedExitFunc }() exitError := errors.New("hello world") fatal(exitError) assert.Equal(exitStatus, 1) text, err := katautils.GetFileContents(output) assert.NoError(err) trimmed := strings.TrimSpace(text) assert.Equal(exitError.Error(), trimmed) }
explode_data.jsonl/52197
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 6202, 62396, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 20082, 3741, 11, 1848, 1669, 43144, 65009, 6184, 8623, 6184, 11, 14676, 6948, 35699, 3964, 340, 16867, 2643, 84427, 10368, 3741, 692, 2405, 4869, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDualDelegatingProvider_Read(t *testing.T) { testCases := map[string]struct { manifests map[string]string numObjs int invKind inventory.InventoryInfo isError bool }{ "Basic ResourceGroup inventory object created": { manifests: map[string]string{ "Kptfile": kptFile, "pod-a.yaml": podA, }, numObjs: 2, invKind: &InventoryResourceGroup{}, isError: false, }, "Only ResourceGroup inventory object created": { manifests: map[string]string{ "Kptfile": kptFile, }, numObjs: 1, invKind: &InventoryResourceGroup{}, isError: false, }, "ResourceGroup inventory object with multiple objects": { manifests: map[string]string{ "pod-a.yaml": podA, "Kptfile": kptFile, "deployment-a.yaml": deploymentA, }, numObjs: 3, invKind: &InventoryResourceGroup{}, isError: false, }, "Basic ConfigMap inventory object created": { manifests: map[string]string{ "inventory-template.yaml": configMapInv, "deployment-a.yaml": deploymentA, }, numObjs: 2, invKind: &inventory.InventoryConfigMap{}, isError: false, }, "Only ConfigMap inventory object created": { manifests: map[string]string{ "inventory-template.yaml": configMapInv, }, numObjs: 1, invKind: &inventory.InventoryConfigMap{}, isError: false, }, "ConfigMap inventory object with multiple objects": { manifests: map[string]string{ "deployment-a.yaml": deploymentA, "inventory-template.yaml": configMapInv, "pod-a.yaml": podA, }, numObjs: 3, invKind: &inventory.InventoryConfigMap{}, isError: false, }, "No inventory manifests is an error": { manifests: map[string]string{ "pod-a.yaml": podA, "deployment-a.yaml": deploymentA, }, numObjs: 2, isError: true, }, "Multiple manifests is an error": { manifests: map[string]string{ "inventory-template.yaml": configMapInv, "Kptfile": kptFile, "pod-a.yaml": podA, }, numObjs: 3, isError: true, }, } for tn, tc := range testCases { t.Run(tn, func(t *testing.T) { // Create the fake factory tf := cmdtesting.NewTestFactory().WithNamespace("test-namespace") defer tf.Cleanup() // Set up the yaml manifests (including 
Kptfile) in temp dir. dir, err := ioutil.TempDir("", "provider-test") assert.NoError(t, err) for filename, content := range tc.manifests { p := filepath.Join(dir, filename) err := ioutil.WriteFile(p, []byte(content), 0600) assert.NoError(t, err) } // Read objects using provider ManifestReader. loader := NewDualDelegatingManifestReader(tf) mr, err := loader.ManifestReader(nil, []string{dir}) if err != nil { t.Fatalf("unexpected error %v", err) } objs, err := mr.Read() assert.NoError(t, err) if tc.numObjs != len(objs) { t.Errorf("expected to read (%d) objs, got (%d)", tc.numObjs, len(objs)) } inv, _, err := loader.InventoryInfo(objs) if tc.isError { if err == nil { t.Errorf("expected error on ManifestReader, but received none.") } return } if inv == nil { t.Errorf("inventory object not found") } if reflect.TypeOf(tc.invKind) != reflect.TypeOf(inv) { t.Errorf("expected inventory kind (%s), got (%s)", reflect.TypeOf(tc.invKind), reflect.TypeOf(inv)) } }) } }
explode_data.jsonl/21338
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1505 }
[ 2830, 3393, 85074, 1912, 1937, 1095, 5179, 38381, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 197, 42315, 82, 2415, 14032, 30953, 198, 197, 22431, 4121, 2519, 256, 526, 198, 197, 197, 14057, 10629,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestDescribeLoadBalancerOnUpdate(t *testing.T) { awsServices := NewFakeAWSServices() c, _ := newAWSCloud(strings.NewReader("[global]"), awsServices) awsServices.elb.expectDescribeLoadBalancers("aid") c.UpdateLoadBalancer(TestClusterName, &v1.Service{ObjectMeta: metav1.ObjectMeta{Name: "myservice", UID: "id"}}, []*v1.Node{}) }
explode_data.jsonl/12858
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 74785, 5879, 93825, 1925, 4289, 1155, 353, 8840, 836, 8, 341, 197, 8635, 11025, 1669, 1532, 52317, 14419, 1220, 2161, 741, 1444, 11, 716, 1669, 501, 14419, 3540, 52178, 51442, 68587, 10937, 9752, 60, 3975, 31521, 11025, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuccessfulInCacheAncestryByID(t *testing.T) { jsonResponse := `{ "bloodline_id": 7, "description": "The Gallente prize political activism more so than other Empires. Many devote their efforts towards one or more causes that suit their ambitions. Activists understand that things will never change for the better unless someone has the courage to fight the good fight.", "icon_id": 1653, "id": 13, "name": "Activists", "short_description": "Making the universe a better place, one fight at a time." }` b := []byte(jsonResponse) mockCachingClient := &MockCachingClient{ MockAdd: func(ctx context.Context, key string, value []byte, ttl int64) { //Method returns nothing so needs no implementation }, MockCheck: func(ctx context.Context, key string) (bool, []byte) { return true, b }, } CachingClient = mockCachingClient var testID = 1 resp, err := AncestryByID(context.Background(), &testID) if err != nil { t.Errorf(helpers.ErrorWasNotNil, err) } var responseName = "Activists" if *resp.Name != responseName { t.Errorf(helpers.ResponseWasNotAsExpected) } }
explode_data.jsonl/58674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 36374, 641, 8233, 2082, 346, 4617, 60572, 1155, 353, 8840, 836, 8, 341, 30847, 2582, 1669, 1565, 515, 197, 197, 1, 50005, 1056, 842, 788, 220, 22, 345, 197, 197, 67993, 788, 330, 785, 24877, 6817, 21882, 4948, 54180, 803, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSessionCacheGetSTS(t *testing.T) { testGetAWSClient( t, "STS", func(t *testing.T, cache *sessionCache, region *string, role Role) { iface := cache.GetSTS(role) if iface == nil { t.Fail() return } }) }
explode_data.jsonl/18774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 5283, 8233, 1949, 80725, 1155, 353, 8840, 836, 8, 341, 18185, 1949, 36136, 2959, 1006, 197, 3244, 11, 330, 80725, 756, 197, 29244, 1155, 353, 8840, 836, 11, 6500, 353, 5920, 8233, 11, 5537, 353, 917, 11, 3476, 15404, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEngineOptsValidation(t *testing.T) { cases := []struct { opts EngineOpts query string fail bool expError error }{ { opts: EngineOpts{EnableAtModifier: false}, query: "metric @ 100", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "rate(metric[1m] @ 100)", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "rate(metric[1h:1m] @ 100)", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "metric @ start()", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "rate(metric[1m] @ start())", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "rate(metric[1h:1m] @ start())", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "metric @ end()", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "rate(metric[1m] @ end())", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: false}, query: "rate(metric[1h:1m] @ end())", fail: true, expError: ErrValidationAtModifierDisabled, }, { opts: EngineOpts{EnableAtModifier: true}, query: "metric @ 100", }, { opts: EngineOpts{EnableAtModifier: true}, query: "rate(metric[1m] @ start())", }, { opts: EngineOpts{EnableAtModifier: true}, query: "rate(metric[1h:1m] @ end())", }, { opts: EngineOpts{EnableNegativeOffset: false}, query: "metric offset -1s", fail: true, expError: ErrValidationNegativeOffsetDisabled, }, { opts: EngineOpts{EnableNegativeOffset: true}, query: "metric offset -1s", }, { opts: EngineOpts{EnableAtModifier: true, EnableNegativeOffset: true}, query: "metric @ 100 offset -2m", }, { opts: EngineOpts{EnableAtModifier: true, EnableNegativeOffset: true}, query: "metric 
offset -2m @ 100", }, } for _, c := range cases { eng := NewEngine(c.opts) _, err1 := eng.NewInstantQuery(nil, c.query, time.Unix(10, 0)) _, err2 := eng.NewRangeQuery(nil, c.query, time.Unix(0, 0), time.Unix(10, 0), time.Second) if c.fail { require.Equal(t, c.expError, err1) require.Equal(t, c.expError, err2) } else { require.Nil(t, err1) require.Nil(t, err2) } } }
explode_data.jsonl/35565
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1076 }
[ 2830, 3393, 4571, 43451, 13799, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 64734, 257, 8200, 43451, 198, 197, 27274, 262, 914, 198, 197, 63052, 257, 1807, 198, 197, 48558, 1454, 1465, 198, 197, 59403, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFindSockets(t *testing.T) { tmpdir, err := ioutil.TempDir("", "socktest") assert.NoError(t, err) defer func() { err := os.Remove(tmpdir) assert.NoError(t, err) }() c := &Ceph{ CephBinary: "foo", OsdPrefix: "ceph-osd", MonPrefix: "ceph-mon", SocketDir: tmpdir, SocketSuffix: "asok", CephUser: "client.admin", CephConfig: "/etc/ceph/ceph.conf", GatherAdminSocketStats: true, GatherClusterStats: false, } for _, st := range sockTestParams { createTestFiles(tmpdir, st) sockets, err := findSockets(c) assert.NoError(t, err) for i := 1; i <= st.osds; i++ { assertFoundSocket(t, tmpdir, typeOsd, i, sockets) } for i := 1; i <= st.mons; i++ { assertFoundSocket(t, tmpdir, typeMon, i, sockets) } cleanupTestFiles(tmpdir, st) } }
explode_data.jsonl/25191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 9885, 50, 19601, 1155, 353, 8840, 836, 8, 341, 20082, 3741, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 13199, 1944, 1138, 6948, 35699, 1155, 11, 1848, 340, 16867, 2915, 368, 341, 197, 9859, 1669, 2643, 13270, 10368, 374...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypeSystem_ObjectsMustAdhereToInterfaceTheyImplement_RejectsAnObjectWhichImplementsAnInterfaceFieldAlongWithAdditionalRequiredArguments(t *testing.T) { anotherInterface := graphql.NewInterface(graphql.InterfaceConfig{ Name: "AnotherInterface", ResolveType: func(p graphql.ResolveTypeParams) *graphql.Object { return nil }, Fields: graphql.Fields{ "field": &graphql.Field{ Type: graphql.String, Args: graphql.FieldConfigArgument{ "input": &graphql.ArgumentConfig{ Type: graphql.String, }, }, }, }, }) anotherObject := graphql.NewObject(graphql.ObjectConfig{ Name: "AnotherObject", Interfaces: []*graphql.Interface{anotherInterface}, Fields: graphql.Fields{ "field": &graphql.Field{ Type: graphql.String, Args: graphql.FieldConfigArgument{ "input": &graphql.ArgumentConfig{ Type: graphql.String, }, "anotherInput": &graphql.ArgumentConfig{ Type: graphql.NewNonNull(graphql.String), }, }, }, }, }) _, err := schemaWithObjectFieldOfType(anotherObject) expectedError := `AnotherObject.field(anotherInput:) is of required type "String!" but is not also provided by the interface AnotherInterface.field.` if err == nil || err.Error() != expectedError { t.Fatalf("Expected error: %v, got %v", expectedError, err) } }
explode_data.jsonl/79195
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 509 }
[ 2830, 3393, 929, 2320, 62, 11543, 31776, 2589, 6739, 1249, 5051, 6865, 62980, 50693, 583, 82, 2082, 1190, 23085, 1427, 4674, 2082, 5051, 1877, 49809, 2354, 29019, 8164, 19139, 1155, 353, 8840, 836, 8, 341, 197, 41963, 5051, 1669, 48865, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCommitNoTransaction(t *testing.T) { t.Parallel() db := &DB{} if err := db.Commit(context.Background()); err.Error() != "context has no transaction" { t.Errorf("unexpected error value: %v", err) } }
explode_data.jsonl/25445
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 33441, 2753, 8070, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20939, 1669, 609, 3506, 16094, 743, 1848, 1669, 2927, 53036, 5378, 19047, 13426, 1848, 6141, 368, 961, 330, 2147, 702, 902, 7745, 1, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestServicecheckAnswerBlock(t *testing.T) { convey.Convey("checkAnswerBlock", t, func() { block := s.checkAnswerBlock(context.Background(), 0) convey.So(block, convey.ShouldNotBeNil) }) }
explode_data.jsonl/21132
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 1860, 2028, 16141, 4713, 1155, 353, 8840, 836, 8, 341, 37203, 5617, 4801, 5617, 445, 2028, 16141, 4713, 497, 259, 11, 2915, 368, 341, 197, 47996, 1669, 274, 9093, 16141, 4713, 5378, 19047, 1507, 220, 15, 340, 197, 37203, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestRouteGroupInsert(t *testing.T) { y := RouteGroup("") m := new(MockMiddleware) y.Insert(m) if len(y.middleware) != 1 { t.Fatalf("Added 1 middleware, found %d in the list.", len(y.routes)) } if y.middleware[0] != m { t.Fatal("Added a middleware. Stored one seems to be different") } }
explode_data.jsonl/35808
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 4899, 2808, 13780, 1155, 353, 8840, 836, 8, 341, 14522, 1669, 9572, 2808, 31764, 2109, 1669, 501, 66436, 24684, 692, 14522, 23142, 1255, 692, 743, 2422, 7021, 43884, 8, 961, 220, 16, 341, 197, 3244, 30762, 445, 19337, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFilterByRepo(t *testing.T) { apps := []argoappv1.Application{ { Spec: argoappv1.ApplicationSpec{ Source: argoappv1.ApplicationSource{ RepoURL: "git@github.com:owner/repo.git", }, }, }, { Spec: argoappv1.ApplicationSpec{ Source: argoappv1.ApplicationSource{ RepoURL: "git@github.com:owner/otherrepo.git", }, }, }, } t.Run("Empty filter", func(t *testing.T) { res := FilterByRepo(apps, "") assert.Len(t, res, 2) }) t.Run("Match", func(t *testing.T) { res := FilterByRepo(apps, "git@github.com:owner/repo.git") assert.Len(t, res, 1) }) t.Run("No match", func(t *testing.T) { res := FilterByRepo(apps, "git@github.com:owner/willnotmatch.git") assert.Len(t, res, 0) }) }
explode_data.jsonl/49995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 348 }
[ 2830, 3393, 5632, 1359, 25243, 1155, 353, 8840, 836, 8, 341, 197, 27635, 1669, 3056, 12088, 676, 85, 16, 17521, 515, 197, 197, 515, 298, 7568, 992, 25, 1392, 78, 676, 85, 16, 17521, 8327, 515, 571, 197, 3608, 25, 1392, 78, 676, 85...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHeadTracker_ReconnectOnError(t *testing.T) { t.Parallel() g := gomega.NewGomegaWithT(t) store, cleanup := cltest.NewStore(t) defer cleanup() logger := store.Config.CreateProductionLogger() ethClient := new(mocks.Client) sub := new(mocks.Subscription) ethClient.On("ChainID", mock.Anything).Maybe().Return(store.Config.ChainID(), nil) ethClient.On("SubscribeNewHead", mock.Anything, mock.Anything).Return(sub, nil) ethClient.On("SubscribeNewHead", mock.Anything, mock.Anything).Return(nil, errors.New("cannot reconnect")) ethClient.On("SubscribeNewHead", mock.Anything, mock.Anything).Return(sub, nil) chErr := make(chan error) sub.On("Unsubscribe").Return() sub.On("Err").Return((<-chan error)(chErr)) store.EthClient = ethClient checker := &cltest.MockHeadTrackable{} ht := services.NewHeadTracker(logger, store, []strpkg.HeadTrackable{checker}, cltest.NeverSleeper{}) // connect assert.Nil(t, ht.Start()) g.Eventually(func() int32 { return checker.ConnectedCount() }).Should(gomega.Equal(int32(1))) assert.Equal(t, int32(0), checker.DisconnectedCount()) assert.Equal(t, int32(0), checker.OnNewLongestChainCount()) // trigger reconnect loop chErr <- errors.New("Test error to force reconnect") g.Eventually(func() int32 { return checker.ConnectedCount() }).Should(gomega.Equal(int32(2))) g.Consistently(func() int32 { return checker.ConnectedCount() }).Should(gomega.Equal(int32(2))) assert.Equal(t, int32(1), checker.DisconnectedCount()) assert.Equal(t, int32(0), checker.OnNewLongestChainCount()) // stop assert.NoError(t, ht.Stop()) }
explode_data.jsonl/9096
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 568 }
[ 2830, 3393, 12346, 31133, 50693, 6459, 74945, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 692, 57279, 11, 21290, 1669, 1185, 1944, 7121, 6093, 1155, 340, 16867, 21290, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetNodeIPs(t *testing.T) { testCases := []struct { isIPv6 bool devAddresses map[string][]string expectIPs []string }{ // case 0 { devAddresses: map[string][]string{"eth0": {"1.2.3.4"}, "lo": {"127.0.0.1"}}, expectIPs: []string{"1.2.3.4"}, }, // case 1 { devAddresses: map[string][]string{"lo": {"127.0.0.1"}}, expectIPs: []string{}, }, // case 2 { devAddresses: map[string][]string{}, expectIPs: []string{}, }, // case 3 { devAddresses: map[string][]string{"encap0": {"10.20.30.40", "fe80::200:ff:fe01:1"}, "lo": {"127.0.0.1", "::1"}, "docker0": {"172.17.0.1"}}, expectIPs: []string{"10.20.30.40", "172.17.0.1"}, }, // case 4 { devAddresses: map[string][]string{"encaps9": {"10.20.30.40"}, "lo": {"127.0.0.1", "::1"}, "encap7": {"1000::", "10.20.30.31"}}, expectIPs: []string{"10.20.30.40", "10.20.30.31"}, }, // case 5 { devAddresses: map[string][]string{"kube-ipvs0": {"2000::", "1.2.3.4"}, "lo": {"127.0.0.1", "::1"}, "encap7": {"1000::", "10.20.30.31"}}, expectIPs: []string{"10.20.30.31"}, }, // case 6 { devAddresses: map[string][]string{"kube-ipvs0": {"1.2.3.4", "2.3.4.5"}, "lo": {"127.0.0.1", "::1"}}, expectIPs: []string{}, }, // case 7 { devAddresses: map[string][]string{"kube-ipvs0": {"1.2.3.4", "2.3.4.5"}}, expectIPs: []string{}, }, // case 8 { devAddresses: map[string][]string{"kube-ipvs0": {"1.2.3.4", "2.3.4.5"}, "eth5": {"3.4.5.6"}, "lo": {"127.0.0.1", "::1"}}, expectIPs: []string{"3.4.5.6"}, }, // case 9 { devAddresses: map[string][]string{"ipvs0": {"1.2.3.4"}, "lo": {"127.0.0.1", "::1"}, "encap7": {"10.20.30.31"}}, expectIPs: []string{"10.20.30.31", "1.2.3.4"}, }, // case 10 { isIPv6: true, devAddresses: map[string][]string{"ipvs0": {"1.2.3.4", "1000::"}, "lo": {"127.0.0.1", "::1"}, "encap7": {"10.20.30.31", "2000::", "fe80::200:ff:fe01:1"}}, expectIPs: []string{"1000::", "2000::"}, }, // case 11 { isIPv6: true, devAddresses: map[string][]string{"ipvs0": {"1.2.3.4", "1000::"}, "lo": {"127.0.0.1", "::1"}, "encap7": {"10.20.30.31", "2000::", 
"fe80::200:ff:fe01:1"}, "kube-ipvs0": {"1.2.3.4", "2.3.4.5", "2000::"}}, expectIPs: []string{"1000::"}, }, } for i := range testCases { fake := netlinktest.NewFakeNetlinkHandle() fake.IsIPv6 = testCases[i].isIPv6 for dev, addresses := range testCases[i].devAddresses { fake.SetLocalAddresses(dev, addresses...) } r := realIPGetter{nl: fake} ips, err := r.NodeIPs() if err != nil { t.Errorf("Unexpected error: %v", err) } ipStrs := sets.NewString() for _, ip := range ips { ipStrs.Insert(ip.String()) } if !ipStrs.Equal(sets.NewString(testCases[i].expectIPs...)) { t.Errorf("case[%d], unexpected mismatch, expected: %v, got: %v", i, testCases[i].expectIPs, ips) } } }
explode_data.jsonl/44348
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1506 }
[ 2830, 3393, 1949, 1955, 3298, 82, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 19907, 58056, 21, 981, 1807, 198, 197, 27302, 52290, 2415, 14032, 45725, 917, 198, 197, 24952, 3298, 82, 262, 3056, 917, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestHandleRealmsUpdate exercises the admin realm-update handler:
// middleware session/user requirements, a database failure (500),
// a GET render (200), and a successful POST update (redirect to the
// realm's edit page).
func TestHandleRealmsUpdate(t *testing.T) {
	t.Parallel()

	ctx := project.TestContext(t)
	harness := envstest.NewServerConfig(t, testDatabaseInstance)

	c := admin.New(harness.Config, harness.Cacher, harness.Database, harness.AuthProvider, harness.RateLimiter, harness.Renderer)
	handler := harness.WithCommonMiddlewares(c.HandleRealmsUpdate())

	t.Run("middleware", func(t *testing.T) {
		t.Parallel()

		// The handler must reject requests that lack a session or a user.
		envstest.ExerciseSessionMissing(t, handler)
		envstest.ExerciseUserMissing(t, handler)
	})

	t.Run("failure", func(t *testing.T) {
		t.Parallel()

		// A controller wired to the bad database should surface a 500.
		c := admin.New(harness.Config, harness.Cacher, harness.BadDatabase, harness.AuthProvider, harness.RateLimiter, harness.Renderer)
		handler := harness.WithCommonMiddlewares(c.HandleRealmsUpdate())

		ctx := ctx
		ctx = controller.WithSession(ctx, &sessions.Session{})
		ctx = controller.WithUser(ctx, &database.User{})

		w, r := envstest.BuildFormRequest(ctx, t, http.MethodPost, "/", nil)
		r = mux.SetURLVars(r, map[string]string{"id": "1"})
		handler.ServeHTTP(w, r)

		if got, want := w.Code, http.StatusInternalServerError; got != want {
			t.Errorf("Expected %d to be %d", got, want)
		}
	})

	t.Run("renders", func(t *testing.T) {
		t.Parallel()

		// A GET for an existing realm id should render the page.
		ctx := ctx
		ctx = controller.WithSession(ctx, &sessions.Session{})
		ctx = controller.WithUser(ctx, &database.User{})

		w, r := envstest.BuildFormRequest(ctx, t, http.MethodGet, "/", nil)
		r = mux.SetURLVars(r, map[string]string{"id": "1"})
		handler.ServeHTTP(w, r)

		if got, want := w.Code, http.StatusOK; got != want {
			t.Errorf("Expected %d to be %d", got, want)
		}
	})

	t.Run("updates", func(t *testing.T) {
		t.Parallel()

		ctx := ctx
		ctx = controller.WithSession(ctx, &sessions.Session{})
		ctx = controller.WithUser(ctx, &database.User{})

		// Submit realm settings; a successful update redirects (303) back
		// to the realm's edit page.
		w, r := envstest.BuildFormRequest(ctx, t, http.MethodPost, "/", &url.Values{
			"can_use_system_sms_config":   []string{"1"},
			"can_use_system_email_config": []string{"1"},
			"short_code_max_minutes":      []string{"60"},
		})
		r = mux.SetURLVars(r, map[string]string{"id": "1"})
		handler.ServeHTTP(w, r)

		if got, want := w.Code, http.StatusSeeOther; got != want {
			t.Errorf("Expected %d to be %d", got, want)
		}
		if got, want := w.Header().Get("Location"), "/admin/realms/1/edit"; got != want {
			t.Errorf("Expected %q to be %q", got, want)
		}
	})
}
explode_data.jsonl/53831
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 930 }
[ 2830, 3393, 6999, 12768, 1011, 4289, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 1669, 2390, 8787, 1972, 1155, 340, 9598, 23518, 1669, 6105, 267, 477, 7121, 5475, 2648, 1155, 11, 1273, 5988, 2523, 692, 1444, 1669, 3986...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVersion(t *testing.T) { t.Run("VERSION not specified", func(t *testing.T) { expected := fmt.Sprintf( "\n\tNot specify ldflags (which link version) during go build\n\tgo version: %s %s/%s", runtime.Version(), runtime.GOOS, runtime.GOARCH) assert.Equal(t, expected, GetVersion()) }) t.Run("VERSION constly specified", func(t *testing.T) { VERSION = "Release-v3.100.200" BUILD_TIME = "2020-09-11T07:05:04Z" COMMIT_HASH = "fb2481c2" COMMIT_TIME = "2020-09-11T07:00:29Z" expected := fmt.Sprintf( "\n\tfree5GC version: %s"+ "\n\tbuild time: %s"+ "\n\tcommit hash: %s"+ "\n\tcommit time: %s"+ "\n\tgo version: %s %s/%s", VERSION, BUILD_TIME, COMMIT_HASH, COMMIT_TIME, runtime.Version(), runtime.GOOS, runtime.GOARCH) assert.Equal(t, expected, GetVersion()) fmt.Println(VERSION) }) t.Run("VERSION capture by system", func(t *testing.T) { var stdout []byte var err error VERSION = "Release-v3.100.200" // VERSION using free5gc's version (git tag), we static set it here stdout, err = exec.Command("bash", "-c", "date -u +\"%Y-%m-%dT%H:%M:%SZ\"").Output() if err != nil { t.Errorf("err: %+v\n", err) } BUILD_TIME = strings.TrimSuffix(string(stdout), "\n") stdout, err = exec.Command("bash", "-c", "git log --pretty=\"%H\" -1 | cut -c1-8").Output() if err != nil { t.Errorf("err: %+v\n", err) } COMMIT_HASH = strings.TrimSuffix(string(stdout), "\n") stdout, err = exec.Command("bash", "-c", "git log --pretty=\"%ai\" -1 | awk '{time=$1\"T\"$2\"Z\"; print time}'").Output() if err != nil { t.Errorf("err: %+v\n", err) } fmt.Println("Insert Data") COMMIT_TIME = strings.TrimSuffix(string(stdout), "\n") expected := fmt.Sprintf( "\n\tfree5GC version: %s"+ "\n\tbuild time: %s"+ "\n\tcommit hash: %s"+ "\n\tcommit time: %s"+ "\n\tgo version: %s %s/%s", VERSION, BUILD_TIME, COMMIT_HASH, COMMIT_TIME, runtime.Version(), runtime.GOOS, runtime.GOARCH) assert.Equal(t, expected, GetVersion()) fmt.Println(VERSION) }) }
explode_data.jsonl/11607
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1016 }
[ 2830, 3393, 5637, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 17636, 537, 5189, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 42400, 1669, 8879, 17305, 1006, 298, 197, 11934, 77, 4955, 2623, 13837, 26129, 11161, 320, 8206, 2656, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMakeSprintUpdateStartDate(t *testing.T) { assert := assert.New(t) evt := sdk.AgileSprintUpdateMutation{} ts, err := time.Parse("2006-01-02", "2020-09-22") assert.NoError(err) date := sdk.NewDateWithTime(ts) evt.Set.StartDate = date update, updated, err := makeSprintUpdate("5", &evt) assert.NoError(err) assert.True(updated) assert.Equal("{\"id\":5,\"startDate\":\"2020-09-22T00:00:00+00:00\"}", sdk.Stringify(update)) }
explode_data.jsonl/3685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 8078, 50, 1350, 4289, 44530, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 197, 28734, 1669, 45402, 49850, 457, 50, 1350, 4289, 53998, 16094, 57441, 11, 1848, 1669, 882, 8937, 445, 17, 15, 15, 21, 12, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAlertmanager boots a Cortex Alertmanager inside an e2e scenario with
// a per-tenant config file, then checks: the config loaded (metrics), the
// API returns the expected route/receiver config for tenant user-1, metric
// prefixes are correct, and responses advertise gzip support via Vary.
func TestAlertmanager(t *testing.T) {
	s, err := e2e.NewScenario(networkName)
	require.NoError(t, err)
	defer s.Close()

	// Seed the shared directory with the tenant's alertmanager config.
	require.NoError(t, writeFileToSharedDir(s, "alertmanager_configs/user-1.yaml", []byte(cortexAlertmanagerUserConfigYaml)))

	alertmanager := e2ecortex.NewAlertmanager(
		"alertmanager",
		mergeFlags(
			AlertmanagerFlags(),
			AlertmanagerLocalFlags(),
		),
		"",
	)
	require.NoError(t, s.StartAndWaitReady(alertmanager))
	// Config must have been (re)loaded successfully exactly once and hashed.
	require.NoError(t, alertmanager.WaitSumMetrics(e2e.Equals(1), "cortex_alertmanager_config_last_reload_successful"))
	require.NoError(t, alertmanager.WaitSumMetrics(e2e.Greater(0), "cortex_alertmanager_config_hash"))

	c, err := e2ecortex.NewClient("", "", alertmanager.HTTPEndpoint(), "", "user-1")
	require.NoError(t, err)

	cfg, err := c.GetAlertmanagerConfig(context.Background())
	require.NoError(t, err)

	// Ensure the returned status config matches alertmanager_test_fixtures/user-1.yaml
	require.NotNil(t, cfg)
	require.Equal(t, "example_receiver", cfg.Route.Receiver)
	require.Len(t, cfg.Route.GroupByStr, 1)
	require.Equal(t, "example_groupby", cfg.Route.GroupByStr[0])
	require.Len(t, cfg.Receivers, 1)
	require.Equal(t, "example_receiver", cfg.Receivers[0].Name)

	// Ensure no service-specific metrics prefix is used by the wrong service.
	assertServiceMetricsPrefixes(t, AlertManager, alertmanager)

	// Test compression by inspecting the response Headers
	req, err := http.NewRequest("GET", fmt.Sprintf("http://%s/api/v1/alerts", alertmanager.HTTPEndpoint()), nil)
	require.NoError(t, err)

	req.Header.Set("X-Scope-OrgID", "user-1")
	req.Header.Set("Accept-Encoding", "gzip")

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// Execute HTTP request
	res, err := http.DefaultClient.Do(req.WithContext(ctx))
	require.NoError(t, err)

	defer res.Body.Close()
	// We assert on the Vary header as the minimum response size for enabling compression is 1500 bytes.
	// This is enough to know whenever the handler for compression is enabled or not.
	require.Equal(t, "Accept-Encoding", res.Header.Get("Vary"))
}
explode_data.jsonl/60597
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 740 }
[ 2830, 3393, 9676, 13297, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 384, 17, 68, 7121, 54031, 46542, 675, 340, 17957, 35699, 1155, 11, 1848, 340, 16867, 274, 10421, 2822, 17957, 35699, 1155, 11, 92820, 1249, 16997, 6184, 1141, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuccessfulFindCommitsRequestWithAltGitObjectDirs(t *testing.T) { server, serverSocketPath := startTestServices(t) defer server.Stop() client, conn := newCommitServiceClient(t, serverSocketPath) defer conn.Close() committerName := "Scrooge McDuck" committerEmail := "scrooge@mcduck.com" testRepoCopy, testRepoCopyPath, cleanupFn := testhelper.NewTestRepoWithWorktree(t) defer cleanupFn() cmd := exec.Command("git", "-C", testRepoCopyPath, "-c", fmt.Sprintf("user.name=%s", committerName), "-c", fmt.Sprintf("user.email=%s", committerEmail), "commit", "--allow-empty", "-m", "An empty commit") altObjectsDir := "./alt-objects" currentHead := testhelper.CreateCommitInAlternateObjectDirectory(t, testRepoCopyPath, altObjectsDir, cmd) testCases := []struct { desc string altDirs []string expectedCount int }{ { desc: "present GIT_ALTERNATE_OBJECT_DIRECTORIES", altDirs: []string{altObjectsDir}, expectedCount: 1, }, { desc: "empty GIT_ALTERNATE_OBJECT_DIRECTORIES", altDirs: []string{}, expectedCount: 0, }, } for _, testCase := range testCases { t.Run(testCase.desc, func(t *testing.T) { testRepoCopy.GitAlternateObjectDirectories = testCase.altDirs request := &gitalypb.FindCommitsRequest{ Repository: testRepoCopy, Revision: currentHead, Limit: 1, } ctx, cancel := testhelper.Context() defer cancel() c, err := client.FindCommits(ctx, request) require.NoError(t, err) receivedCommits := []*gitalypb.GitCommit{} for { resp, err := c.Recv() if err == io.EOF { break } else if err != nil { t.Fatal(err) } receivedCommits = append(receivedCommits, resp.GetCommits()...) } require.Equal(t, testCase.expectedCount, len(receivedCommits), "number of commits received") }) } }
explode_data.jsonl/26119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 785 }
[ 2830, 3393, 36374, 9885, 17977, 1199, 1900, 2354, 26017, 46562, 1190, 97384, 1155, 353, 8840, 836, 8, 341, 41057, 11, 3538, 10286, 1820, 1669, 1191, 2271, 11025, 1155, 340, 16867, 3538, 30213, 2822, 25291, 11, 4534, 1669, 501, 33441, 1860...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestWithBeforeScenario(t *testing.T) { c := false suite := NewSuite(t, WithFeaturesPath("features/empty.feature"), WithBeforeScenario(func(ctx Context) { c = true })) suite.Run() if err := assert.Equals(true, c); err != nil { t.Error(err) } }
explode_data.jsonl/69375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 2354, 10227, 54031, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 895, 198, 96572, 1669, 1532, 28000, 1155, 11, 3085, 21336, 1820, 445, 20304, 14, 3194, 29591, 3975, 3085, 10227, 54031, 18552, 7502, 9608, 8, 341, 197, 1444, 284,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEBSVolumes(t *testing.T) { // Describe all of our test cases: 1 failure and 4 success cases cases := []struct { RegionName string AllRegions bool ExpectedCount int ExpectError bool }{ { RegionName: "us-east-1", ExpectedCount: 2, }, { RegionName: "us-east-2", ExpectedCount: 0, }, { RegionName: "af-south-1", ExpectedCount: 6, }, { RegionName: "undefined-region", ExpectError: true, }, { AllRegions: true, ExpectedCount: 8, }, } // Loop through each test case for _, c := range cases { // Create our fake service factory sf := fakeEBSServiceFactory{ RegionName: c.RegionName, DRResponse: ec2Regions, } // Create a mock activity monitor mon := &mock.ActivityMonitorImpl{} // Invoke our EBSVolumes function actualCount := EBSVolumes(sf, mon, c.AllRegions) // Did we expect an error? if c.ExpectError { // Did it fail to arrive? if !mon.ErrorOccured { t.Error("Expected an error to occur, but it did not... :^(") } } else if mon.ErrorOccured { t.Errorf("Unexpected error occurred: %s", mon.ErrorMessage) } else if actualCount != c.ExpectedCount { t.Errorf("Error: EBSVolumes returned %d; expected %d", actualCount, c.ExpectedCount) } else if mon.ProgramExited { t.Errorf("Unexpected Exit: The program unexpected exited with status code=%d", mon.ExitCode) } } }
explode_data.jsonl/50285
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 36, 7347, 96325, 1155, 353, 8840, 836, 8, 341, 197, 322, 60785, 678, 315, 1039, 1273, 5048, 25, 220, 16, 7901, 323, 220, 19, 2393, 5048, 198, 1444, 2264, 1669, 3056, 1235, 341, 197, 197, 14091, 675, 262, 914, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestPodReadOnlyFilesystem(t *testing.T) { _, s, closeFn := framework.RunAnAPIServer(nil) defer closeFn() isReadOnly := true ns := framework.CreateTestingNamespace("pod-readonly-root", t) defer framework.DeleteTestingNamespace(ns, t) client := clientset.NewForConfigOrDie(&restclient.Config{Host: s.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &schema.GroupVersion{Group: "", Version: "v1"}}}) pod := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "xxx", }, Spec: v1.PodSpec{ Containers: []v1.Container{ { Name: "fake-name", Image: "fakeimage", SecurityContext: &v1.SecurityContext{ ReadOnlyRootFilesystem: &isReadOnly, }, }, }, }, } if _, err := client.CoreV1().Pods(ns.Name).Create(context.TODO(), pod, metav1.CreateOptions{}); err != nil { t.Errorf("Failed to create pod: %v", err) } integration.DeletePodOrErrorf(t, client, ns.Name, pod.Name) }
explode_data.jsonl/73842
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 380 }
[ 2830, 3393, 23527, 20914, 1703, 8948, 1155, 353, 8840, 836, 8, 341, 197, 6878, 274, 11, 3265, 24911, 1669, 12626, 16708, 2082, 2537, 1637, 2836, 27907, 340, 16867, 3265, 24911, 2822, 19907, 20914, 1669, 830, 198, 84041, 1669, 12626, 7251,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseSdp2RawContext(t *testing.T) { sdpCtx, err := ParseSdp2RawContext([]byte(goldenSdp)) assert.Equal(t, nil, err) nazalog.Debugf("sdp=%+v", sdpCtx) }
explode_data.jsonl/55591
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 14463, 50, 9796, 17, 20015, 1972, 1155, 353, 8840, 836, 8, 341, 1903, 9796, 23684, 11, 1848, 1669, 14775, 50, 9796, 17, 20015, 1972, 10556, 3782, 3268, 813, 268, 50, 9796, 1171, 6948, 12808, 1155, 11, 2092, 11, 1848, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestHashingMsgRouter(t *testing.T) { keys := [][]byte{ []byte("foo1"), []byte("foo2"), []byte("foo3"), []byte("foo4"), []byte("foo5"), } for _, tc := range []struct { partitionCount int }{ {partitionCount: 10}, {partitionCount: 5}, } { t.Run(fmt.Sprintf("partitionCount=%d", tc.partitionCount), func(t *testing.T) { msgRouter := newHashingMsgRouter(tc.partitionCount) for _, key := range keys { p1 := msgRouter.Route(key) p2 := msgRouter.Route(key) if p1 != p2 { t.Errorf("Route() returned different partitions for same key %v", key) } if p1 < 0 || p1 >= tc.partitionCount { t.Errorf("Route() returned partition out of range: %v", p1) } } }) } }
explode_data.jsonl/66483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 324 }
[ 2830, 3393, 6370, 287, 6611, 9523, 1155, 353, 8840, 836, 8, 341, 80112, 1669, 52931, 3782, 515, 197, 197, 1294, 3782, 445, 7975, 16, 4461, 197, 197, 1294, 3782, 445, 7975, 17, 4461, 197, 197, 1294, 3782, 445, 7975, 18, 4461, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestScanNullValue(t *testing.T) { user := GetUser("scan_null_value", Config{}) DB.Create(&user) if err := DB.Model(&user).Update("age", nil).Error; err != nil { t.Fatalf("failed to update column age for struct, got error %v", err) } var result User if err := DB.First(&result, "id = ?", user.ID).Error; err != nil { t.Fatalf("failed to query struct data with null age, got error %v", err) } AssertEqual(t, result, user) users := []User{ *GetUser("scan_null_value_for_slice_1", Config{}), *GetUser("scan_null_value_for_slice_2", Config{}), *GetUser("scan_null_value_for_slice_3", Config{}), } DB.Create(&users) if err := DB.Model(&users[0]).Update("age", nil).Error; err != nil { t.Fatalf("failed to update column age for struct, got error %v", err) } var results []User if err := DB.Find(&results, "name like ?", "scan_null_value_for_slice%").Error; err != nil { t.Fatalf("failed to query slice data with null age, got error %v", err) } }
explode_data.jsonl/48720
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 26570, 3280, 1130, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 85937, 445, 16405, 15162, 3142, 497, 5532, 37790, 45409, 7251, 2099, 872, 692, 743, 1848, 1669, 5952, 5659, 2099, 872, 568, 4289, 445, 424, 497, 2092, 568, 1454, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCreateMiner(t *testing.T) { ctx := context.Background() v := vm.NewVMWithSingletons(ctx, t) addrs := vm.CreateAccounts(ctx, t, v, 1, big.Mul(big.NewInt(10_000), big.NewInt(1e18)), 93837778) params := power.CreateMinerParams{ Owner: addrs[0], Worker: addrs[0], SealProofType: abi.RegisteredSealProof_StackedDrg32GiBV1, Peer: abi.PeerID("not really a peer id"), } ret, code := v.ApplyMessage(addrs[0], builtin.StoragePowerActorAddr, big.NewInt(1e10), builtin.MethodsPower.CreateMiner, &params) assert.Equal(t, exitcode.Ok, code) minerAddrs, ok := ret.(*power.CreateMinerReturn) require.True(t, ok) // all expectations implicitly expected to be Ok vm.ExpectInvocation{ // Original send to storage power actor To: builtin.StoragePowerActorAddr, Method: builtin.MethodsPower.CreateMiner, Params: vm.ExpectObject(&params), Ret: vm.ExpectObject(ret), SubInvocations: []vm.ExpectInvocation{{ // Storage power requests init actor construct a miner To: builtin.InitActorAddr, Method: builtin.MethodsInit.Exec, SubInvocations: []vm.ExpectInvocation{{ // Miner constructor gets params from original call To: minerAddrs.IDAddress, Method: builtin.MethodConstructor, Params: vm.ExpectObject(&miner.ConstructorParams{ OwnerAddr: params.Owner, WorkerAddr: params.Worker, SealProofType: params.SealProofType, PeerId: params.Peer, }), SubInvocations: []vm.ExpectInvocation{{ // Miner calls back to power actor to enroll its cron event To: builtin.StoragePowerActorAddr, Method: builtin.MethodsPower.EnrollCronEvent, SubInvocations: []vm.ExpectInvocation{}, }}, }}, }}, }.Matches(t, v.Invocations()[0]) }
explode_data.jsonl/68419
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 754 }
[ 2830, 3393, 4021, 6217, 261, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 5195, 1669, 10995, 7121, 11187, 2354, 25915, 82, 7502, 11, 259, 340, 12718, 5428, 1669, 10995, 7251, 41369, 7502, 11, 259, 11, 348, 11, 220, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCrDoubleMergedDeleteAndRecreate(t *testing.T) { test(t, users("alice", "bob"), as(alice, mkdir("a/b/c/d"), write("a/b/c/d/e1/f1", "f1"), write("a/b/c/d/e2/f2", "f2"), ), as(bob, disableUpdates(), ), as(alice, rm("a/b/c/d/e1/f1"), rm("a/b/c/d/e2/f2"), rmdir("a/b/c/d/e1"), rmdir("a/b/c/d/e2"), rmdir("a/b/c/d"), rmdir("a/b/c"), ), as(bob, noSync(), write("a/b/c/d/e1/f1", "f1.2"), write("a/b/c/d/e2/f2", "f2.2"), reenableUpdates(), read("a/b/c/d/e1/f1", "f1.2"), read("a/b/c/d/e2/f2", "f2.2"), ), as(alice, read("a/b/c/d/e1/f1", "f1.2"), read("a/b/c/d/e2/f2", "f2.2"), ), ) }
explode_data.jsonl/31384
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 449 }
[ 2830, 3393, 16001, 7378, 44, 51525, 6435, 3036, 693, 3182, 1155, 353, 8840, 836, 8, 341, 18185, 1155, 345, 197, 90896, 445, 63195, 497, 330, 47086, 4461, 197, 60451, 17643, 558, 345, 298, 88650, 445, 64, 3470, 2899, 3446, 4461, 298, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOffsetPersistence(t *testing.T) { for _, testData := range testOffsetPersistenceTable { err := offsetPersistenceTestRun(testData) if err != nil { t.Error(err) } } }
explode_data.jsonl/11275
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 6446, 71562, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 67348, 1669, 2088, 1273, 6446, 71562, 2556, 341, 197, 9859, 1669, 4347, 71562, 2271, 6727, 8623, 1043, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244, 6141, 3964, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestUserManagerRepository_Insert(t *testing.T) { user := &datamodels.User{ NickName: "tom", UserName: "Jack", HashPassword: "2222", } userManager := &UserManagerRepository{ table: "user", } id, err := userManager.Insert(user) if err != nil { panic(err) } user.ID = id }
explode_data.jsonl/24306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 1474, 2043, 4624, 76417, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 609, 5911, 40259, 82, 7344, 515, 197, 18317, 865, 675, 25, 257, 330, 37401, 756, 197, 197, 18856, 25, 257, 330, 32631, 756, 197, 197, 6370, 4876, 25, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEnqueueJob(t *testing.T) { tt := []struct { desc string options []JobOption makeAssertions func(*testing.T, *sqlx.Tx, int, error) }{ { desc: "simple success", makeAssertions: func(t *testing.T, tx *sqlx.Tx, id int, err error) { assert.Nil(t, err) // grab job from DB and compare it. job := &Job{} err = tx.Get(job, "SELECT * from pgq_jobs WHERE id = $1", id) assert.Equal(t, "someQueue", job.QueueName) assert.Equal(t, []byte("some data"), job.Data) assert.Equal(t, Durations{ time.Minute, time.Minute * 10, time.Minute * 30, }, job.RetryWaits) }, }, { desc: "job options called", options: []JobOption{func(job *Job) { panic("boom") }}, makeAssertions: func(t *testing.T, tx *sqlx.Tx, id int, err error) { assert.Equal(t, "error/panic while applying option, cause: boom", err.Error()) }, }, } db := getTestDB() defer db.Close() for _, tc := range tt { t.Run(tc.desc, func(t *testing.T) { tx, _ := db.Beginx() defer tx.Rollback() id, err := enqueueJob(tx, "someQueue", []byte("some data"), tc.options...) tc.makeAssertions(t, tx, id, err) }) } }
explode_data.jsonl/48958
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 545 }
[ 2830, 3393, 1702, 4584, 12245, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 3056, 1235, 341, 197, 41653, 1843, 914, 198, 197, 35500, 286, 3056, 12245, 5341, 198, 197, 77438, 90206, 2915, 4071, 8840, 836, 11, 353, 3544, 87, 81362, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_TranslateFormData(t *testing.T) { setup() defer teardown() fromLang := "" toLang := "en" text := []string{"blahblah"} mux.HandleFunc("/language/translate/v2", func(w http.ResponseWriter, r *http.Request) { r.ParseForm() assertEqual(t, r.Form.Get("key"), "FAKE_API_KEY") assertEqual(t, r.Form.Get("q"), text[0]) assertEqual(t, r.Form.Get("target"), toLang) assertEqual(t, r.Form.Get("source"), fromLang) assertEqual(t, r.Form.Get("format"), "text") }) client.Translate(fromLang, toLang, text) }
explode_data.jsonl/11234
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 2959, 34932, 5075, 55966, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 42727, 26223, 1669, 8389, 31709, 26223, 1669, 330, 268, 698, 15425, 1669, 3056, 917, 4913, 70614, 70614, 63159, 2109, 2200, 63623, 4283, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertWithCorrectKeyMapNotStringKey(t *testing.T) { lines, err := convertTypes( "Foo", "Bar", ` typedef string UUID struct Foo { 1: optional map<UUID, string> one } struct Bar { 1: optional map<UUID, string> one }`, nil, nil, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` out.One = make(map[structs.UUID]string, len(in.One)) for key1, value2 := range in.One { out.One[structs.UUID(key1)] = string(value2) } `), lines) }
explode_data.jsonl/62058
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 12012, 2354, 33092, 1592, 2227, 2623, 703, 1592, 1155, 353, 8840, 836, 8, 341, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, 3989, 197, 31199, 914, 23698, 271, 197, 6472, 33428...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInfo(t *testing.T) { expectedURL := "/info" client := &Client{ client: newMockClient(func(req *http.Request) (*http.Response, error) { if !strings.HasPrefix(req.URL.Path, expectedURL) { return nil, fmt.Errorf("Expected URL '%s', got '%s'", expectedURL, req.URL) } info := &types.Info{ ID: "daemonID", Containers: 3, } b, err := json.Marshal(info) if err != nil { return nil, err } return &http.Response{ StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewReader(b)), }, nil }), } info, err := client.Info(context.Background()) if err != nil { t.Fatal(err) } if info.ID != "daemonID" { t.Fatalf("expected daemonID, got %s", info.ID) } if info.Containers != 3 { t.Fatalf("expected 3 containers, got %d", info.Containers) } }
explode_data.jsonl/70206
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 368 }
[ 2830, 3393, 1731, 1155, 353, 8840, 836, 8, 341, 42400, 3144, 1669, 3521, 2733, 698, 25291, 1669, 609, 2959, 515, 197, 25291, 25, 501, 11571, 2959, 18552, 6881, 353, 1254, 9659, 8, 4609, 1254, 12574, 11, 1465, 8, 341, 298, 743, 753, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCountRepoMilestones(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) test := func(repoID int64) { repo := AssertExistsAndLoadBean(t, &Repository{ID: repoID}).(*Repository) count, err := countRepoMilestones(x, repoID) assert.NoError(t, err) assert.EqualValues(t, repo.NumMilestones, count) } test(1) test(2) test(3) count, err := countRepoMilestones(x, NonexistentID) assert.NoError(t, err) assert.EqualValues(t, 0, count) }
explode_data.jsonl/21825
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 2507, 25243, 44, 457, 32510, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 2398, 18185, 1669, 2915, 50608, 915, 526, 21, 19, 8, 341, 197, 17200, 5368, 1669, 5319, 15575, 3036, 5879, 10437, 1155, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetTTL(t *testing.T) { for _, wantTTL := range []uint8{1, 2, 50, 64, 128, 254, 255} { t.Run(fmt.Sprintf("TTL:%d", wantTTL), func(t *testing.T) { c := context.New(t, 65535) defer c.Cleanup() var err tcpip.Error c.EP, err = c.Stack().NewEndpoint(tcp.ProtocolNumber, ipv4.ProtocolNumber, &waiter.Queue{}) if err != nil { t.Fatalf("NewEndpoint failed: %s", err) } if err := c.EP.SetSockOptInt(tcpip.TTLOption, int(wantTTL)); err != nil { t.Fatalf("SetSockOptInt(TTLOption, %d) failed: %s", wantTTL, err) } { err := c.EP.Connect(tcpip.FullAddress{Addr: context.TestAddr, Port: context.TestPort}) if d := cmp.Diff(&tcpip.ErrConnectStarted{}, err); d != "" { t.Fatalf("c.EP.Connect(...) mismatch (-want +got):\n%s", d) } } // Receive SYN packet. b := c.GetPacket() checker.IPv4(t, b, checker.TTL(wantTTL)) }) } }
explode_data.jsonl/75968
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 428 }
[ 2830, 3393, 1649, 51, 13470, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1366, 51, 13470, 1669, 2088, 3056, 2496, 23, 90, 16, 11, 220, 17, 11, 220, 20, 15, 11, 220, 21, 19, 11, 220, 16, 17, 23, 11, 220, 17, 20, 19, 11, 220, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInterfaceErrValidation(t *testing.T) { var v2 interface{} = 1 var v1 interface{} = v2 validate := New() errs := validate.Var(v1, "len=1") Equal(t, errs, nil) errs = validate.Var(v2, "len=1") Equal(t, errs, nil) type ExternalCMD struct { Userid string `json:"userid"` Action uint32 `json:"action"` Data interface{} `json:"data,omitempty" validate:"required"` } s := &ExternalCMD{ Userid: "123456", Action: 10000, // Data: 1, } errs = validate.Struct(s) NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "ExternalCMD.Data", "ExternalCMD.Data", "Data", "Data", "required") type ExternalCMD2 struct { Userid string `json:"userid"` Action uint32 `json:"action"` Data interface{} `json:"data,omitempty" validate:"len=1"` } s2 := &ExternalCMD2{ Userid: "123456", Action: 10000, // Data: 1, } errs = validate.Struct(s2) NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "ExternalCMD2.Data", "ExternalCMD2.Data", "Data", "Data", "len") s3 := &ExternalCMD2{ Userid: "123456", Action: 10000, Data: 2, } errs = validate.Struct(s3) NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "ExternalCMD2.Data", "ExternalCMD2.Data", "Data", "Data", "len") type Inner struct { Name string `validate:"required"` } inner := &Inner{ Name: "", } s4 := &ExternalCMD{ Userid: "123456", Action: 10000, Data: inner, } errs = validate.Struct(s4) NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "ExternalCMD.Data.Name", "ExternalCMD.Data.Name", "Name", "Name", "required") type TestMapStructPtr struct { Errs map[int]interface{} `validate:"gt=0,dive,len=2"` } mip := map[int]interface{}{0: &Inner{"ok"}, 3: nil, 4: &Inner{"ok"}} msp := &TestMapStructPtr{ Errs: mip, } errs = validate.Struct(msp) NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "TestMapStructPtr.Errs[3]", "TestMapStructPtr.Errs[3]", "Errs[3]", "Errs[3]", "len") type 
TestMultiDimensionalStructs struct { Errs [][]interface{} `validate:"gt=0,dive,dive"` } var errStructArray [][]interface{} errStructArray = append(errStructArray, []interface{}{&Inner{"ok"}, &Inner{""}, &Inner{""}}) errStructArray = append(errStructArray, []interface{}{&Inner{"ok"}, &Inner{""}, &Inner{""}}) tms := &TestMultiDimensionalStructs{ Errs: errStructArray, } errs = validate.Struct(tms) NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 4) AssertError(t, errs, "TestMultiDimensionalStructs.Errs[0][1].Name", "TestMultiDimensionalStructs.Errs[0][1].Name", "Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructs.Errs[0][2].Name", "TestMultiDimensionalStructs.Errs[0][2].Name", "Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructs.Errs[1][1].Name", "TestMultiDimensionalStructs.Errs[1][1].Name", "Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructs.Errs[1][2].Name", "TestMultiDimensionalStructs.Errs[1][2].Name", "Name", "Name", "required") type TestMultiDimensionalStructsPtr2 struct { Errs [][]*Inner `validate:"gt=0,dive,dive,required"` } var errStructPtr2Array [][]*Inner errStructPtr2Array = append(errStructPtr2Array, []*Inner{{"ok"}, {""}, {""}}) errStructPtr2Array = append(errStructPtr2Array, []*Inner{{"ok"}, {""}, {""}}) errStructPtr2Array = append(errStructPtr2Array, []*Inner{{"ok"}, {""}, nil}) tmsp2 := &TestMultiDimensionalStructsPtr2{ Errs: errStructPtr2Array, } errs = validate.Struct(tmsp2) NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 6) AssertError(t, errs, "TestMultiDimensionalStructsPtr2.Errs[0][1].Name", "TestMultiDimensionalStructsPtr2.Errs[0][1].Name", "Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructsPtr2.Errs[0][2].Name", "TestMultiDimensionalStructsPtr2.Errs[0][2].Name", "Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructsPtr2.Errs[1][1].Name", "TestMultiDimensionalStructsPtr2.Errs[1][1].Name", 
"Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructsPtr2.Errs[1][2].Name", "TestMultiDimensionalStructsPtr2.Errs[1][2].Name", "Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructsPtr2.Errs[2][1].Name", "TestMultiDimensionalStructsPtr2.Errs[2][1].Name", "Name", "Name", "required") AssertError(t, errs, "TestMultiDimensionalStructsPtr2.Errs[2][2]", "TestMultiDimensionalStructsPtr2.Errs[2][2]", "Errs[2][2]", "Errs[2][2]", "required") m := map[int]interface{}{0: "ok", 3: "", 4: "ok"} errs = validate.Var(m, "len=3,dive,len=2") NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "[3]", "[3]", "[3]", "[3]", "len") errs = validate.Var(m, "len=2,dive,required") NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "", "", "", "", "len") arr := []interface{}{"ok", "", "ok"} errs = validate.Var(arr, "len=3,dive,len=2") NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "[1]", "[1]", "[1]", "[1]", "len") errs = validate.Var(arr, "len=2,dive,required") NotEqual(t, errs, nil) Equal(t, len(errs.(ValidationErrors)), 1) AssertError(t, errs, "", "", "", "", "len") type MyStruct struct { A, B string C interface{} } var a MyStruct a.A = "value" a.C = "nu" errs = validate.Struct(a) Equal(t, errs, nil) }
explode_data.jsonl/77256
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2379 }
[ 2830, 3393, 5051, 7747, 13799, 1155, 353, 8840, 836, 8, 1476, 2405, 348, 17, 3749, 6257, 284, 220, 16, 198, 2405, 348, 16, 3749, 6257, 284, 348, 17, 271, 197, 7067, 1669, 1532, 741, 9859, 82, 1669, 9593, 87968, 3747, 16, 11, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientDisconnectLoopback(t *testing.T) { defer leaktest.AfterTest(t)() stopper := stop.NewStopper() defer stopper.Stop(context.TODO()) local := startGossip(1, stopper, t, metric.NewRegistry()) local.mu.Lock() lAddr := local.mu.is.NodeAddr local.startClientLocked(&lAddr) local.mu.Unlock() local.manage() testutils.SucceedsSoon(t, func() error { ok := local.findClient(func(c *client) bool { return c.addr.String() == lAddr.String() }) != nil if !ok && verifyServerMaps(local, 0) { return nil } return errors.New("local client still connected to itself") }) }
explode_data.jsonl/18547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 227 }
[ 2830, 3393, 2959, 60651, 14620, 1419, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 62644, 712, 1669, 2936, 7121, 10674, 712, 741, 16867, 2936, 712, 30213, 5378, 90988, 2398, 8854, 1669, 1191, 38, 41473, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRefHasher(t *testing.T) { // the test struct is used to specify the expected BMT hash for // segment counts between from and to and lengths from 1 to datalength for _, x := range []struct { from int to int expected func([]byte) []byte }{ // all lengths in [0,64] should be: // // sha3hash(data) // { from: 1, to: 2, expected: func(d []byte) []byte { data := make([]byte, 64) copy(data, d) return sha3hash(t, data) }, }, // all lengths in [3,4] should be: // // sha3hash( // sha3hash(data[:64]) // sha3hash(data[64:]) // ) // { from: 3, to: 4, expected: func(d []byte) []byte { data := make([]byte, 128) copy(data, d) return sha3hash(t, sha3hash(t, data[:64]), sha3hash(t, data[64:])) }, }, // all bmttestutil.SegmentCounts in [5,8] should be: // // sha3hash( // sha3hash( // sha3hash(data[:64]) // sha3hash(data[64:128]) // ) // sha3hash( // sha3hash(data[128:192]) // sha3hash(data[192:]) // ) // ) // { from: 5, to: 8, expected: func(d []byte) []byte { data := make([]byte, 256) copy(data, d) return sha3hash(t, sha3hash(t, sha3hash(t, data[:64]), sha3hash(t, data[64:128])), sha3hash(t, sha3hash(t, data[128:192]), sha3hash(t, data[192:]))) }, }, } { for segCount := x.from; segCount <= x.to; segCount++ { for length := 1; length <= segCount*32; length++ { t.Run(fmt.Sprintf("%d_segments_%d_bytes", segCount, length), func(t *testing.T) { data := make([]byte, length) _, err := io.ReadFull(crand.Reader, data) if err != nil { t.Fatal(err) } expected := x.expected(data) actual, err := reference.NewRefHasher(sha3.NewLegacyKeccak256(), segCount).Hash(data) if err != nil { t.Fatal(err) } if !bytes.Equal(actual, expected) { t.Fatalf("expected %x, got %x", expected, actual) } }) } } } }
explode_data.jsonl/73248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1033 }
[ 2830, 3393, 3945, 6370, 261, 1155, 353, 8840, 836, 8, 341, 197, 322, 279, 1273, 2036, 374, 1483, 311, 13837, 279, 3601, 425, 8505, 5175, 369, 198, 197, 322, 10238, 14579, 1948, 504, 323, 311, 323, 28316, 504, 220, 16, 311, 3258, 278...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestForwardPorts(t *testing.T) { tests := map[string]struct { ports []string clientSends map[int32]string serverSends map[int32]string }{ "forward 1 port with no data either direction": { ports: []string{":5000"}, }, "forward 2 ports with bidirectional data": { ports: []string{":5001", ":6000"}, clientSends: map[int32]string{ 5001: "abcd", 6000: "ghij", }, serverSends: map[int32]string{ 5001: "1234", 6000: "5678", }, }, } for testName, test := range tests { t.Run(testName, func(t *testing.T) { server := httptest.NewServer(fakePortForwardServer(t, testName, test.serverSends, test.clientSends)) defer server.Close() transport, upgrader, err := spdy.RoundTripperFor(&restclient.Config{}) if err != nil { t.Fatal(err) } url, _ := url.Parse(server.URL) dialer := spdy.NewDialer(upgrader, &http.Client{Transport: transport}, "POST", url) stopChan := make(chan struct{}, 1) readyChan := make(chan struct{}) pf, err := New(dialer, test.ports, stopChan, readyChan, os.Stdout, os.Stderr) if err != nil { t.Fatalf("%s: unexpected error calling New: %v", testName, err) } doneChan := make(chan error) go func() { doneChan <- pf.ForwardPorts() }() <-pf.Ready forwardedPorts, err := pf.GetPorts() if err != nil { t.Fatal(err) } remoteToLocalMap := map[int32]int32{} for _, forwardedPort := range forwardedPorts { remoteToLocalMap[int32(forwardedPort.Remote)] = int32(forwardedPort.Local) } clientSend := func(port int32, data string) error { clientConn, err := net.Dial("tcp", fmt.Sprintf("localhost:%d", remoteToLocalMap[port])) if err != nil { return fmt.Errorf("%s: error dialing %d: %s", testName, port, err) } defer clientConn.Close() n, err := clientConn.Write([]byte(data)) if err != nil && err != io.EOF { return fmt.Errorf("%s: Error sending data '%s': %s", testName, data, err) } if n == 0 { return fmt.Errorf("%s: unexpected write of 0 bytes", testName) } b := make([]byte, 4) _, err = clientConn.Read(b) if err != nil && err != io.EOF { return fmt.Errorf("%s: Error reading data: %s", 
testName, err) } if !bytes.Equal([]byte(test.serverSends[port]), b) { return fmt.Errorf("%s: expected to read '%s', got '%s'", testName, test.serverSends[port], b) } return nil } for port, data := range test.clientSends { if err := clientSend(port, data); err != nil { t.Error(err) } } // tell r.ForwardPorts to stop close(stopChan) // wait for r.ForwardPorts to actually return err = <-doneChan if err != nil { t.Errorf("%s: unexpected error: %s", testName, err) } }) } }
explode_data.jsonl/70908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1202 }
[ 2830, 3393, 25925, 68273, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 197, 3394, 981, 3056, 917, 198, 197, 25291, 50, 1412, 2415, 18640, 18, 17, 30953, 198, 197, 41057, 50, 1412, 2415, 18640, 18, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdatePod(t *testing.T) { testCases := []struct { description string prevPod *v1.Pod newPod *v1.Pod taintedNodes map[string][]v1.Taint expectDelete bool additionalSleep time.Duration }{ { description: "scheduling onto tainted Node", prevPod: testutil.NewPod("pod1", ""), newPod: testutil.NewPod("pod1", "node1"), taintedNodes: map[string][]v1.Taint{ "node1": {createNoExecuteTaint(1)}, }, expectDelete: true, }, { description: "scheduling onto tainted Node with toleration", prevPod: addToleration(testutil.NewPod("pod1", ""), 1, -1), newPod: addToleration(testutil.NewPod("pod1", "node1"), 1, -1), taintedNodes: map[string][]v1.Taint{ "node1": {createNoExecuteTaint(1)}, }, expectDelete: false, }, { description: "removing toleration", prevPod: addToleration(testutil.NewPod("pod1", "node1"), 1, 100), newPod: testutil.NewPod("pod1", "node1"), taintedNodes: map[string][]v1.Taint{ "node1": {createNoExecuteTaint(1)}, }, expectDelete: true, }, { description: "lengthening toleration shouldn't work", prevPod: addToleration(testutil.NewPod("pod1", "node1"), 1, 1), newPod: addToleration(testutil.NewPod("pod1", "node1"), 1, 100), taintedNodes: map[string][]v1.Taint{ "node1": {createNoExecuteTaint(1)}, }, expectDelete: true, additionalSleep: 1500 * time.Millisecond, }, } for _, item := range testCases { stopCh := make(chan struct{}) fakeClientset := fake.NewSimpleClientset() holder := &podHolder{} controller := NewNoExecuteTaintManager(fakeClientset, holder.getPod, getNodeFromClientset(fakeClientset), getPodsAssignedToNode(fakeClientset)) controller.recorder = testutil.NewFakeRecorder() go controller.Run(stopCh) controller.taintedNodes = item.taintedNodes holder.setPod(item.prevPod) controller.PodUpdated(nil, item.prevPod) fakeClientset.ClearActions() time.Sleep(timeForControllerToProgress) holder.setPod(item.newPod) controller.PodUpdated(item.prevPod, item.newPod) // wait a bit time.Sleep(timeForControllerToProgress) if item.additionalSleep > 0 { 
time.Sleep(item.additionalSleep) } podDeleted := false for _, action := range fakeClientset.Actions() { if action.GetVerb() == "delete" && action.GetResource().Resource == "pods" { podDeleted = true } } if podDeleted != item.expectDelete { t.Errorf("%v: Unexpected test result. Expected delete %v, got %v", item.description, item.expectDelete, podDeleted) } close(stopCh) } }
explode_data.jsonl/47587
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1102 }
[ 2830, 3393, 4289, 23527, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 42407, 257, 914, 198, 197, 50728, 23527, 260, 353, 85, 16, 88823, 198, 197, 8638, 23527, 688, 353, 85, 16, 88823, 198, 197, 3244, 30229, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestReverse(t *testing.T) { testCases := map[string]struct { Path *epic.Path PathReversed *epic.Path }{ "Basic reverse": { Path: &epic.Path{ PktID: epic.PktID{ Timestamp: 1, Counter: 0x02000003, }, PHVF: []byte{1, 2, 3, 4}, LHVF: []byte{5, 6, 7, 8}, ScionPath: &scion.Raw{ Base: scion.Base{ PathMeta: scion.MetaHdr{ CurrINF: 0, CurrHF: 0, SegLen: [3]uint8{2, 2, 0}, }, NumINF: 2, NumHops: 4, }, Raw: append([]byte(nil), rawScionPath...), // copy of rawScionPath }, }, PathReversed: &epic.Path{ PktID: epic.PktID{ Timestamp: 1, Counter: 0x02000003, }, PHVF: []byte{1, 2, 3, 4}, LHVF: []byte{5, 6, 7, 8}, ScionPath: &scion.Raw{ Base: scion.Base{ PathMeta: scion.MetaHdr{ CurrINF: 1, CurrHF: 3, SegLen: [3]uint8{2, 2, 0}, }, NumINF: 2, NumHops: 4, }, Raw: append([]byte(nil), rawScionReversePath...), // copy of rawScionReversePath }, }, }, "Reverse a reversed path": { Path: &epic.Path{ PktID: epic.PktID{ Timestamp: 1, Counter: 0x02000003, }, PHVF: []byte{1, 2, 3, 4}, LHVF: []byte{5, 6, 7, 8}, ScionPath: &scion.Raw{ Base: scion.Base{ PathMeta: scion.MetaHdr{ CurrINF: 1, CurrHF: 3, SegLen: [3]uint8{2, 2, 0}, }, NumINF: 2, NumHops: 4, }, Raw: append([]byte(nil), rawScionReversePath...), // copy of rawScionReversePath }, }, PathReversed: &epic.Path{ PktID: epic.PktID{ Timestamp: 1, Counter: 0x02000003, }, PHVF: []byte{1, 2, 3, 4}, LHVF: []byte{5, 6, 7, 8}, ScionPath: &scion.Raw{ Base: scion.Base{ PathMeta: scion.MetaHdr{ CurrINF: 0, CurrHF: 0, SegLen: [3]uint8{2, 2, 0}, }, NumINF: 2, NumHops: 4, }, Raw: append([]byte(nil), rawScionPath...), // copy of rawScionPath }, }, }, } for name, tc := range testCases { name, tc := name, tc t.Run(name, func(t *testing.T) { got, err := tc.Path.Reverse() assert.NoError(t, err) assert.Equal(t, tc.PathReversed, got) }) } }
explode_data.jsonl/16863
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1328 }
[ 2830, 3393, 45695, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 69640, 260, 353, 747, 292, 17474, 198, 197, 69640, 693, 43776, 353, 747, 292, 17474, 198, 197, 59403, 197, 197, 1, 15944, 9931, 788,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetImageStream(t *testing.T) { tests := []struct { name string imageNS string imageName string imageTag string wantErr bool want *imagev1.ImageStream wantActionsCnt int }{ { name: "Case: Valid request for imagestream of latest version and not namespace qualified", imageNS: "", imageName: "foo", imageTag: "latest", want: fakeImageStream("foo", "testing", []string{"latest"}), wantActionsCnt: 1, }, { name: "Case: Valid explicit request for specific namespace qualified imagestream of specific version", imageNS: "openshift", imageName: "foo", imageTag: "latest", want: fakeImageStream("foo", "openshift", []string{"latest", "3.5"}), wantActionsCnt: 1, }, { name: "Case: Valid request for specific imagestream of specific version not in current namespace", imageNS: "", imageName: "foo", imageTag: "3.5", want: fakeImageStream("foo", "openshift", []string{"latest", "3.5"}), wantActionsCnt: 1, // Ideally supposed to be 2 but bcoz prependreactor is not parameter sensitive, the way it is mocked makes it 1 }, { name: "Case: Invalid request for non-current and non-openshift namespace imagestream/Non-existant imagestream", imageNS: "foo", imageName: "bar", imageTag: "3.5", wantErr: true, wantActionsCnt: 1, }, { name: "Case: Request for non-existant tag", imageNS: "", imageName: "foo", imageTag: "3.6", wantErr: true, wantActionsCnt: 2, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { fkclient, fkclientset := FakeNew() fkclient.Namespace = "testing" openshiftIS := fakeImageStream(tt.imageName, "openshift", []string{"latest", "3.5"}) currentNSIS := fakeImageStream(tt.imageName, "testing", []string{"latest"}) fkclientset.ImageClientset.PrependReactor("get", "imagestreams", func(action ktesting.Action) (bool, runtime.Object, error) { if tt.imageNS == "" { if isTagInImageStream(*fakeImageStream("foo", "testing", []string{"latest"}), tt.imageTag) { return true, currentNSIS, nil } else if isTagInImageStream(*fakeImageStream("foo", "openshift", []string{"latest", 
"3.5"}), tt.imageTag) { return true, openshiftIS, nil } return true, nil, fmt.Errorf("Requested imagestream %s with tag %s not found", tt.imageName, tt.imageTag) } if tt.imageNS == "testing" { return true, currentNSIS, nil } if tt.imageNS == "openshift" { return true, openshiftIS, nil } return true, nil, fmt.Errorf("Requested imagestream %s with tag %s not found", tt.imageName, tt.imageTag) }) got, err := fkclient.GetImageStream(tt.imageNS, tt.imageName, tt.imageTag) if len(fkclientset.ImageClientset.Actions()) != tt.wantActionsCnt { t.Errorf("expected %d ImageClientset.Actions() in GetImageStream, got %v", tt.wantActionsCnt, fkclientset.ImageClientset.Actions()) } if !tt.wantErr == (err != nil) { t.Errorf("\nclient.GetImageStream(imageNS, imageName, imageTag) unexpected error %v, wantErr %v", err, tt.wantErr) } if !reflect.DeepEqual(got, tt.want) { t.Errorf("GetImageStream() = %#v, want %#v and the current project name is %s\n\n", got, tt, fkclient.GetCurrentProjectName()) } }) } }
explode_data.jsonl/65159
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1539 }
[ 2830, 3393, 1949, 1906, 3027, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 31426, 2448, 286, 914, 198, 197, 31426, 675, 414, 914, 198, 197, 31426, 5668, 981, 914, 198, 197, 50780, 7747, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestAddProwConfig(t *testing.T) { t.Parallel() o := TestOptions{} o.Setup() o.Repos = append(o.Repos, "test/repo2") err := o.AddProwConfig() assert.NoError(t, err) cm, err := o.KubeClient.CoreV1().ConfigMaps(o.NS).Get("config", metav1.GetOptions{}) assert.NoError(t, err) prowConfig := &config.Config{} yaml.Unmarshal([]byte(cm.Data["config.yaml"]), &prowConfig) assert.NotEmpty(t, prowConfig.Presubmits["test/repo"]) assert.NotEmpty(t, prowConfig.Presubmits["test/repo2"]) }
explode_data.jsonl/70868
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 2212, 47, 651, 2648, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 22229, 1669, 3393, 3798, 16094, 22229, 39820, 2822, 22229, 2817, 966, 284, 8737, 10108, 2817, 966, 11, 330, 1944, 10758, 5368, 17, 5130, 9859, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAuthAPISupportedExchanges(t *testing.T) { e := CreateTestBot(t) if result := e.GetAuthAPISupportedExchanges(); len(result) != 0 { t.Fatal("Unexpected result", result) } exch := e.ExchangeManager.GetExchangeByName(testExchange) b := exch.GetBase() b.API.AuthenticatedWebsocketSupport = true b.API.Credentials.Key = "test" b.API.Credentials.Secret = "test" if result := e.GetAuthAPISupportedExchanges(); len(result) != 1 { t.Fatal("Unexpected result", result) } }
explode_data.jsonl/59227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 1949, 5087, 2537, 1637, 12513, 840, 19365, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 4230, 2271, 23502, 1155, 340, 743, 1102, 1669, 384, 2234, 5087, 2537, 1637, 12513, 840, 19365, 2129, 2422, 4456, 8, 961, 220, 15, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_defaultErrorHandler_JSON(t *testing.T) { r := require.New(t) app := New(Options{}) app.GET("/", func(c Context) error { return c.Error(401, fmt.Errorf("boom")) }) w := httptest.New(app) res := w.JSON("/").Get() r.Equal(401, res.Code) ct := res.Header().Get("content-type") r.Equal("application/json", ct) b := res.Body.String() r.Contains(b, `"code":401`) r.Contains(b, `"error":"boom"`) r.Contains(b, `"trace":"`) }
explode_data.jsonl/82181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 9993, 66673, 25356, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 340, 28236, 1669, 1532, 7, 3798, 37790, 28236, 17410, 35460, 2915, 1337, 9608, 8, 1465, 341, 197, 853, 272, 6141, 7, 19, 15, 16, 11, 8879, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddNews(t *testing.T) { body := []byte(`{"articles":[{"title":"TITLE","thumb_media_id":"THUMB_MEDIA_ID","author":"AUTHOR","digest":"DIGEST","show_cover_pic":1,"content":"CONTENT","content_source_url":"CONTENT_SOURCE_URL","need_open_comment":1,"only_fans_can_comment":1}]}`) resp := &http.Response{ StatusCode: http.StatusOK, Body: io.NopCloser(bytes.NewReader([]byte(`{ "errcode": 0, "errmsg": "ok", "media_id": "MEDIA_ID" }`))), } ctrl := gomock.NewController(t) defer ctrl.Finish() client := mock.NewMockHTTPClient(ctrl) client.EXPECT().Do(gomock.AssignableToTypeOf(context.TODO()), http.MethodPost, "https://api.weixin.qq.com/cgi-bin/material/add_news?access_token=ACCESS_TOKEN", body).Return(resp, nil) oa := New("APPID", "APPSECRET") oa.SetClient(wx.WithHTTPClient(client)) articles := []*NewsArticle{ { Title: "TITLE", ThumbMediaID: "THUMB_MEDIA_ID", Author: "AUTHOR", Digest: "DIGEST", ShowCoverPic: 1, Content: "CONTENT", ContentSourceURL: "CONTENT_SOURCE_URL", NeedOpenComment: 1, OnlyFansCanComment: 1, }, } result := new(ResultMaterialAdd) err := oa.Do(context.TODO(), "ACCESS_TOKEN", AddNews(articles, result)) assert.Nil(t, err) assert.Equal(t, &ResultMaterialAdd{ MediaID: "MEDIA_ID", }, result) }
explode_data.jsonl/20873
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 601 }
[ 2830, 3393, 2212, 14373, 1155, 353, 8840, 836, 8, 341, 35402, 1669, 3056, 3782, 5809, 4913, 16243, 66582, 2102, 3252, 50328, 2198, 25036, 29173, 842, 3252, 3617, 71567, 49527, 3450, 2198, 3094, 3252, 26694, 868, 2198, 36339, 3252, 86929, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetchMessagesFirstPollingClockGetInvalidValues(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() ch := randomChannelID(t) keys := keyOfChannel(ch) sl := domain.SubscriberLocator{ChannelID: ch, SubscriberID: "sbsc-1"} s, redisCmd, _ := newMockedRedisStorageAndPubSubDispatcher(ctrl) // (1st fetchMessagesNow) MGET clock cursor redisCmd.EXPECT().MGet(gomock.Any(), keys.Clock(), keys.SubscriberCursor(sl.SubscriberID)).Return(strPList(t, "INVALID", "INVALID"), nil) _, _, _, err := s.FetchMessages(context.Background(), sl, 100, domain.Duration{Duration: 30 * time.Second}) dspstesting.IsError(t, domain.ErrSubscriptionNotFound, err) }
explode_data.jsonl/51709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 20714, 15820, 5338, 49207, 287, 26104, 1949, 7928, 6227, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 23049, 1669, 4194, 9629, 915, 1155, 340, 80112, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetContainerSecurityOpts(t *testing.T) { containerName := "bar" makeConfig := func(annotations map[string]string) *runtimeApi.PodSandboxConfig { return makeSandboxConfigWithLabelsAndAnnotations("pod", "ns", "1234", 1, nil, annotations) } tests := []struct { msg string config *runtimeApi.PodSandboxConfig expectedOpts []string }{{ msg: "No security annotations", config: makeConfig(nil), expectedOpts: []string{"seccomp=unconfined"}, }, { msg: "Seccomp unconfined", config: makeConfig(map[string]string{ api.SeccompContainerAnnotationKeyPrefix + containerName: "unconfined", }), expectedOpts: []string{"seccomp=unconfined"}, }, { msg: "Seccomp default", config: makeConfig(map[string]string{ api.SeccompContainerAnnotationKeyPrefix + containerName: "docker/default", }), expectedOpts: nil, }, { msg: "Seccomp pod default", config: makeConfig(map[string]string{ api.SeccompPodAnnotationKey: "docker/default", }), expectedOpts: nil, }, { msg: "AppArmor runtime/default", config: makeConfig(map[string]string{ apparmor.ContainerAnnotationKeyPrefix + containerName: apparmor.ProfileRuntimeDefault, }), expectedOpts: []string{"seccomp=unconfined"}, }, { msg: "AppArmor local profile", config: makeConfig(map[string]string{ apparmor.ContainerAnnotationKeyPrefix + containerName: apparmor.ProfileNamePrefix + "foo", }), expectedOpts: []string{"seccomp=unconfined", "apparmor=foo"}, }, { msg: "AppArmor and seccomp profile", config: makeConfig(map[string]string{ api.SeccompContainerAnnotationKeyPrefix + containerName: "docker/default", apparmor.ContainerAnnotationKeyPrefix + containerName: apparmor.ProfileNamePrefix + "foo", }), expectedOpts: []string{"apparmor=foo"}, }} for i, test := range tests { opts, err := getContainerSecurityOpts(containerName, test.config, "test/seccomp/profile/root") assert.NoError(t, err, "TestCase[%d]: %s", i, test.msg) assert.Len(t, opts, len(test.expectedOpts), "TestCase[%d]: %s", i, test.msg) for _, opt := range test.expectedOpts { assert.Contains(t, opts, 
opt, "TestCase[%d]: %s", i, test.msg) } } }
explode_data.jsonl/74425
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 828 }
[ 2830, 3393, 1949, 4502, 15352, 43451, 1155, 353, 8840, 836, 8, 341, 53290, 675, 1669, 330, 2257, 698, 77438, 2648, 1669, 2915, 7, 39626, 2415, 14032, 30953, 8, 353, 22255, 6563, 88823, 50, 31536, 2648, 341, 197, 853, 1281, 50, 31536, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnum(t *testing.T) { suite := tester.NewParserDumpTestSuite(t) suite.UsePHP8() suite.Code = `<?php enum A {} enum B implements Bar, Baz { } enum C: int implements Bar {} ` suite.Expected = `&ast.Root{ Stmts: []ast.Vertex{ &ast.StmtEnum{ Name: &ast.Identifier{ Val: []byte("A"), }, Stmts: []ast.Vertex{}, }, &ast.StmtEnum{ Name: &ast.Identifier{ Val: []byte("B"), }, Implements: []ast.Vertex{ &ast.Name{ Parts: []ast.Vertex{ &ast.NamePart{ Val: []byte("Bar"), }, }, }, &ast.Name{ Parts: []ast.Vertex{ &ast.NamePart{ Val: []byte("Baz"), }, }, }, }, Stmts: []ast.Vertex{}, }, &ast.StmtEnum{ Name: &ast.Identifier{ Val: []byte("C"), }, Type: &ast.Name{ Parts: []ast.Vertex{ &ast.NamePart{ Val: []byte("int"), }, }, }, Implements: []ast.Vertex{ &ast.Name{ Parts: []ast.Vertex{ &ast.NamePart{ Val: []byte("Bar"), }, }, }, }, Stmts: []ast.Vertex{}, }, }, },` suite.Run() }
explode_data.jsonl/3848
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 618 }
[ 2830, 3393, 10766, 1155, 353, 8840, 836, 8, 341, 96572, 1669, 37111, 7121, 6570, 51056, 2271, 28000, 1155, 340, 96572, 9046, 15158, 23, 741, 96572, 20274, 284, 1565, 1316, 1208, 198, 9018, 362, 5613, 9018, 425, 5169, 4716, 11, 91711, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetFuturesLiquidationOrders(t *testing.T) { t.Parallel() _, err := b.GetFuturesLiquidationOrders(context.Background(), currency.EMPTYPAIR, "", 0, time.Time{}, time.Time{}) if err != nil { t.Error(err) } _, err = b.GetFuturesLiquidationOrders(context.Background(), currency.NewPairWithDelimiter("BTCUSD", "PERP", "_"), "", 0, time.Unix(1577836800, 0), time.Unix(1580515200, 0)) if err != nil { t.Error(err) } }
explode_data.jsonl/76610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 1949, 37, 74606, 96640, 367, 24898, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 293, 2234, 37, 74606, 96640, 367, 24898, 5378, 19047, 1507, 11413, 48377, 82966, 11, 7342, 220, 15, 11, 882, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPodPhaseWithRestartNever(t *testing.T) { desiredState := v1.PodSpec{ NodeName: "machine", Containers: []v1.Container{ {Name: "containerA"}, {Name: "containerB"}, }, RestartPolicy: v1.RestartPolicyNever, } tests := []struct { pod *v1.Pod status v1.PodPhase test string }{ {&v1.Pod{Spec: desiredState, Status: v1.PodStatus{}}, v1.PodPending, "waiting"}, { &v1.Pod{ Spec: desiredState, Status: v1.PodStatus{ ContainerStatuses: []v1.ContainerStatus{ runningState("containerA"), runningState("containerB"), }, }, }, v1.PodRunning, "all running with restart never", }, { &v1.Pod{ Spec: desiredState, Status: v1.PodStatus{ ContainerStatuses: []v1.ContainerStatus{ succeededState("containerA"), succeededState("containerB"), }, }, }, v1.PodSucceeded, "all succeeded with restart never", }, { &v1.Pod{ Spec: desiredState, Status: v1.PodStatus{ ContainerStatuses: []v1.ContainerStatus{ failedState("containerA"), failedState("containerB"), }, }, }, v1.PodFailed, "all failed with restart never", }, { &v1.Pod{ Spec: desiredState, Status: v1.PodStatus{ ContainerStatuses: []v1.ContainerStatus{ runningState("containerA"), succeededState("containerB"), }, }, }, v1.PodRunning, "mixed state #1 with restart never", }, { &v1.Pod{ Spec: desiredState, Status: v1.PodStatus{ ContainerStatuses: []v1.ContainerStatus{ runningState("containerA"), }, }, }, v1.PodPending, "mixed state #2 with restart never", }, { &v1.Pod{ Spec: desiredState, Status: v1.PodStatus{ ContainerStatuses: []v1.ContainerStatus{ runningState("containerA"), waitingState("containerB"), }, }, }, v1.PodPending, "mixed state #3 with restart never", }, } for _, test := range tests { status := getPhase(&test.pod.Spec, test.pod.Status.ContainerStatuses) assert.Equal(t, test.status, status, "[test %s]", test.test) } }
explode_data.jsonl/49886
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 985 }
[ 2830, 3393, 23527, 30733, 2354, 59354, 26155, 1155, 353, 8840, 836, 8, 341, 52912, 2690, 1397, 1669, 348, 16, 88823, 8327, 515, 197, 30217, 675, 25, 330, 32056, 756, 197, 197, 74632, 25, 3056, 85, 16, 33672, 515, 298, 197, 63121, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSendWithoutFollowRedirects(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.URL.Path { case "/original": w.Header().Set("Location", "/redirected") w.WriteHeader(301) case "/redirected": t.Fatalf("expect not to redirect, but was") } })) defer server.Close() svc := awstesting.NewClient(&aws.Config{ DisableSSL: aws.Bool(true), Endpoint: aws.String(server.URL), }) svc.Handlers.Clear() svc.Handlers.Send.PushBackNamed(corehandlers.SendHandler) r := svc.NewRequest(&request.Operation{ Name: "Operation", HTTPPath: "/original", }, nil, nil) r.DisableFollowRedirects = true err := r.Send() if err != nil { t.Errorf("expect no error, got %v", err) } if e, a := 301, r.HTTPResponse.StatusCode; e != a { t.Errorf("expect %d status code, got %d", e, a) } }
explode_data.jsonl/44099
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 360 }
[ 2830, 3393, 11505, 26040, 12480, 17725, 82, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 8961, 435, 20893, 17474, 341, 197, 2722, 352...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTraverse(t *testing.T) { memDB := db.NewMemDB() tree, err := NewMutableTree(memDB, 0) require.NoError(t, err) for i := 0; i < 6; i++ { tree.set([]byte(fmt.Sprintf("k%d", i)), []byte(fmt.Sprintf("v%d", i))) } require.Equal(t, 11, tree.nodeSize(), "Size of tree unexpected") }
explode_data.jsonl/23773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 1282, 22439, 1155, 353, 8840, 836, 8, 341, 14145, 3506, 1669, 2927, 7121, 18816, 3506, 741, 51968, 11, 1848, 1669, 1532, 11217, 6533, 39908, 3506, 11, 220, 15, 340, 17957, 35699, 1155, 11, 1848, 692, 2023, 600, 1669, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPruneEmptyKeys(t *testing.T) { specs := map[string]struct { srcFunc IndexerFunc expResult []interface{} expError error }{ "non empty": { srcFunc: func(v interface{}) ([]interface{}, error) { return []interface{}{uint64(0), uint64(1)}, nil }, expResult: []interface{}{uint64(0), uint64(1)}, }, "empty": { srcFunc: func(v interface{}) ([]interface{}, error) { return []interface{}{}, nil }, expResult: []interface{}{}, }, "nil": { srcFunc: func(v interface{}) ([]interface{}, error) { return nil, nil }, }, "empty in the beginning": { srcFunc: func(v interface{}) ([]interface{}, error) { return []interface{}{[]byte{}, uint64(0), uint64(1)}, nil }, expResult: []interface{}{uint64(0), uint64(1)}, }, "empty in the middle": { srcFunc: func(v interface{}) ([]interface{}, error) { return []interface{}{uint64(0), []byte{}, uint64(1)}, nil }, expResult: []interface{}{uint64(0), uint64(1)}, }, "empty at the end": { srcFunc: func(v interface{}) ([]interface{}, error) { return []interface{}{uint64(0), uint64(1), []byte{}}, nil }, expResult: []interface{}{uint64(0), uint64(1)}, }, "error passed": { srcFunc: func(v interface{}) ([]interface{}, error) { return nil, stdErrors.New("test") }, expError: stdErrors.New("test"), }, } for msg, spec := range specs { t.Run(msg, func(t *testing.T) { r, err := pruneEmptyKeys(spec.srcFunc)(nil) require.Equal(t, spec.expError, err) if spec.expError != nil { return } assert.Equal(t, spec.expResult, r) }) } }
explode_data.jsonl/42814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 705 }
[ 2830, 3393, 3533, 2886, 3522, 8850, 1155, 353, 8840, 836, 8, 341, 98100, 82, 1669, 2415, 14032, 60, 1235, 341, 197, 41144, 9626, 256, 8008, 261, 9626, 198, 197, 48558, 2077, 3056, 4970, 16094, 197, 48558, 1454, 220, 1465, 198, 197, 59...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetDaemonEndpointsFromStringInvalid4(t *testing.T) { udpAddr := "1.2.1:2a" // error in resolving address port tcpAddr := "127.0.0.1:2000" dAddr := "udp:" + udpAddr + " tcp:" + tcpAddr dEndpt, err := GetDaemonEndpointsFromString(dAddr) assert.True(t, strings.Contains(fmt.Sprint(err), portErr)) assert.NotNil(t, err) assert.Nil(t, dEndpt) }
explode_data.jsonl/49939
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 1949, 89177, 80786, 44491, 7928, 19, 1155, 353, 8840, 836, 8, 341, 197, 31101, 13986, 1669, 330, 16, 13, 17, 13, 16, 25, 17, 64, 1, 442, 1465, 304, 52483, 2621, 2635, 198, 3244, 4672, 13986, 1669, 330, 16, 17, 22, 13, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSpanStart(t *testing.T) { assert := assert.New(t) tracer := newTracer(withTransport(newDefaultTransport())) span := tracer.newRootSpan("pylons.request", "pylons", "/") // a new span sets the Start after the initialization assert.NotEqual(int64(0), span.Start) }
explode_data.jsonl/42845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 12485, 3479, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 25583, 9584, 1669, 501, 1282, 9584, 16980, 27560, 1755, 3675, 27560, 12145, 197, 1480, 1669, 64306, 4618, 8439, 12485, 445, 3288, 75, 2382, 8223, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadCollectionSpec(t *testing.T) { testutils.TestFiles(t, "testdata/loader-*.elf", func(t *testing.T, file string) { spec, err := LoadCollectionSpec(file) if err != nil { t.Fatal("Can't parse ELF:", err) } hashMapSpec := &MapSpec{ Name: "hash_map", Type: Hash, KeySize: 4, ValueSize: 2, MaxEntries: 1, } checkMapSpec(t, spec.Maps, "hash_map", hashMapSpec) checkMapSpec(t, spec.Maps, "array_of_hash_map", &MapSpec{ Name: "hash_map", Type: ArrayOfMaps, KeySize: 4, MaxEntries: 2, }) spec.Maps["array_of_hash_map"].InnerMap = spec.Maps["hash_map"] hashMap2Spec := &MapSpec{ Name: "", Type: Hash, KeySize: 4, ValueSize: 1, MaxEntries: 2, Flags: 1, } checkMapSpec(t, spec.Maps, "hash_map2", hashMap2Spec) checkMapSpec(t, spec.Maps, "hash_of_hash_map", &MapSpec{ Type: HashOfMaps, KeySize: 4, MaxEntries: 2, }) spec.Maps["hash_of_hash_map"].InnerMap = spec.Maps["hash_map2"] checkProgramSpec(t, spec.Programs, "xdp_prog", &ProgramSpec{ Type: XDP, License: "MIT", KernelVersion: 0, }) checkProgramSpec(t, spec.Programs, "no_relocation", &ProgramSpec{ Type: SocketFilter, License: "MIT", KernelVersion: 0, }) if rodata := spec.Maps[".rodata"]; rodata != nil { err := spec.RewriteConstants(map[string]interface{}{ "arg": uint32(1), }) if err != nil { t.Fatal("Can't rewrite constant:", err) } err = spec.RewriteConstants(map[string]interface{}{ "totallyBogus": uint32(1), }) if err == nil { t.Error("Rewriting a bogus constant doesn't fail") } } t.Log(spec.Programs["xdp_prog"].Instructions) if spec.Programs["xdp_prog"].ByteOrder != internal.NativeEndian { return } coll, err := NewCollectionWithOptions(spec, CollectionOptions{ Programs: ProgramOptions{ LogLevel: 1, }, }) testutils.SkipIfNotSupported(t, err) if err != nil { t.Fatal(err) } defer coll.Close() ret, _, err := coll.Programs["xdp_prog"].Test(make([]byte, 14)) if err != nil { t.Fatal("Can't run program:", err) } if ret != 5 { t.Error("Expected return value to be 5, got", ret) } }) }
explode_data.jsonl/27738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1068 }
[ 2830, 3393, 5879, 6482, 8327, 1155, 353, 8840, 836, 8, 341, 18185, 6031, 8787, 10809, 1155, 11, 330, 92425, 93128, 12, 19922, 490, 497, 2915, 1155, 353, 8840, 836, 11, 1034, 914, 8, 341, 197, 98100, 11, 1848, 1669, 8893, 6482, 8327, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestCreateApplyRevision(t *testing.T) { set := newStatefulSet(1) set.Status.CollisionCount = new(int32) revision, err := newRevision(set, 1, set.Status.CollisionCount) if err != nil { t.Fatal(err) } set.Spec.Template.Spec.Containers[0].Name = "foo" if set.Annotations == nil { set.Annotations = make(map[string]string) } key := "foo" expectedValue := "bar" set.Annotations[key] = expectedValue restoredSet, err := ApplyRevision(set, revision) if err != nil { t.Fatal(err) } restoredRevision, err := newRevision(restoredSet, 2, restoredSet.Status.CollisionCount) if err != nil { t.Fatal(err) } if !history.EqualRevision(revision, restoredRevision) { t.Errorf("wanted %v got %v", string(revision.Data.Raw), string(restoredRevision.Data.Raw)) } value, ok := restoredRevision.Annotations[key] if !ok { t.Errorf("missing annotation %s", key) } if value != expectedValue { t.Errorf("for annotation %s wanted %s got %s", key, expectedValue, value) } }
explode_data.jsonl/68376
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 4021, 28497, 33602, 1155, 353, 8840, 836, 8, 341, 8196, 1669, 36848, 1262, 1649, 7, 16, 340, 8196, 10538, 727, 29037, 2507, 284, 501, 1548, 18, 17, 340, 197, 28342, 11, 1848, 1669, 501, 33602, 14171, 11, 220, 16, 11, 738...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestAPIPutObjectPartHandlerPreSign(t *testing.T) { defer DetectTestLeak(t)() ExecObjectLayerAPITest(t, testAPIPutObjectPartHandlerPreSign, []string{"NewMultipart", "PutObjectPart"}) }
explode_data.jsonl/10708
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 7082, 19103, 1190, 5800, 3050, 4703, 7264, 1155, 353, 8840, 836, 8, 341, 16867, 33287, 2271, 2304, 585, 1155, 8, 741, 197, 10216, 1190, 9188, 2537, 952, 477, 1155, 11, 1273, 7082, 19103, 1190, 5800, 3050, 4703, 7264, 11, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_wrappingPaperNeeded(t *testing.T) { data := []struct { dims []int expected int }{ {[]int{2, 3, 4}, 58}, } for _, d := range data { area := wrappingPaperNeeded(d.dims) if area != d.expected { t.Errorf("expected %d but got %d", d.expected, area) } } }
explode_data.jsonl/30720
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 44074, 3629, 30898, 56706, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 1235, 341, 197, 2698, 5742, 257, 3056, 396, 198, 197, 42400, 526, 198, 197, 59403, 197, 197, 90, 1294, 396, 90, 17, 11, 220, 18, 11, 220, 19, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRoute_SetService(t *testing.T) { testCases := []struct { param string errWanted bool }{ {"example", false}, {"", true}, } for _, tc := range testCases { route := Route{} errGot := route.SetService(tc.param) if tc.errWanted != (errGot != nil) { t.Errorf("SetService(%s) = %v; errWanted = %t", route.service, errGot, tc.errWanted) } if errGot == nil && route.service != tc.param { t.Errorf("SetService(%s) != want %s", route.service, tc.param) } } }
explode_data.jsonl/67791
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 4899, 14812, 1860, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 36037, 257, 914, 198, 197, 9859, 54, 7566, 1807, 198, 197, 59403, 197, 197, 4913, 8687, 497, 895, 1583, 197, 197, 4913, 497, 830, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMockMessageQueue(t *testing.T) { const msg = "hello, world!" const max = len(msg) const timeout = time.Millisecond mq := mqsend.OpenMockMessageQueue(mqsend.MessageQueueConfig{ MaxMessageSize: int64(max), MaxQueueSize: 1, }) defer mq.Close() t.Run( "message-too-large", func(t *testing.T) { data := make([]byte, max+1) err := mq.Send(context.Background(), data) if !errors.As(err, new(mqsend.MessageTooLargeError)) { t.Errorf( "Expected MessageTooLargeError when message is larger than the max size, got %v", err, ) } }, ) t.Run( "send", func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() err := mq.Send(ctx, []byte(msg)) if err != nil { t.Errorf("Send returned error: %v", err) } }, ) t.Run( "send-timeout", func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() err := mq.Send(ctx, []byte(msg)) if !errors.As(err, new(mqsend.TimedOutError)) { t.Errorf("Expected TimedOutError when the queue is full, got %v", err) } if !errors.Is(err, context.DeadlineExceeded) { t.Errorf("Expected DeadlineExceeded when the queue is full, got %v", err) } }, ) t.Run( "receive", func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() data, err := mq.Receive(ctx) if err != nil { t.Fatalf("Receive returned error: %v", err) } if string(data) != msg { t.Errorf("Expected to receive data %q, got %q", msg, data) } }, ) t.Run( "send-again", func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() err := mq.Send(ctx, []byte(msg)) if err != nil { t.Errorf("Send returned error: %v", err) } }, ) }
explode_data.jsonl/68782
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 810 }
[ 2830, 3393, 11571, 2052, 7554, 1155, 353, 8840, 836, 8, 341, 4777, 3750, 284, 330, 14990, 11, 1879, 24734, 4777, 1932, 284, 2422, 8119, 340, 4777, 9632, 284, 882, 71482, 271, 2109, 80, 1669, 72298, 6681, 12953, 11571, 2052, 7554, 1255, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInsertShardedUnownedReverseMap(t *testing.T) { invschema := &vschemapb.SrvVSchema{ Keyspaces: map[string]*vschemapb.Keyspace{ "sharded": { Sharded: true, Vindexes: map[string]*vschemapb.Vindex{ "hash": { Type: "hash", }, "twocol": { Type: "lookup", Params: map[string]string{ "table": "lkp2", "from": "from1,from2", "to": "toc", }, }, "onecol": { Type: "lookup", Params: map[string]string{ "table": "lkp1", "from": "from", "to": "toc", }, }, }, Tables: map[string]*vschemapb.Table{ "t1": { ColumnVindexes: []*vschemapb.ColumnVindex{{ Name: "hash", Columns: []string{"id"}, }, { Name: "hash", Columns: []string{"c1", "c2"}, }, { Name: "hash", Columns: []string{"c3"}, }}, }, }, }, }, } vs, err := vindexes.BuildVSchema(invschema) if err != nil { t.Fatal(err) } ks := vs.Keyspaces["sharded"] ins := NewInsert( InsertSharded, ks.Keyspace, []sqltypes.PlanValue{{ // colVindex columns: id Values: []sqltypes.PlanValue{{ // rows for id Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(1), }, { Value: sqltypes.NewInt64(2), }, { Value: sqltypes.NewInt64(3), }}, }}, }, { // colVindex columns: c1, c2 Values: []sqltypes.PlanValue{{ // rows for c1 Values: []sqltypes.PlanValue{{ Value: sqltypes.NULL, }, { Value: sqltypes.NULL, }, { Value: sqltypes.NULL, }}, }, { // rows for c2 Values: []sqltypes.PlanValue{{ Value: sqltypes.NULL, }, { Value: sqltypes.NULL, }, { Value: sqltypes.NULL, }}, }}, }, { // colVindex columns: c3 Values: []sqltypes.PlanValue{{ // rows for c3 Values: []sqltypes.PlanValue{{ Value: sqltypes.NULL, }, { Value: sqltypes.NULL, }, { Value: sqltypes.NULL, }}, }}, }}, ks.Tables["t1"], "prefix", []string{" mid1", " mid2", " mid3"}, " suffix", ) // nonemptyResult will cause the lookup verify queries to succeed. 
nonemptyResult := sqltypes.MakeTestResult( sqltypes.MakeTestFields( "c1", "int64", ), "1", ) vc := &loggingVCursor{ shards: []string{"-20", "20-"}, shardForKsid: []string{"20-", "-20", "20-"}, results: []*sqltypes.Result{ nonemptyResult, }, } _, err = ins.Execute(vc, map[string]*querypb.BindVariable{}, false) if err != nil { t.Fatal(err) } vc.ExpectLog(t, []string{ `ResolveDestinations sharded [value:"0" value:"1" value:"2" ] Destinations:DestinationKeyspaceID(166b40b44aba4bd6),DestinationKeyspaceID(06e7ea22ce92708f),DestinationKeyspaceID(4eb190c9a2fa169c)`, `ExecuteMultiShard ` + `sharded.20-: prefix mid1, mid3 suffix /* vtgate:: keyspace_id:166b40b44aba4bd6,4eb190c9a2fa169c */ ` + `{_c10: type:UINT64 value:"1" _c11: type:UINT64 value:"2" _c12: type:UINT64 value:"3" ` + `_c20: _c21: _c22: ` + `_c30: type:UINT64 value:"1" _c31: type:UINT64 value:"2" _c32: type:UINT64 value:"3" ` + `_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `sharded.-20: prefix mid2 suffix /* vtgate:: keyspace_id:06e7ea22ce92708f */ ` + `{_c10: type:UINT64 value:"1" _c11: type:UINT64 value:"2" _c12: type:UINT64 value:"3" ` + `_c20: _c21: _c22: ` + `_c30: type:UINT64 value:"1" _c31: type:UINT64 value:"2" _c32: type:UINT64 value:"3" ` + `_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `true false`, }) }
explode_data.jsonl/55997
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1860 }
[ 2830, 3393, 13780, 2016, 20958, 1806, 18332, 45695, 2227, 1155, 353, 8840, 836, 8, 341, 17430, 11562, 3416, 1669, 609, 11562, 2407, 391, 65, 808, 10553, 53, 8632, 515, 197, 197, 8850, 27338, 25, 2415, 14032, 8465, 11562, 2407, 391, 65, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestQuery_ScopeRequired(t *testing.T) { for _, cmd := range []string{"read", "range"} { c := StartCapture() exit = func(r int) {} os.Args = []string{ "dosa", "query", cmd, "--namePrefix", "foo", "--path", "../../testentity", "TestEntity", "StrKey:eq:foo", } main() assert.Contains(t, c.stop(true), "-s, --scope' was not specified") } }
explode_data.jsonl/70147
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 2859, 1098, 2417, 8164, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 5439, 1669, 2088, 3056, 917, 4913, 878, 497, 330, 9669, 9207, 341, 197, 1444, 1669, 5145, 27429, 741, 197, 14519, 284, 2915, 2601, 526, 8, 5613, 197, 25078,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMakeRequestForAllSecrets(t *testing.T) { assert := tassert.New(t) mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() mockCatalog := catalog.NewMockMeshCataloger(mockCtrl) type testCase struct { name string proxySvcAccount identity.ServiceIdentity allowedOutboundServices []service.MeshService expectedDiscoveryRequest *xds_discovery.DiscoveryRequest } proxyServiceIdentity := identity.K8sServiceAccount{Name: "test-sa", Namespace: "ns-1"}.ToServiceIdentity() proxySvcAccount := proxyServiceIdentity.ToK8sServiceAccount() certSerialNumber := certificate.SerialNumber("123456") proxyXDSCertCN := certificate.CommonName(fmt.Sprintf("%s.%s.%s.%s", uuid.New(), envoy.KindSidecar, proxySvcAccount.Name, proxySvcAccount.Namespace)) testProxy, err := envoy.NewProxy(proxyXDSCertCN, certSerialNumber, nil) assert.Nil(err) testCases := []testCase{ { name: "scenario where proxy is both downstream and upstream", proxySvcAccount: proxyServiceIdentity, allowedOutboundServices: []service.MeshService{ {Name: "service-2", Namespace: "ns-2"}, {Name: "service-3", Namespace: "ns-3"}, }, expectedDiscoveryRequest: &xds_discovery.DiscoveryRequest{ TypeUrl: string(envoy.TypeSDS), ResourceNames: []string{ // 1. Proxy's own cert to present to peer during mTLS/TLS handshake "service-cert:ns-1/test-sa", // 2. Outbound validation certs to validate upstreams "root-cert-for-mtls-outbound:ns-2/service-2", "root-cert-for-mtls-outbound:ns-3/service-3", // 3. Inbound validation certs to validate downstreams "root-cert-for-mtls-inbound:ns-1/test-sa", "root-cert-https:ns-1/test-sa", }, }, }, { name: "scenario where proxy is only a downsteam (no service)", proxySvcAccount: proxyServiceIdentity, allowedOutboundServices: []service.MeshService{ {Name: "service-2", Namespace: "ns-2"}, {Name: "service-3", Namespace: "ns-3"}, }, expectedDiscoveryRequest: &xds_discovery.DiscoveryRequest{ TypeUrl: string(envoy.TypeSDS), ResourceNames: []string{ // 1. 
Proxy's own cert to present to peer during mTLS/TLS handshake "service-cert:ns-1/test-sa", // 2. Outbound validation certs to validate upstreams "root-cert-for-mtls-outbound:ns-2/service-2", "root-cert-for-mtls-outbound:ns-3/service-3", // 3. Inbound validation certs to validate downstreams "root-cert-for-mtls-inbound:ns-1/test-sa", "root-cert-https:ns-1/test-sa", }, }, }, { name: "scenario where proxy does not have allowed upstreams to connect to", proxySvcAccount: proxyServiceIdentity, allowedOutboundServices: nil, expectedDiscoveryRequest: &xds_discovery.DiscoveryRequest{ TypeUrl: string(envoy.TypeSDS), ResourceNames: []string{ // 1. Proxy's own cert to present to peer during mTLS/TLS handshake "service-cert:ns-1/test-sa", // 4. Inbound validation certs to validate downstreams "root-cert-for-mtls-inbound:ns-1/test-sa", "root-cert-https:ns-1/test-sa", }, }, }, { name: "scenario where proxy is both downstream and upstream, with mutiple upstreams on the proxy", proxySvcAccount: proxyServiceIdentity, allowedOutboundServices: []service.MeshService{ {Name: "service-2", Namespace: "ns-2"}, {Name: "service-3", Namespace: "ns-3"}, }, expectedDiscoveryRequest: &xds_discovery.DiscoveryRequest{ TypeUrl: string(envoy.TypeSDS), ResourceNames: []string{ // 1. Proxy's own cert to present to peer during mTLS/TLS handshake "service-cert:ns-1/test-sa", // 2. Outbound validation certs to validate upstreams "root-cert-for-mtls-outbound:ns-2/service-2", "root-cert-for-mtls-outbound:ns-3/service-3", // 4. 
Inbound validation certs to validate downstreams "root-cert-for-mtls-inbound:ns-1/test-sa", "root-cert-https:ns-1/test-sa", }, }, }, } for i, tc := range testCases { t.Run(fmt.Sprintf("Testing test case %d: %s", i, tc.name), func(t *testing.T) { mockCatalog.EXPECT().ListAllowedOutboundServicesForIdentity(tc.proxySvcAccount).Return(tc.allowedOutboundServices).Times(1) actual := makeRequestForAllSecrets(testProxy, mockCatalog) assert.Equal(tc.expectedDiscoveryRequest.TypeUrl, actual.TypeUrl) assert.ElementsMatch(tc.expectedDiscoveryRequest.ResourceNames, actual.ResourceNames) }) } }
explode_data.jsonl/68829
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1809 }
[ 2830, 3393, 8078, 1900, 2461, 2403, 19773, 82, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 259, 2207, 7121, 1155, 340, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 2822, 77333, 41606, 1669, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuilder_WithNamespaces(t *testing.T) { type fields struct { apiserver string kubeconfig string namespaces koptions.NamespaceList ctx context.Context enabledCollectors []string whiteBlackList whiteBlackLister } type args struct { n koptions.NamespaceList } tests := []struct { name string fields fields args args want *Builder }{ { name: "namespace", fields: fields{ apiserver: "", kubeconfig: "", namespaces: koptions.NamespaceList{}, ctx: ctx, enabledCollectors: []string{"col1", "col2"}, // whiteBlackList: whiteBlackLister{func(s) { return false }, func(s) { return false }}, }, args: args{ n: []string{"ns1"}, }, want: &Builder{ apiserver: "", kubeconfig: "", namespaces: []string{"ns1"}, ctx: ctx, enabledCollectors: []string{"col1", "col2"}, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b := &Builder{ apiserver: tt.fields.apiserver, kubeconfig: tt.fields.kubeconfig, namespaces: tt.fields.namespaces, ctx: tt.fields.ctx, enabledCollectors: tt.fields.enabledCollectors, whiteBlackList: tt.fields.whiteBlackList, } if got := b.WithNamespaces(tt.args.n); !reflect.DeepEqual(got, tt.want) { t.Errorf("Builder.WithNamespaces() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/20151
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 757 }
[ 2830, 3393, 3297, 62, 2354, 7980, 27338, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 197, 13725, 2836, 260, 914, 198, 197, 16463, 3760, 1676, 286, 914, 198, 197, 93940, 27338, 286, 595, 2875, 46011, 852, 198, 197, 20985...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMapStringByte_ReplaceIfExists(t *testing.T) { Convey("TestMapStringByte.ReplaceIfExists", t, func() { var k string = "0580f468-1ff3-4dfd-8104-378deaa3c06f" var v byte = 14 var x byte = 216 test := omap.NewMapStringByte(1) So(test.Put(k, v), ShouldPointTo, test) So(test.Len(), ShouldEqual, 1) So(test.ReplaceIfExists("cce0b1d7-a584-4011-af34-431cde61fcae", x), ShouldPointTo, test) So(test.Len(), ShouldEqual, 1) So(test.At(0).Val, ShouldEqual, v) So(test.ReplaceIfExists(k, x), ShouldPointTo, test) So(test.Len(), ShouldEqual, 1) So(test.At(0).Val, ShouldEqual, x) }) }
explode_data.jsonl/5064
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 278 }
[ 2830, 3393, 2227, 703, 7153, 62, 23107, 40232, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 2271, 2227, 703, 7153, 20858, 40232, 497, 259, 11, 2915, 368, 341, 197, 2405, 595, 914, 284, 330, 15, 20, 23, 15, 69, 19, 21, 23, 12, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewAppVersion(t *testing.T) { assert := assert.New(t) var appVersion = new(model.AppVersion) appVersion.AppID = 1 appVersion.DbVersion = "3.69" db := model.NewAppVersion(appVersion) if assert.NotNil(db) { assert.Equal(nil, db.Error, "they should be equal") } db = model.DeleteAppVersion(appVersion.AppID) if assert.NotNil(db) { assert.Equal(nil, db.Error, "they should be equal") } }
explode_data.jsonl/53340
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 3564, 2164, 5637, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 2405, 906, 5637, 284, 501, 7635, 5105, 5637, 340, 28236, 5637, 5105, 915, 284, 220, 16, 198, 28236, 5637, 45332, 5637, 284, 330, 18, 13, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEncryptThenMAC(t *testing.T) { for i := 0; i < 10; i++ { message := random.BytesOrPanic(i * 13) encryptionKey := random.BytesOrPanic(32) authenticationKey := random.BytesOrPanic(32) encryptedBytes, err := crypto.EncryptThenMAC(message, encryptionKey, authenticationKey) assert.NoError(t, err) decryptedBytes, err := crypto.MACThenDecrypt(encryptedBytes, encryptionKey, authenticationKey) assert.NoError(t, err) assert.Equal(t, message, decryptedBytes) } }
explode_data.jsonl/42135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 61520, 12209, 25788, 1155, 353, 8840, 836, 8, 341, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 26, 600, 1027, 341, 197, 24753, 1669, 4194, 36868, 2195, 47, 31270, 1956, 353, 220, 16, 18, 340, 197, 197, 79937, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIsLeapYear(t *testing.T) { tests := []struct { name string year int expected bool }{ {"343", 343, false}, {"100", 100, false}, {"1700", 1700, false}, {"2100", 2100, false}, {"1600", 1600, true}, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { assert.Equal(t, test.expected, isLeapYear(test.year)) }) } }
explode_data.jsonl/62865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 3872, 96657, 9490, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 197, 3157, 257, 526, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 4913, 18, 19, 18, 497, 220, 18, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBasicWrapPipeline(t *testing.T) { mockOut := &mockOutput{} mockPi := &mockPipe{ ts: make(chan types.Transaction), } procs := 0 newOutput, err := WrapWithPipeline(&procs, mockOut, func(i *int) (types.Pipeline, error) { return nil, errors.New("nope") }) if err == nil { t.Error("expected error from back constructor") } newOutput, err = WrapWithPipeline(&procs, mockOut, func(i *int) (types.Pipeline, error) { return mockPi, nil }) if err != nil { t.Fatal(err) } dudMsgChan := make(chan types.Transaction) if err = newOutput.Consume(dudMsgChan); err != nil { t.Error(err) } if mockPi.tsIn != dudMsgChan { t.Error("Wrong message chan in mock pipe") } if mockOut.ts != mockPi.ts { t.Error("Wrong messages chan in mock pipe") } newOutput.CloseAsync() if err = newOutput.WaitForClose(time.Second); err != nil { t.Error(err) } }
explode_data.jsonl/70128
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 15944, 26787, 34656, 1155, 353, 8840, 836, 8, 341, 77333, 2662, 1669, 609, 16712, 5097, 16094, 77333, 34767, 1669, 609, 16712, 34077, 515, 197, 57441, 25, 1281, 35190, 4494, 29284, 1326, 197, 630, 197, 90087, 1669, 220, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransformWithSimpleFiltering feeds a two-message proto3 schema through
// proto2gql with a filter that excludes "test.B", and asserts (after
// whitespace trimming) that only the GraphQL type for message A is emitted.
func TestTransformWithSimpleFiltering(t *testing.T) { schema := []byte(` syntax = "proto3"; package test; message A { string name = 1; } message B { string name = 1; } `) input := new(bytes.Buffer) input.Write(schema) output := new(bytes.Buffer) transformer := proto2gql.NewTransformer(output) transformer.SetFilter(func(typeName string) bool { return typeName != "test.B" }) if err := transformer.Transform(input); err != nil { t.Fatal(err) } expected := ` type TestA { name: String } ` expected = strings.TrimSpace(expected) actual := strings.TrimSpace(output.String()) if expected != actual { t.Fatalf("Expected %s to equal to %s", expected, actual) } }
explode_data.jsonl/2078
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 8963, 2354, 16374, 5632, 287, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 3056, 3782, 61528, 56193, 284, 330, 15110, 18, 876, 1722, 1273, 401, 1994, 362, 341, 262, 914, 829, 284, 220, 16, 26, 715, 630, 1994, 425, 341...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTideContextPolicy_MissingRequiredContexts(t *testing.T) { testCases := []struct { name string skipUnknownContexts bool required, optional []string existingContexts, expectedContexts []string }{ { name: "no contexts registered", existingContexts: []string{"c1", "c2"}, }, { name: "optional contexts registered / no missing contexts", optional: []string{"o1", "o2", "o3"}, existingContexts: []string{"c1", "c2"}, }, { name: "required contexts registered / missing contexts", required: []string{"c1", "c2", "c3"}, existingContexts: []string{"c1", "c2"}, expectedContexts: []string{"c3"}, }, { name: "required contexts registered / no missing contexts", required: []string{"c1", "c2", "c3"}, existingContexts: []string{"c1", "c2", "c3"}, }, { name: "optional and required contexts registered / missing contexts", optional: []string{"o1", "o2", "o3"}, required: []string{"c1", "c2", "c3"}, existingContexts: []string{"c1", "c2"}, expectedContexts: []string{"c3"}, }, { name: "optional and required contexts registered / no missing contexts", optional: []string{"o1", "o2", "o3"}, required: []string{"c1", "c2"}, existingContexts: []string{"c1", "c2", "c4"}, }, } for _, tc := range testCases { cp := TideContextPolicy{ SkipUnknownContexts: &tc.skipUnknownContexts, RequiredContexts: tc.required, OptionalContexts: tc.optional, } missingContexts := cp.MissingRequiredContexts(tc.existingContexts) if !sets.NewString(missingContexts...).Equal(sets.NewString(tc.expectedContexts...)) { t.Errorf("%s - expected %v got %v", tc.name, tc.expectedContexts, missingContexts) } } }
explode_data.jsonl/53863
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 838 }
[ 2830, 3393, 51, 577, 1972, 13825, 1245, 13577, 8164, 1972, 82, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 7561, 914, 198, 197, 1903, 13389, 13790, 1972, 82, 394, 1807, 198, 197, 58183, 11, 10101, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestSidecarWithVolume operates a workflow containing a sidecar with volume
// mounts and asserts that the created pod's "sidevol" container references
// both the "claim-vol" and "existing-vol" mounts.
func TestSidecarWithVolume(t *testing.T) { wf := unmarshalWF(sidecarWithVol) cancel, controller := newController(wf) defer cancel() ctx := context.Background() woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowRunning, woc.wf.Status.Phase) pods, err := listPods(woc) assert.NoError(t, err) assert.True(t, len(pods.Items) > 0, "pod was not created successfully") pod := pods.Items[0] claimVolFound := false existingVolFound := false for _, ctr := range pod.Spec.Containers { if ctr.Name == "sidevol" { for _, vol := range ctr.VolumeMounts { if vol.Name == "claim-vol" { claimVolFound = true } if vol.Name == "existing-vol" { existingVolFound = true } } } } assert.True(t, claimVolFound, "claim vol was not referenced by sidecar") assert.True(t, existingVolFound, "existing vol was not referenced by sidecar") }
explode_data.jsonl/70948
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 16384, 6918, 2354, 18902, 1155, 353, 8840, 836, 8, 341, 6692, 69, 1669, 650, 27121, 32131, 43002, 6918, 2354, 36361, 340, 84441, 11, 6461, 1669, 501, 2051, 3622, 69, 340, 16867, 9121, 2822, 20985, 1669, 2266, 19047, 741, 669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestNiceMD_Get(t *testing.T) { nmd := metautils.NiceMD(metadata.Pairs(testPairs...)) assert.Equal(t, "uno", nmd.Get("singlekey"), "for present single-key value it should return it") assert.Equal(t, "one", nmd.Get("multikey"), "for present multi-key should return first value") assert.Empty(t, nmd.Get("nokey"), "for non existing key should return stuff") }
explode_data.jsonl/48145
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 44978, 6076, 13614, 1155, 353, 8840, 836, 8, 341, 9038, 2277, 1669, 8823, 6031, 2067, 558, 6076, 54436, 1069, 4720, 8623, 54228, 1112, 1171, 6948, 12808, 1155, 11, 330, 16311, 497, 308, 2277, 2234, 445, 15338, 792, 3975, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIndirectIndex_Entries_NonExistent(t *testing.T) { index := tsm1.NewDirectIndex() index.Add("cpu", time.Unix(0, 0), time.Unix(1, 0), 10, 100) index.Add("cpu", time.Unix(2, 0), time.Unix(3, 0), 20, 200) b, err := index.MarshalBinary() if err != nil { t.Fatalf("unexpected error marshaling index: %v", err) } indirect := tsm1.NewIndirectIndex() if err := indirect.UnmarshalBinary(b); err != nil { t.Fatalf("unexpected error unmarshaling index: %v", err) } // mem has not been added to the index so we should get now entries back // for both exp := index.Entries("mem") entries := indirect.Entries("mem") if got, exp := len(entries), len(exp); got != exp && exp != 0 { t.Fatalf("entries length mismatch: got %v, exp %v", got, exp) } }
explode_data.jsonl/82461
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 1425, 1226, 1552, 62, 24533, 1604, 263, 840, 18128, 1155, 353, 8840, 836, 8, 341, 26327, 1669, 259, 3563, 16, 7121, 16027, 1552, 741, 26327, 1904, 445, 16475, 497, 882, 10616, 941, 7, 15, 11, 220, 15, 701, 882, 10616, 94...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestKubeadmConfigReconciler_Reconcile_ReturnEarlyIfMachineHasDataSecretName(t *testing.T) { g := NewWithT(t) machine := newMachine(nil, "machine") machine.Spec.Bootstrap.DataSecretName = pointer.StringPtr("something") config := newKubeadmConfig(machine, "cfg") objects := []client.Object{ machine, config, } myclient := helpers.NewFakeClientWithScheme(setupScheme(), objects...) k := &KubeadmConfigReconciler{ Client: myclient, } request := ctrl.Request{ NamespacedName: client.ObjectKey{ Namespace: "default", Name: "cfg", }, } result, err := k.Reconcile(ctx, request) g.Expect(err).NotTo(HaveOccurred()) g.Expect(result.Requeue).To(BeFalse()) g.Expect(result.RequeueAfter).To(Equal(time.Duration(0))) }
explode_data.jsonl/44311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 42, 392, 3149, 76, 2648, 693, 40446, 5769, 50693, 40446, 457, 53316, 41198, 2679, 21605, 10281, 1043, 19773, 675, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 2109, 3814, 1669, 501, 21605, 27907, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidMemberRoll(t *testing.T) { validator, _, tracker := createMemberRollValidatorTestFixture(smcp) tracker.AddReactor("create", "subjectaccessreviews", createSubjectAccessReviewReactor(true, true, nil)) roll := newMemberRoll("default", "istio-system", "app-namespace") response := validator.Handle(ctx, createCreateRequest(roll)) assert.True(response.Allowed, "Expected validator to allow ServiceMeshMemberRoll", t) }
explode_data.jsonl/70267
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 4088, 9366, 32355, 1155, 353, 8840, 836, 8, 341, 197, 16112, 11, 8358, 28331, 1669, 1855, 9366, 32355, 14256, 69356, 55280, 4672, 340, 25583, 9683, 1904, 693, 5621, 445, 3182, 497, 330, 11501, 5211, 39475, 497, 1855, 13019, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTopicAttributesCreation builds CephBucketTopic specs for HTTP, AMQP,
// and Kafka endpoints and asserts createTopicAttributes emits the expected
// attribute map for each; it also checks the "cloudevents" flag is only
// honored on Quincy (not Pacific).
func TestTopicAttributesCreation(t *testing.T) { capnslog.SetGlobalLogLevel(capnslog.DEBUG) os.Setenv("ROOK_LOG_LEVEL", "DEBUG") falseString := "false" trueString := "true" emptyString := "" t.Run("test HTTP attributes", func(t *testing.T) { uri := "http://localhost" expectedAttrs := map[string]*string{ "OpaqueData": &emptyString, "cloudevents": &falseString, "persistent": &falseString, "push-endpoint": &uri, "verify-ssl": &trueString, } bucketTopic := &cephv1.CephBucketTopic{ ObjectMeta: metav1.ObjectMeta{ Name: name, Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "CephBucketTopic", }, Spec: cephv1.BucketTopicSpec{ ObjectStoreName: store, ObjectStoreNamespace: namespace, Endpoint: cephv1.TopicEndpointSpec{ HTTP: &cephv1.HTTPEndpointSpec{ URI: uri, }, }, }, } assert.Equal(t, expectedAttrs, createTopicAttributes(bucketTopic, version.Quincy)) // make sure that non quincy version don't use the cloudevents flag bucketTopic.Spec.Endpoint.HTTP.SendCloudEvents = true delete(expectedAttrs, "cloudevents") assert.Equal(t, expectedAttrs, createTopicAttributes(bucketTopic, version.Pacific)) // make sure that quincy version can use the cloudevents flag expectedAttrs["cloudevents"] = &trueString assert.Equal(t, expectedAttrs, createTopicAttributes(bucketTopic, version.Quincy)) }) t.Run("test AMQP attributes", func(t *testing.T) { uri := "amqp://my-rabbitmq-service:5672/vhost1" ackLevel := "broker" exchange := "ex1" expectedAttrs := map[string]*string{ "OpaqueData": &emptyString, "persistent": &falseString, "push-endpoint": &uri, "verify-ssl": &trueString, "amqp-exchange": &exchange, "amqp-ack-level": &ackLevel, } bucketTopic := &cephv1.CephBucketTopic{ ObjectMeta: metav1.ObjectMeta{ Name: name, Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "CephBucketTopic", }, Spec: cephv1.BucketTopicSpec{ ObjectStoreName: store, ObjectStoreNamespace: namespace, Endpoint: cephv1.TopicEndpointSpec{ AMQP: &cephv1.AMQPEndpointSpec{ URI: uri, AckLevel: ackLevel, Exchange: 
exchange, }, }, }, } assert.Equal(t, expectedAttrs, createTopicAttributes(bucketTopic, version.Quincy)) }) t.Run("test Kafka attributes", func(t *testing.T) { uri := "kafka://my-kafka-service:9092" ackLevel := "broker" expectedAttrs := map[string]*string{ "OpaqueData": &emptyString, "persistent": &falseString, "push-endpoint": &uri, "verify-ssl": &trueString, "kafka-ack-level": &ackLevel, "use-ssl": &trueString, } bucketTopic := &cephv1.CephBucketTopic{ ObjectMeta: metav1.ObjectMeta{ Name: name, Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ Kind: "CephBucketTopic", }, Spec: cephv1.BucketTopicSpec{ ObjectStoreName: store, ObjectStoreNamespace: namespace, Endpoint: cephv1.TopicEndpointSpec{ Kafka: &cephv1.KafkaEndpointSpec{ URI: uri, AckLevel: ackLevel, UseSSL: true, }, }, }, } assert.Equal(t, expectedAttrs, createTopicAttributes(bucketTopic, version.Quincy)) }) }
explode_data.jsonl/69710
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1490 }
[ 2830, 3393, 26406, 10516, 32701, 1155, 353, 8840, 836, 8, 341, 1444, 391, 4412, 839, 4202, 11646, 72676, 51386, 4412, 839, 38136, 340, 25078, 4202, 3160, 445, 1285, 3925, 8419, 17415, 497, 330, 5150, 1138, 36012, 703, 1669, 330, 3849, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMustParseAddressPanic(t *testing.T) { panicExamples := []struct{ address, scheme string }{ {"1.2.3.4", ""}, {"1.2.3.4", "https"}, } for i, panicExample := range panicExamples { t.Run(strconv.Itoa(i), func(t *testing.T) { defer func() { if r := recover(); r == nil { t.Fatal("expected panic") } }() mustParseAddress(panicExample.address, panicExample.scheme) }) } }
explode_data.jsonl/72009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 31776, 14463, 4286, 47, 31270, 1155, 353, 8840, 836, 8, 341, 30764, 40381, 1669, 3056, 1235, 90, 2621, 11, 12859, 914, 335, 515, 197, 197, 4913, 16, 13, 17, 13, 18, 13, 19, 497, 77496, 197, 197, 4913, 16, 13, 17, 13, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMatFromBytesWithEmptyByteSlise(t *testing.T) { _, err := NewMatFromBytes(600, 800, MatTypeCV8U, []byte{}) if err == nil { t.Error("TestMatFromBytesWithEmptyByteSlise: " + "must fail because of an empty byte slise") } if !strings.Contains(err.Error(), ErrEmptyByteSlice.Error()) { t.Errorf("TestMatFromBytesWithEmptyByteSlise: "+ "error must contain the following description: "+ "%v, but have: %v", ErrEmptyByteSlice, err) } }
explode_data.jsonl/81685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 11575, 3830, 7078, 2354, 3522, 7153, 7442, 1064, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 1532, 11575, 3830, 7078, 7, 21, 15, 15, 11, 220, 23, 15, 15, 11, 6867, 929, 19589, 23, 52, 11, 3056, 3782, 37790, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_PanicHandler(t *testing.T) { app := New(Options{}) app.GET("/string", func(c Context) error { panic("string boom") }) app.GET("/error", func(c Context) error { panic(fmt.Errorf("error boom")) }) table := []struct { path string expected string }{ {"/string", "string boom"}, {"/error", "error boom"}, } const stack = `github.com/gobuffalo/buffalo.Test_PanicHandler` w := httptest.New(app) for _, tt := range table { t.Run(tt.path, func(st *testing.T) { r := require.New(st) res := w.HTML(tt.path).Get() r.Equal(500, res.Code) body := res.Body.String() r.Contains(body, tt.expected) r.Contains(body, stack) }) } }
explode_data.jsonl/82183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 1088, 31270, 3050, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1532, 7, 3798, 37790, 28236, 17410, 4283, 917, 497, 2915, 1337, 9608, 8, 1465, 341, 197, 30764, 445, 917, 29745, 1138, 197, 3518, 28236, 17410, 4283, 841, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestVerifyVoteExtensionNotCalledOnAbsentPrecommit drives one consensus
// round with mocked ABCI responses and asserts that VerifyVoteExtension is
// never invoked for a validator (vss[1]) that did not precommit the block.
func TestVerifyVoteExtensionNotCalledOnAbsentPrecommit(t *testing.T) { config := configSetup(t) ctx, cancel := context.WithCancel(context.Background()) defer cancel() m := abcimocks.NewApplication(t) m.On("ProcessProposal", mock.Anything, mock.Anything).Return(&abci.ResponseProcessProposal{Status: abci.ResponseProcessProposal_ACCEPT}, nil) m.On("PrepareProposal", mock.Anything, mock.Anything).Return(&abci.ResponsePrepareProposal{}, nil) m.On("ExtendVote", mock.Anything, mock.Anything).Return(&abci.ResponseExtendVote{ VoteExtension: []byte("extension"), }, nil) m.On("VerifyVoteExtension", mock.Anything, mock.Anything).Return(&abci.ResponseVerifyVoteExtension{ Status: abci.ResponseVerifyVoteExtension_ACCEPT, }, nil) m.On("FinalizeBlock", mock.Anything, mock.Anything).Return(&abci.ResponseFinalizeBlock{}, nil).Maybe() cs1, vss := makeState(ctx, t, makeStateArgs{config: config, application: m}) height, round := cs1.Height, cs1.Round cs1.state.ConsensusParams.ABCI.VoteExtensionsEnableHeight = cs1.Height proposalCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryCompleteProposal) newRoundCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryNewRound) pv1, err := cs1.privValidator.GetPubKey(ctx) require.NoError(t, err) addr := pv1.Address() voteCh := subscribeToVoter(ctx, t, cs1, addr) startTestRound(ctx, cs1, cs1.Height, round) ensureNewRound(t, newRoundCh, height, round) ensureNewProposal(t, proposalCh, height, round) rs := cs1.GetRoundState() blockID := types.BlockID{ Hash: rs.ProposalBlock.Hash(), PartSetHeader: rs.ProposalBlockParts.Header(), } signAddVotes(ctx, t, cs1, tmproto.PrevoteType, config.ChainID(), blockID, vss...) 
ensurePrevoteMatch(t, voteCh, height, round, blockID.Hash) ensurePrecommit(t, voteCh, height, round) m.AssertCalled(t, "ExtendVote", mock.Anything, &abci.RequestExtendVote{ Height: height, Hash: blockID.Hash, }) m.On("Commit", mock.Anything).Return(&abci.ResponseCommit{}, nil).Maybe() signAddVotes(ctx, t, cs1, tmproto.PrecommitType, config.ChainID(), blockID, vss[2:]...) ensureNewRound(t, newRoundCh, height+1, 0) m.AssertExpectations(t) // vss[1] did not issue a precommit for the block, ensure that a vote extension // for its address was not sent to the application. pv, err := vss[1].GetPubKey(ctx) require.NoError(t, err) addr = pv.Address() m.AssertNotCalled(t, "VerifyVoteExtension", ctx, &abci.RequestVerifyVoteExtension{ Hash: blockID.Hash, ValidatorAddress: addr, Height: height, VoteExtension: []byte("extension"), }) }
explode_data.jsonl/54281
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 966 }
[ 2830, 3393, 32627, 41412, 12049, 2623, 20960, 1925, 80251, 4703, 17413, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 2193, 21821, 1155, 340, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 2109, 1669, 39022, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateStorageAccountName(t *testing.T) { tests := []struct { prefix string }{ { prefix: "", }, { prefix: "pvc", }, { prefix: "1234512345123451234512345", }, } for _, test := range tests { accountName := generateStorageAccountName(test.prefix) if len(accountName) > storageAccountNameMaxLength || len(accountName) < 3 { t.Errorf("input prefix: %s, output account name: %s, length not in [3,%d]", test.prefix, accountName, storageAccountNameMaxLength) } for _, char := range accountName { if (char < 'a' || char > 'z') && (char < '0' || char > '9') { t.Errorf("input prefix: %s, output account name: %s, there is non-digit or non-letter(%q)", test.prefix, accountName, char) break } } } }
explode_data.jsonl/7452
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 299 }
[ 2830, 3393, 31115, 5793, 7365, 675, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 3223, 5060, 914, 198, 197, 59403, 197, 197, 515, 298, 3223, 5060, 25, 8324, 197, 197, 1583, 197, 197, 515, 298, 3223, 5060, 25, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
// TestVerifyChallengeTxThreshold_disallowsAdditionalOpsOfOtherTypes builds a
// challenge transaction containing a non-ManageData operation (BumpSequence)
// and asserts VerifyChallengeTxThreshold rejects it with the expected error
// and reports no signers.
func TestVerifyChallengeTxThreshold_disallowsAdditionalOpsOfOtherTypes(t *testing.T) { serverKP := newKeypair0() clientKP := newKeypair1() txSource := NewSimpleAccount(serverKP.Address(), -1) op1 := ManageData{ SourceAccount: clientKP.Address(), Name: "testanchor.stellar.org auth", Value: []byte(base64.StdEncoding.EncodeToString(make([]byte, 48))), } op2 := BumpSequence{ SourceAccount: txSource.AccountID, BumpTo: 0, } webAuthDomainOp := ManageData{ SourceAccount: serverKP.Address(), Name: "web_auth_domain", Value: []byte("testwebauth.stellar.org"), } tx64, err := newSignedTransaction( TransactionParams{ SourceAccount: &txSource, IncrementSequenceNum: true, Operations: []Operation{&op1, &op2, &webAuthDomainOp}, BaseFee: MinBaseFee, Timebounds: NewTimeout(1000), }, network.TestNetworkPassphrase, serverKP, clientKP, ) assert.NoError(t, err) threshold := Threshold(1) signerSummary := SignerSummary{ clientKP.Address(): 1, } signersFound, err := VerifyChallengeTxThreshold(tx64, serverKP.Address(), network.TestNetworkPassphrase, "testwebauth.stellar.org", []string{"testanchor.stellar.org"}, threshold, signerSummary) assert.Empty(t, signersFound) assert.EqualError(t, err, "operation type should be manage_data") }
explode_data.jsonl/20741
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 32627, 62078, 31584, 37841, 9932, 55867, 29019, 38904, 2124, 11409, 4173, 1155, 353, 8840, 836, 8, 341, 41057, 65036, 1669, 501, 6608, 1082, 1310, 15, 741, 25291, 65036, 1669, 501, 6608, 1082, 1310, 16, 741, 46237, 3608, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseSwitchStatementSingleCase(t *testing.T) { p := createParser(`switch x { case 5{}}`) bvmUtils.Assert(t, isSwitchStatement(p), "should detect switch statement") parseSwitchStatement(p) }
explode_data.jsonl/49720
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 14463, 16837, 8636, 10888, 4207, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1855, 6570, 5809, 17338, 856, 314, 1142, 220, 20, 90, 3417, 24183, 2233, 7338, 4209, 11711, 1155, 11, 374, 16837, 8636, 1295, 701, 330, 5445, 11140, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestForward(t *testing.T) { rs := NewRuleSet([]Rule{ForwardRule}) result, rule := rs.Apply(&cb.Envelope{}) if result != Forward { t.Fatalf("Should have forwarded") } if rule != nil { t.Fatalf("Forwarded but rule is set") } }
explode_data.jsonl/11106
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 25925, 1155, 353, 8840, 836, 8, 341, 41231, 1669, 1532, 11337, 1649, 10556, 11337, 90, 25925, 11337, 3518, 9559, 11, 5912, 1669, 10036, 36051, 2099, 7221, 22834, 18853, 37790, 743, 1102, 961, 22164, 341, 197, 3244, 30762, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestTxnRestartCount forces a transaction refresh to fail (by writing its
// read key externally at a higher timestamp) so the commit triggers a retry
// error, then checks the abort/restart metrics were incremented.
func TestTxnRestartCount(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) readKey := []byte("read") writeKey := []byte("write") value := []byte("value") ctx := context.Background() s, metrics, cleanupFn := setupMetricsTest(t) defer cleanupFn() // Start a transaction and read a key that we're going to modify outside the // txn. This ensures that refreshing the txn will not succeed, so a restart // will be necessary. txn := kv.NewTxn(ctx, s.DB, 0 /* gatewayNodeID */) if _, err := txn.Get(ctx, readKey); err != nil { t.Fatal(err) } // Write the read key outside of the transaction, at a higher timestamp, which // will necessitate a txn restart when the original read key span is updated. if err := s.DB.Put(ctx, readKey, value); err != nil { t.Fatal(err) } // Outside of the transaction, read the same key as will be // written within the transaction. This means that future // attempts to write will forward the txn timestamp. if _, err := s.DB.Get(ctx, writeKey); err != nil { t.Fatal(err) } // This put will lay down an intent, txn write timestamp will increase beyond // the read timestamp. if err := txn.Put(ctx, writeKey, value); err != nil { t.Fatal(err) } proto := txn.TestingCloneTxn() if proto.WriteTimestamp.LessEq(proto.ReadTimestamp) { t.Errorf("expected timestamp to increase: %s", proto) } // Wait for heartbeat to start. tc := txn.Sender().(*TxnCoordSender) testutils.SucceedsSoon(t, func() error { if !tc.IsTracking() { return errors.New("expected heartbeat to start") } return nil }) // Commit (should cause restart metric to increase). err := txn.CommitOrCleanup(ctx) assertTransactionRetryError(t, err) checkTxnMetrics(t, metrics, "restart txn", 0, 0, 1 /* aborts */, 1 /* restarts */) }
explode_data.jsonl/76896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 627 }
[ 2830, 3393, 31584, 77, 59354, 2507, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 692, 37043, 1592, 1669, 3056, 3782, 445, 878, 1138, 24945, 1592, 1669, 3056, 378...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestServer(t *testing.T) { port := "7275" name := "test" items := []string{"apple", "banana", "cherry", "date", "elderberry", "fig"} server := gs.NewServer(port) ech := make(chan error) go func() { defer close(ech) if err := server.ListenAndServe(); err != nil { ech <- err } }() select { case err := <-ech: t.Fatal(err) default: } // wait for port alloc try := 1 for { conn, err := net.DialTimeout("tcp", ":"+port, 100*time.Millisecond) if try > 3 { t.Fatalf("Failed to start RPC server: %s", err) } if conn != nil { conn.Close() break } time.Sleep(100 * time.Millisecond) try++ } client, err := rpc.Dial("tcp", ":"+port) if err != nil { t.Fatal(err) } defer client.Close() req := &gh.Request{Name: name} resp := &gh.Response{} t.Run("Create", func(t *testing.T) { if err = client.Call("goldberg.Create", req, resp); err != nil { t.Error(err) } expect := "ok" if resp.Message != expect { t.Errorf("Expected response %q, got %q", expect, resp.Message) } }) t.Run("Enqueue", func(t *testing.T) { for i, item := range items { r := &gh.Request{Name: name, Item: item} if err = client.Call("goldberg.PushBack", r, resp); err != nil { t.Error(err) } expect := "ok" if resp.Message != expect { t.Errorf("Expected response %q, got %q", expect, resp.Message) } if err = client.Call("goldberg.Len", req, resp); err != nil { t.Error(err) } expect = strconv.Itoa(i + 1) if resp.Message != expect { t.Errorf("Expected response %q, got %q", expect, resp.Message) } } }) t.Run("Dequeue", func(t *testing.T) { for i, item := range items { if err = client.Call("goldberg.PopFront", req, resp); err != nil { t.Error(err) } if resp.Message != item { t.Errorf("Expected response %q, got %q", item, resp.Message) } if err = client.Call("goldberg.Len", req, resp); err != nil { t.Error(err) } expect := strconv.Itoa(len(items) - i - 1) if resp.Message != expect { t.Errorf("Expected response %q, got %q", expect, resp.Message) } } }) t.Run("Empty", func(t *testing.T) { if err = client.Call("goldberg.Len", req, 
resp); err != nil { t.Error(err) } expect := "0" if resp.Message != expect { t.Errorf("Expected response %q, got %q", expect, resp.Message) } }) }
explode_data.jsonl/73207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1028 }
[ 2830, 3393, 5475, 1155, 353, 8840, 836, 8, 341, 52257, 1669, 330, 22, 17, 22, 20, 698, 11609, 1669, 330, 1944, 698, 46413, 1669, 3056, 917, 4913, 22377, 497, 330, 87747, 497, 330, 331, 5400, 497, 330, 1028, 497, 330, 66976, 15357, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStripSlashesWithNilContext(t *testing.T) { r := http.NewServeMux() r.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("root")) }) r.HandleFunc("/accounts", func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("accounts")) }) r.HandleFunc("/accounts/admin", func(w http.ResponseWriter, r *http.Request) { w.Write([]byte("admin")) }) ts := httptest.NewServer(StripSlashes(r)) defer ts.Close() if _, resp := testRequest(t, ts, "GET", "/", nil); resp != "root" { t.Fatalf(resp) } if _, resp := testRequest(t, ts, "GET", "//", nil); resp != "root" { t.Fatalf(resp) } if _, resp := testRequest(t, ts, "GET", "/accounts", nil); resp != "accounts" { t.Fatalf(resp) } if _, resp := testRequest(t, ts, "GET", "/accounts/", nil); resp != "accounts" { t.Fatalf(resp) } if _, resp := testRequest(t, ts, "GET", "/accounts/admin", nil); resp != "admin" { t.Fatalf(resp) } if _, resp := testRequest(t, ts, "GET", "/accounts/admin/", nil); resp != "admin" { t.Fatalf(resp) } }
explode_data.jsonl/20409
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 442 }
[ 2830, 3393, 5901, 7442, 14051, 2354, 19064, 1972, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1758, 7121, 60421, 44, 2200, 2822, 7000, 63623, 35460, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6692, 4073, 10556, 3782, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidation(t *testing.T) { a := NewArgs() if err := a.validate(); err != nil { t.Errorf("Expecting to validate but failed with: %v", err) } a.AdapterWorkerPoolSize = -1 if err := a.validate(); err == nil { t.Errorf("Got unexpected success") } a = NewArgs() a.APIWorkerPoolSize = -1 if err := a.validate(); err == nil { t.Errorf("Got unexpected success") } a = NewArgs() a.ExpressionEvalCacheSize = -1 if err := a.validate(); err == nil { t.Errorf("Got unexpected success") } }
explode_data.jsonl/24308
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 13799, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 1532, 4117, 2822, 743, 1848, 1669, 264, 19520, 2129, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 17536, 287, 311, 9593, 714, 4641, 448, 25, 1018, 85, 497, 1848, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestWaitForDeployment_timeout(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) { service := models.Service{ Deployments: []models.Deployment{ {DesiredCount: 1, RunningCount: 0}, }, } MarshalAndWrite(t, w, service, 200) } client, server := newClientAndServer(handler) defer server.Close() if _, err := client.WaitForDeployment("id", time.Millisecond); err == nil { t.Fatal("Error was nil!") } }
explode_data.jsonl/24186
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 14190, 2461, 75286, 20537, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 52934, 1669, 4119, 13860, 515, 298, 197, 69464, 1368, 25, 3056, 6507, 34848, 39130, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPKCEReuqest(t *testing.T) { t.Parallel() for k, m := range fositeStores { t.Run(fmt.Sprintf("case=%s", k), TestHelperCreateGetDeletePKCERequestSession(m)) } }
explode_data.jsonl/54420
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 22242, 34, 640, 20128, 80, 477, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 595, 11, 296, 1669, 2088, 48390, 632, 69026, 341, 197, 3244, 16708, 28197, 17305, 445, 5638, 7846, 82, 497, 595, 701, 3393, 5511, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestRangeRemoval(t *testing.T) { bm := NewBitmap() bm.Add(1) bm.AddRange(21, 26) bm.AddRange(9, 14) bm.RemoveRange(11, 16) bm.RemoveRange(1, 26) c := bm.GetCardinality() assert.EqualValues(t, 0, c) bm.AddRange(1, 10000) c = bm.GetCardinality() assert.EqualValues(t, 10000-1, c) bm.RemoveRange(1, 10000) c = bm.GetCardinality() assert.EqualValues(t, 0, c) }
explode_data.jsonl/20332
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 6046, 6590, 13516, 1155, 353, 8840, 836, 8, 341, 2233, 76, 1669, 1532, 16773, 741, 2233, 76, 1904, 7, 16, 340, 2233, 76, 19672, 7, 17, 16, 11, 220, 17, 21, 340, 2233, 76, 19672, 7, 24, 11, 220, 16, 19, 340, 2233, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestList(t *testing.T) { c := aetesting.FakeSingleContext(t, "modules", "GetModules", func(req *pb.GetModulesRequest, res *pb.GetModulesResponse) error { res.Module = []string{"default", "mod1"} return nil }) got, err := List(c) if err != nil { t.Fatalf("List: %v", err) } want := []string{"default", "mod1"} if !reflect.DeepEqual(got, want) { t.Errorf("List = %v, want %v", got, want) } }
explode_data.jsonl/54562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 852, 1155, 353, 8840, 836, 8, 972, 1444, 1669, 264, 57824, 287, 991, 726, 10888, 1972, 1155, 11, 330, 11525, 497, 330, 1949, 28201, 497, 2915, 6881, 353, 16650, 2234, 28201, 1900, 11, 592, 353, 16650, 2234, 28201, 2582, 8,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetTextErrorNotice(t *testing.T) { readFile = func(name string) ([]byte, error) { if name == "LICENSE" { return nil, nil } return nil, errors.New("test error") } provider := licenseProvider{} _, err := provider.GetText() if err == nil { t.Error("Expected error but was nil") } }
explode_data.jsonl/56706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 1949, 1178, 1454, 34193, 1155, 353, 8840, 836, 8, 341, 37043, 1703, 284, 2915, 3153, 914, 8, 34923, 3782, 11, 1465, 8, 341, 197, 743, 829, 621, 330, 64368, 1, 341, 298, 853, 2092, 11, 2092, 198, 197, 197, 532, 197, 853...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreate(t *testing.T) { storage, _, server := newStorage(t) defer server.Terminate(t) defer storage.Store.DestroyFunc() test := genericregistrytest.New(t, storage.Store) ingress := validIngress() noDefaultBackendAndRules := validIngress() noDefaultBackendAndRules.Spec.Backend = &networking.IngressBackend{} noDefaultBackendAndRules.Spec.Rules = []networking.IngressRule{} badPath := validIngress() badPath.Spec.Rules = toIngressRules(map[string]IngressRuleValues{ "foo.bar.com": {"/invalid[": "svc"}}) test.TestCreate( // valid ingress, noDefaultBackendAndRules, badPath, ) }
explode_data.jsonl/47441
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 4021, 1155, 353, 8840, 836, 8, 341, 197, 16172, 11, 8358, 3538, 1669, 501, 5793, 1155, 340, 16867, 3538, 836, 261, 34016, 1155, 340, 16867, 5819, 38047, 57011, 9626, 741, 18185, 1669, 13954, 29172, 1944, 7121, 1155, 11, 5819...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAllUsers(t *testing.T) { system := getSystem() assert.NotNil(t, system) allUsers, err := system.GetUser() assert.Nil(t, err) assert.NotZero(t, len(allUsers)) }
explode_data.jsonl/75169
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 1949, 2403, 7137, 1155, 353, 8840, 836, 8, 341, 40293, 1669, 633, 2320, 741, 6948, 93882, 1155, 11, 1849, 692, 50960, 7137, 11, 1848, 1669, 1849, 78179, 741, 6948, 59678, 1155, 11, 1848, 340, 6948, 15000, 17999, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1