text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestPassthroughTraffic(t *testing.T) { calls := map[string]simulation.Call{} for port := 80; port < 87; port++ { for _, call := range []simulation.Call{ {Port: port, Protocol: simulation.HTTP, TLS: simulation.Plaintext, HostHeader: "foo"}, {Port: port, Protocol: simulation.HTTP, TLS: simulation.TLS, HostHeader: "foo"}, {Port: port, Protocol: simulation.HTTP, TLS: simulation.TLS, HostHeader: "foo", Alpn: "http/1.1"}, {Port: port, Protocol: simulation.TCP, TLS: simulation.Plaintext, HostHeader: "foo"}, {Port: port, Protocol: simulation.HTTP2, TLS: simulation.TLS, HostHeader: "foo"}, } { suffix := "" if call.Alpn != "" { suffix = "-" + call.Alpn } calls[fmt.Sprintf("%v-%v-%v%v", call.Protocol, call.TLS, port, suffix)] = call } } ports := ` ports: - name: http number: 80 protocol: HTTP - name: auto number: 81 - name: tcp number: 82 protocol: TCP - name: tls number: 83 protocol: TLS - name: https number: 84 protocol: HTTPS - name: grpc number: 85 protocol: GRPC - name: h2 number: 86 protocol: HTTP2` // TODO: https://github.com/istio/istio/issues/26079 this should be empty list expectedFailures := sets.NewSet( "http-tls-80-http/1.1", "http-tls-85-http/1.1", "http-tls-86-http/1.1", ) withoutVipExpectedFailures := sets.NewSet( "http-tls-80-http/1.1", "http-tls-81-http/1.1", "http-tls-85-http/1.1", "http-tls-86-http/1.1", ) isHTTPPort := func(p int) bool { switch p { case 80, 85, 86: return true default: return false } } isAutoPort := func(p int) bool { switch p { case 81: return true default: return false } } for _, tp := range []meshconfig.MeshConfig_OutboundTrafficPolicy_Mode{ meshconfig.MeshConfig_OutboundTrafficPolicy_REGISTRY_ONLY, meshconfig.MeshConfig_OutboundTrafficPolicy_ALLOW_ANY, } { t.Run(tp.String(), func(t *testing.T) { o := xds.FakeOptions{ MeshConfig: func() *meshconfig.MeshConfig { m := mesh.DefaultMeshConfig() m.OutboundTrafficPolicy.Mode = tp return &m }(), } expectedCluster := map[meshconfig.MeshConfig_OutboundTrafficPolicy_Mode]string{ 
meshconfig.MeshConfig_OutboundTrafficPolicy_REGISTRY_ONLY: util.BlackHoleCluster, meshconfig.MeshConfig_OutboundTrafficPolicy_ALLOW_ANY: util.PassthroughCluster, }[tp] t.Run("with VIP", func(t *testing.T) { testCalls := []simulation.Expect{} for name, call := range calls { e := simulation.Expect{ Name: name, Call: call, Result: simulation.Result{ ClusterMatched: expectedCluster, }, } // For blackhole, we will 502 where possible instead of blackhole cluster // This only works for HTTP on HTTP if expectedCluster == util.BlackHoleCluster && call.IsHTTP() && isHTTPPort(call.Port) { e.Result.ClusterMatched = "" e.Result.VirtualHostMatched = util.BlackHole } if expectedFailures.Contains(name) { e.Result.Error = simulation.ErrProtocolError e.Result.ClusterMatched = "" } testCalls = append(testCalls, e) } sort.Slice(testCalls, func(i, j int) bool { return testCalls[i].Name < testCalls[j].Name }) runSimulationTest(t, nil, o, simulationTest{ config: ` apiVersion: networking.istio.io/v1alpha3 kind: ServiceEntry metadata: name: se spec: hosts: - istio.io addresses: [1.2.3.4] location: MESH_EXTERNAL resolution: DNS` + ports, calls: testCalls, }) }) t.Run("without VIP", func(t *testing.T) { testCalls := []simulation.Expect{} for name, call := range calls { e := simulation.Expect{ Name: name, Call: call, Result: simulation.Result{ ClusterMatched: expectedCluster, }, } // For blackhole, we will 502 where possible instead of blackhole cluster // This only works for HTTP on HTTP if expectedCluster == util.BlackHoleCluster && call.IsHTTP() && (isHTTPPort(call.Port) || isAutoPort(call.Port)) { e.Result.ClusterMatched = "" e.Result.VirtualHostMatched = util.BlackHole } // TCP without a VIP will capture everything. 
// Auto without a VIP is similar, but HTTP happens to work because routing is done on header if call.Port == 82 || (call.Port == 81 && !call.IsHTTP()) { e.Result.Error = nil e.Result.ClusterMatched = "" } if withoutVipExpectedFailures.Contains(name) { e.Result.Error = simulation.ErrProtocolError e.Result.ClusterMatched = "" } testCalls = append(testCalls, e) } sort.Slice(testCalls, func(i, j int) bool { return testCalls[i].Name < testCalls[j].Name }) runSimulationTest(t, nil, o, simulationTest{ config: ` apiVersion: networking.istio.io/v1alpha3 kind: ServiceEntry metadata: name: se spec: hosts: - istio.io location: MESH_EXTERNAL resolution: DNS` + ports, calls: testCalls, }) }) }) } }
explode_data.jsonl/7967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2163 }
[ 2830, 3393, 70911, 86901, 87229, 1155, 353, 8840, 836, 8, 341, 1444, 5583, 1669, 2415, 14032, 60, 83772, 27017, 16094, 2023, 2635, 1669, 220, 23, 15, 26, 2635, 366, 220, 23, 22, 26, 2635, 1027, 341, 197, 2023, 8358, 1618, 1669, 2088, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAutomountIsBackwardsCompatible(t *testing.T) { ns := "myns" tokenName := "token-name" serviceAccountName := DefaultServiceAccountName serviceAccountUID := "12345" defaultTokenName := "default-token-abc123" expectedVolume := api.Volume{ Name: defaultTokenName, VolumeSource: api.VolumeSource{ Secret: &api.SecretVolumeSource{ SecretName: defaultTokenName, }, }, } expectedVolumeMount := api.VolumeMount{ Name: defaultTokenName, ReadOnly: true, MountPath: DefaultAPITokenMountPath, } admit := NewServiceAccount() admit.generateName = testGenerateName admit.featureGate = deprecationEnabledFeature informerFactory := informers.NewSharedInformerFactory(nil, controller.NoResyncPeriodFunc()) admit.SetExternalKubeInformerFactory(informerFactory) admit.MountServiceAccountToken = true admit.RequireAPIToken = true // Add the default service account for the ns with a token into the cache informerFactory.Core().V1().ServiceAccounts().Informer().GetStore().Add(&corev1.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: serviceAccountName, Namespace: ns, UID: types.UID(serviceAccountUID), }, Secrets: []corev1.ObjectReference{ {Name: tokenName}, }, }) // Add a token for the service account into the cache informerFactory.Core().V1().Secrets().Informer().GetStore().Add(&corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: tokenName, Namespace: ns, Annotations: map[string]string{ corev1.ServiceAccountNameKey: serviceAccountName, corev1.ServiceAccountUIDKey: serviceAccountUID, }, }, Type: corev1.SecretTypeServiceAccountToken, Data: map[string][]byte{ api.ServiceAccountTokenKey: []byte("token-data"), }, }) pod := &api.Pod{ Spec: api.PodSpec{ Containers: []api.Container{ { Name: "c-1", VolumeMounts: []api.VolumeMount{ { Name: defaultTokenName, MountPath: DefaultAPITokenMountPath, ReadOnly: true, }, }, }, }, Volumes: []api.Volume{ { Name: defaultTokenName, VolumeSource: api.VolumeSource{ Secret: &api.SecretVolumeSource{ SecretName: defaultTokenName, }, }, }, }, }, } attrs := 
admission.NewAttributesRecord(pod, nil, api.Kind("Pod").WithVersion("version"), ns, "myname", api.Resource("pods").WithVersion("version"), "", admission.Create, false, nil) err := admit.Admit(attrs) if err != nil { t.Errorf("Unexpected error: %v", err) } if pod.Spec.ServiceAccountName != DefaultServiceAccountName { t.Errorf("Expected service account %s assigned, got %s", DefaultServiceAccountName, pod.Spec.ServiceAccountName) } _ = expectedVolume _ = expectedVolumeMount if len(pod.Spec.Volumes) != 1 { t.Fatalf("Expected 1 volume, got %d", len(pod.Spec.Volumes)) } if !reflect.DeepEqual(expectedVolume, pod.Spec.Volumes[0]) { t.Fatalf("Expected\n\t%#v\ngot\n\t%#v", expectedVolume, pod.Spec.Volumes[0]) } if len(pod.Spec.Containers[0].VolumeMounts) != 1 { t.Fatalf("Expected 1 volume mount, got %d", len(pod.Spec.Containers[0].VolumeMounts)) } if !reflect.DeepEqual(expectedVolumeMount, pod.Spec.Containers[0].VolumeMounts[0]) { t.Fatalf("Expected\n\t%#v\ngot\n\t%#v", expectedVolumeMount, pod.Spec.Containers[0].VolumeMounts[0]) } }
explode_data.jsonl/61358
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1281 }
[ 2830, 3393, 41072, 629, 3872, 3707, 4014, 29161, 1155, 353, 8840, 836, 8, 341, 84041, 1669, 330, 76, 1872, 82, 698, 43947, 675, 1669, 330, 5839, 11494, 698, 52934, 7365, 675, 1669, 7899, 1860, 7365, 675, 198, 52934, 7365, 6463, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestStress(t *testing.T) { dur := 3 * time.Second if testing.Short() { dur = 100 * time.Millisecond } defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(4)) done := make(chan bool) finished := make(chan bool) go func() { sig := make(chan os.Signal, 1) Notify(sig, syscall.SIGUSR1) defer Stop(sig) Loop: for { select { case <-sig: case <-done: break Loop } } finished <- true }() go func() { Loop: for { select { case <-done: break Loop default: syscall.Kill(syscall.Getpid(), syscall.SIGUSR1) runtime.Gosched() } } finished <- true }() time.Sleep(dur) close(done) <-finished <-finished // When run with 'go test -cpu=1,2,4' SIGUSR1 from this test can slip // into subsequent TestSignal() causing failure. // Sleep for a while to reduce the possibility of the failure. time.Sleep(10 * time.Millisecond) }
explode_data.jsonl/46883
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 390 }
[ 2830, 3393, 623, 673, 1155, 353, 8840, 836, 8, 341, 2698, 324, 1669, 220, 18, 353, 882, 32435, 198, 743, 7497, 55958, 368, 341, 197, 2698, 324, 284, 220, 16, 15, 15, 353, 882, 71482, 198, 197, 532, 16867, 15592, 1224, 1898, 2954, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAtoi32(t *testing.T) { for i := range atoi32tests { test := &atoi32tests[i] out, ok := runtime.Atoi32(test.in) if test.out != out || test.ok != ok { t.Errorf("atoi32(%q) = (%v, %v) want (%v, %v)", test.in, out, ok, test.out, test.ok) } } }
explode_data.jsonl/79326
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 32, 52609, 18, 17, 1155, 353, 8840, 836, 8, 341, 2023, 600, 1669, 2088, 25227, 18, 17, 23841, 341, 197, 18185, 1669, 609, 51071, 18, 17, 23841, 989, 921, 197, 13967, 11, 5394, 1669, 15592, 67107, 18, 17, 8623, 1858, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRepository_GetChecks_RepositoriesServiceError(t *testing.T) { githubErr := errors.New("github error") mocksChecksService := new(mocks.ChecksService) mocksChecksService. On("ListCheckRunsForRef", Anything, AnythingOfType("string"), AnythingOfType("string"), AnythingOfType("string"), Anything). Return(&github.ListCheckRunsResults{}, nil, nil) mocksRepositoriesService := new(mocks.RepositoriesService) mocksRepositoriesService. On("ListStatuses", Anything, AnythingOfType("string"), AnythingOfType("string"), AnythingOfType("string"), Anything). Return(nil, nil, githubErr) repository := initRepository(t) if repository != nil { repository.checksService = mocksChecksService repository.repositoriesService = mocksRepositoriesService _, err := repository.GetChecks("test", "test", "master") if assert.Error(t, err) { assert.Contains(t, err.Error(), "github error") mocksChecksService.AssertNumberOfCalls(t, "ListCheckRunsForRef", 1) mocksChecksService.AssertExpectations(t) mocksRepositoriesService.AssertNumberOfCalls(t, "ListStatuses", 1) mocksRepositoriesService.AssertExpectations(t) } } }
explode_data.jsonl/36434
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 4624, 13614, 49820, 62, 44814, 1860, 1454, 1155, 353, 8840, 836, 8, 341, 3174, 3827, 7747, 1669, 5975, 7121, 445, 5204, 1465, 5130, 2109, 25183, 49820, 1860, 1669, 501, 1255, 25183, 10600, 82, 1860, 340, 2109, 25183, 49820, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOpenDeletedFileFails(t *testing.T) { testCases := []struct { name string // The original file is removed if changeFile is true. changeFile bool // The Merkle tree file is removed if changeMerkleFile is true. changeMerkleFile bool }{ { name: "FileOnly", changeFile: true, changeMerkleFile: false, }, { name: "MerkleOnly", changeFile: false, changeMerkleFile: true, }, { name: "FileAndMerkle", changeFile: true, changeMerkleFile: true, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { vfsObj, root, ctx, err := newVerityRoot(t, SHA256) if err != nil { t.Fatalf("newVerityRoot: %v", err) } filename := "verity-test-file" fd, _, err := newFileFD(ctx, t, vfsObj, root, filename, 0644) if err != nil { t.Fatalf("newFileFD: %v", err) } // Enable verity on the file. enableVerity(ctx, t, fd) if tc.changeFile { if err := dentryFromVD(t, root).unlinkLowerAt(ctx, vfsObj, filename); err != nil { t.Fatalf("UnlinkAt: %v", err) } } if tc.changeMerkleFile { if err := dentryFromVD(t, root).unlinkLowerMerkleAt(ctx, vfsObj, filename); err != nil { t.Fatalf("UnlinkAt: %v", err) } } // Ensure reopening the verity enabled file fails. if _, err = openVerityAt(ctx, vfsObj, root, filename, linux.O_RDONLY, linux.ModeRegular); err != syserror.EIO { t.Errorf("got OpenAt error: %v, expected EIO", err) } }) } }
explode_data.jsonl/56767
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 712 }
[ 2830, 3393, 5002, 26039, 1703, 37, 6209, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 197, 322, 576, 4024, 1034, 374, 6963, 421, 2297, 1703, 374, 830, 624, 197, 68380, 1703, 1807, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestIsParentPath(t *testing.T) { tt := []struct { parent string path string want bool }{ {".config", ".config", true}, {".config", ".bashrc", false}, {".local/share", ".local", true}, {".local/share", ".bashrc", false}, } for _, tc := range tt { t.Run(fmt.Sprintf("%s,%s", tc.parent, tc.path), func(t *testing.T) { got := isParentPath(tc.parent, tc.path) if got != tc.want { t.Errorf("unexpected result (got: %t, want %t)", got, tc.want) } }) } }
explode_data.jsonl/56643
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 3872, 8387, 1820, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 3056, 1235, 341, 197, 24804, 914, 198, 197, 26781, 256, 914, 198, 197, 50780, 256, 1807, 198, 197, 59403, 197, 197, 90, 3263, 1676, 497, 5933, 1676, 497, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReadArchive(t *testing.T) { // first create archive, that we will be able to read updateTestDir, _ := ioutil.TempDir("", "update") defer os.RemoveAll(updateTestDir) archive, err := WriteRootfsImageArchive(updateTestDir, RootfsImageStructOK) assert.NoError(t, err) assert.NotEqual(t, "", archive) // open archive file f, err := os.Open(archive) defer f.Close() assert.NoError(t, err) assert.NotNil(t, f) df, err := os.Create(path.Join(updateTestDir, "my_update")) rp := &parser.RootfsParser{W: df} defer df.Close() aReader := NewReader(f) aReader.Register(rp) p, err := aReader.Read() assert.NoError(t, err) assert.NotNil(t, df) df.Close() assert.Len(t, p, 1) rp, ok := p["0000"].(*parser.RootfsParser) assert.True(t, ok) assert.Len(t, aReader.GetCompatibleDevices(), 1) assert.Equal(t, "vexpress", aReader.GetCompatibleDevices()[0]) data, err := ioutil.ReadFile(path.Join(updateTestDir, "my_update")) assert.NoError(t, err) assert.Equal(t, "my first update", string(data)) assert.Equal(t, "vexpress", aReader.GetCompatibleDevices()[0]) }
explode_data.jsonl/35294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 432 }
[ 2830, 3393, 4418, 42502, 1155, 353, 8840, 836, 8, 341, 197, 322, 1156, 1855, 18132, 11, 429, 582, 686, 387, 2952, 311, 1349, 198, 27175, 2271, 6184, 11, 716, 1669, 43144, 65009, 6184, 19814, 330, 2386, 1138, 16867, 2643, 84427, 31540, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateApp(t *testing.T) { a := New() assert.NotNil(t, a) assert.IsType(t, (*App)(nil), a) assert.IsType(t, (*api.Api)(nil), a.Api) assert.IsType(t, (*Cmd)(nil), a.Cmd) }
explode_data.jsonl/46462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 4021, 2164, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 1532, 2822, 6948, 93882, 1155, 11, 264, 340, 6948, 4506, 929, 1155, 11, 4609, 2164, 2376, 8385, 701, 264, 340, 6948, 4506, 929, 1155, 11, 4609, 2068, 21044, 2376, 8385...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGetUsers(t *testing.T) { th := Setup().InitBasic() defer th.TearDown() Client := th.Client rusers, resp := Client.GetUsers(0, 60, "") CheckNoError(t, resp) for _, u := range rusers { CheckUserSanitization(t, u) } rusers, resp = Client.GetUsers(0, 60, resp.Etag) CheckEtag(t, rusers, resp) rusers, resp = Client.GetUsers(0, 1, "") CheckNoError(t, resp) if len(rusers) != 1 { t.Fatal("should be 1 per page") } rusers, resp = Client.GetUsers(1, 1, "") CheckNoError(t, resp) if len(rusers) != 1 { t.Fatal("should be 1 per page") } rusers, resp = Client.GetUsers(10000, 100, "") CheckNoError(t, resp) if len(rusers) != 0 { t.Fatal("should be no users") } // Check default params for page and per_page if _, err := Client.DoApiGet("/users", ""); err != nil { t.Fatal("should not have errored") } Client.Logout() _, resp = Client.GetUsers(0, 60, "") CheckUnauthorizedStatus(t, resp) }
explode_data.jsonl/21535
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 1949, 7137, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 7000, 4218, 11, 9039, 1669, 8423, 2234, 7137, 7, 15, 11, 220, 21, 15, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestServer_getClientIP(t *testing.T) { s := &Server{} _, err := s.getClientIP(context.TODO()) assert.Equal(t, err, ErrGetIPFailed) tcpAddr, _ := net.ResolveTCPAddr("tcp", "127.0.0.1:8080") ipAddress, err := s.getClientIP(peer.NewContext(context.Background(), &peer.Peer{Addr: tcpAddr})) assert.Nil(t, err) assert.Equal(t, ipAddress, "127.0.0.1") tcpAddr, err = net.ResolveTCPAddr("tcp", "[2000:0:0:0:0:0:0:1]:8080") assert.Nil(t, err) ipAddress, err = s.getClientIP(peer.NewContext(context.Background(), &peer.Peer{Addr: tcpAddr})) assert.Nil(t, err) assert.Equal(t, "2000::1", ipAddress) }
explode_data.jsonl/30077
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 277 }
[ 2830, 3393, 5475, 3062, 2959, 3298, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 5475, 16094, 197, 6878, 1848, 1669, 274, 73864, 3298, 5378, 90988, 2398, 6948, 12808, 1155, 11, 1848, 11, 15495, 1949, 3298, 9408, 692, 3244, 4672, 13986...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildURLShouldReturnAddressWithPortWhenDefined(t *testing.T) { url := buildURL(config.Endpoint{ APIKey: "bar", Host: "foo", Port: 1234, UseSSL: false, }) assert.Equal(t, "http://foo:1234/v1/input", url) }
explode_data.jsonl/82445
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 11066, 3144, 14996, 5598, 4286, 2354, 7084, 4498, 29361, 1155, 353, 8840, 836, 8, 341, 19320, 1669, 1936, 3144, 8754, 90409, 515, 197, 197, 7082, 1592, 25, 330, 2257, 756, 197, 197, 9296, 25, 256, 330, 7975, 756, 197, 9845...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExtensionXz(t *testing.T) { compression := Xz output := compression.Extension() if output != "tar.xz" { t.Fatalf("The extension of a bzip2 archive should be 'tar.xz'") } }
explode_data.jsonl/81977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 12049, 55, 89, 1155, 353, 8840, 836, 8, 341, 32810, 4011, 1669, 1599, 89, 198, 21170, 1669, 25111, 59715, 741, 743, 2550, 961, 330, 26737, 1993, 89, 1, 341, 197, 3244, 30762, 445, 785, 8894, 315, 264, 293, 9964, 17, 1813...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestNonResourceMatchURLEndsShort(t *testing.T) { test := &nonResourceMatchTest{ url: "first", matcher: "first/second", expectedResult: false, } test.run(t) }
explode_data.jsonl/45780
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 8121, 4783, 8331, 1511, 867, 81003, 12472, 1155, 353, 8840, 836, 8, 341, 18185, 1669, 609, 6280, 4783, 8331, 2271, 515, 197, 19320, 25, 310, 330, 3896, 756, 197, 2109, 28058, 25, 286, 330, 3896, 14, 5569, 756, 197, 42400, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSortByTraceID(t *testing.T) { traceID := &TraceID{ High: uint64(1), Low: uint64(1), } traceID2 := &TraceID{ High: uint64(2), Low: uint64(0), } traceID3 := &TraceID{ High: uint64(1), Low: uint64(0), } traces := []*TraceID{traceID, traceID2, traceID3} // Expect ascending order tracesExpected := []*TraceID{traceID3, traceID, traceID2} SortTraceIDs(traces) assert.EqualValues(t, tracesExpected, traces) }
explode_data.jsonl/44543
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 10231, 1359, 6550, 915, 1155, 353, 8840, 836, 8, 341, 65058, 915, 1669, 609, 6550, 915, 515, 197, 197, 11976, 25, 2622, 21, 19, 7, 16, 1326, 197, 15070, 363, 25, 220, 2622, 21, 19, 7, 16, 1326, 197, 532, 65058, 915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCancelOrder(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() || !canManipulateRealOrders { t.Skip("skipping test: api keys not set or canManipulateRealOrders set to false") } p, err := currency.NewPairFromString("EOS-USDT") if err != nil { t.Error(err) } fpair, err := b.FormatExchangeCurrency(p, asset.CoinMarginedFutures) if err != nil { t.Error(err) } err = b.CancelOrder(context.Background(), &order.Cancel{ AssetType: asset.CoinMarginedFutures, Pair: fpair, ID: "1234", }) if err != nil { t.Error(err) } p2, err := currency.NewPairFromString("BTC-USDT") if err != nil { t.Error(err) } fpair2, err := b.FormatExchangeCurrency(p2, asset.USDTMarginedFutures) if err != nil { t.Error(err) } err = b.CancelOrder(context.Background(), &order.Cancel{ AssetType: asset.USDTMarginedFutures, Pair: fpair2, ID: "1234", }) if err != nil { t.Error(err) } }
explode_data.jsonl/76671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 419 }
[ 2830, 3393, 9269, 4431, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 1369, 753, 4814, 92876, 6334, 12768, 24898, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 25, 6330, 6894, 537, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestDagPut(t *testing.T) { is := is.New(t) s := NewShell(shellUrl) c, err := s.DagPut(`{"x": "abc","y":"def"}`, "dag-json", "dag-cbor") is.Nil(err) is.Equal(c, "bafyreidrm3r2k6vlxqp2fk47sboeycf7apddib47w7cyagrajtpaxxl2pi") }
explode_data.jsonl/61089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 35, 351, 19103, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 374, 7121, 1155, 340, 1903, 1669, 1532, 25287, 93558, 2864, 692, 1444, 11, 1848, 1669, 274, 909, 351, 19103, 5809, 4913, 87, 788, 330, 13683, 2198, 88, 3252, 750, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBExample3(t *testing.T) { input := "^v^v^v^v^v" expected := 11 actual := answerB(input) if expected != actual { t.Fail() } }
explode_data.jsonl/16878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 62 }
[ 2830, 3393, 33, 13314, 18, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 39915, 85, 61, 85, 61, 85, 61, 85, 61, 85, 698, 42400, 1669, 220, 16, 16, 198, 88814, 1669, 4226, 33, 5384, 340, 743, 3601, 961, 5042, 341, 197, 3244, 57243, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestBubbleSort(t *testing.T) { for _, tt := range sortCases { t.Run(tt.name, func(t *testing.T) { BubbleSort(tt.args.target, tt.args.comp) if !reflect.DeepEqual(tt.args.target, tt.exp) { t.Errorf("Expect : %v, Got : %v", tt.exp, tt.args.target) } else { t.Logf("Result : %v", tt.args.target) } }) } }
explode_data.jsonl/74713
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 75778, 10231, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3378, 37302, 341, 197, 3244, 16708, 47152, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 12791, 14600, 10231, 47152, 16365, 6539, 11, 17853, 163...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReaderConfigErrors(t *testing.T) { client, err := NewClient(ClientOptions{ URL: lookupURL, }) assert.Nil(t, err) defer client.Close() consumer, err := client.CreateReader(ReaderOptions{ Topic: "my-topic", }) assert.Nil(t, consumer) assert.NotNil(t, err) consumer, err = client.CreateReader(ReaderOptions{ StartMessageID: EarliestMessageID(), }) assert.Nil(t, consumer) assert.NotNil(t, err) }
explode_data.jsonl/6381
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 5062, 2648, 13877, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 1532, 2959, 46851, 3798, 515, 197, 79055, 25, 18615, 3144, 345, 197, 8824, 6948, 59678, 1155, 11, 1848, 340, 16867, 2943, 10421, 2822, 37203, 11761, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFilesInDir(t *testing.T) { t.Run("all files with symbolic link", func(t *testing.T) { dir := "testdata/files_in_dir" script := dir + "/create_sym_link.sh" xt.OK(t, os.Chmod(script, 0700)) cmd := exec.Command(dir + "/create_sym_link.sh") xt.OK(t, cmd.Run()) readdir, err := ioutil.ReadDir(dir) xt.OK(t, err) var exp []string for _, f := range readdir { exp = append(exp, f.Name()) } files, err := FilesInDir(dir) xt.OK(t, err) xt.Eq(t, exp, files) }) }
explode_data.jsonl/6997
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 10809, 641, 6184, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 541, 3542, 448, 35296, 2656, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 48532, 1669, 330, 92425, 33220, 1243, 4334, 1837, 197, 86956, 1669, 5419, 488, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHDRF(t *testing.T) { klog.InitFlags(nil) flag.Set("v", "4") flag.Set("alsologtostderr", "true") s := options.NewServerOption() s.MinNodesToFind = 100 s.PercentageOfNodesToFind = 100 s.RegisterOptions() framework.RegisterPluginBuilder(PluginName, New) defer framework.CleanupPluginBuilders() tests := []struct { name string pgSpecs []pgSpec nodes []*v1.Node queues []*schedulingv1.Queue queueSpecs []queueSpec expected map[string]string }{ { name: "rescaling test", pgSpecs: []pgSpec{ { taskNum: 10, cpu: "1", mem: "1G", pg: "pg1", queue: "root-sci", }, { taskNum: 10, cpu: "1", mem: "0G", pg: "pg21", queue: "root-eng-dev", }, { taskNum: 10, cpu: "0", mem: "1G", pg: "pg22", queue: "root-eng-prod", }, }, nodes: []*v1.Node{util.BuildNode("n", util.BuildResourceList("10", "10G"), make(map[string]string))}, queueSpecs: []queueSpec{ { name: "root-sci", hierarchy: "root/sci", weights: "100/50", }, { name: "root-eng-dev", hierarchy: "root/eng/dev", weights: "100/50/50", }, { name: "root-eng-prod", hierarchy: "root/eng/prod", weights: "100/50/50", }, }, expected: map[string]string{ "pg1": "cpu 5000.00, memory 5000000000.00, nvidia.com/gpu 0.00", "pg21": "cpu 5000.00, memory 0.00, nvidia.com/gpu 0.00", "pg22": "cpu 0.00, memory 5000000000.00, nvidia.com/gpu 0.00", }, }, { name: "blocking nodes test", pgSpecs: []pgSpec{ { taskNum: 30, cpu: "1", mem: "0G", pg: "pg1", queue: "root-pg1", }, { taskNum: 30, cpu: "1", mem: "0G", pg: "pg2", queue: "root-pg2", }, { taskNum: 30, cpu: "1", mem: "0G", pg: "pg31", queue: "root-pg3-pg31", }, { taskNum: 30, cpu: "0", mem: "1G", pg: "pg32", queue: "root-pg3-pg32", }, { taskNum: 30, cpu: "0", mem: "1G", pg: "pg4", queue: "root-pg4", }, }, nodes: []*v1.Node{util.BuildNode("n", util.BuildResourceList("30", "30G"), make(map[string]string))}, queueSpecs: []queueSpec{ { name: "root-pg1", hierarchy: "root/pg1", weights: "100/25", }, { name: "root-pg2", hierarchy: "root/pg2", weights: "100/25", }, { name: "root-pg3-pg31", hierarchy: 
"root/pg3/pg31", weights: "100/25/50", }, { name: "root-pg3-pg32", hierarchy: "root/pg3/pg32", weights: "100/25/50", }, { name: "root-pg4", hierarchy: "root/pg4", weights: "100/25", }, }, expected: map[string]string{ "pg1": "cpu 10000.00, memory 0.00, nvidia.com/gpu 0.00", "pg2": "cpu 10000.00, memory 0.00, nvidia.com/gpu 0.00", "pg31": "cpu 10000.00, memory 0.00, nvidia.com/gpu 0.00", "pg32": "cpu 0.00, memory 15000000000.00, nvidia.com/gpu 0.00", "pg4": "cpu 0.00, memory 15000000000.00, nvidia.com/gpu 0.00", }, }, } for _, test := range tests { binder := &util.FakeBinder{ Binds: map[string]string{}, Channel: make(chan string), } schedulerCache := &cache.SchedulerCache{ Nodes: make(map[string]*api.NodeInfo), Jobs: make(map[api.JobID]*api.JobInfo), Queues: make(map[api.QueueID]*api.QueueInfo), Binder: binder, StatusUpdater: &util.FakeStatusUpdater{}, VolumeBinder: &util.FakeVolumeBinder{}, Recorder: record.NewFakeRecorder(100), } for _, node := range test.nodes { schedulerCache.AddNode(node) } for _, q := range test.queueSpecs { schedulerCache.AddQueueV1beta1( &schedulingv1.Queue{ ObjectMeta: metav1.ObjectMeta{ Name: q.name, Annotations: map[string]string{ schedulingv1.KubeHierarchyAnnotationKey: q.hierarchy, schedulingv1.KubeHierarchyWeightAnnotationKey: q.weights, }, }, Spec: schedulingv1.QueueSpec{ Weight: 1, }, }) } for _, pgSpec := range test.pgSpecs { pods := makePods(pgSpec.taskNum, pgSpec.cpu, pgSpec.mem, pgSpec.pg) for _, pod := range pods { schedulerCache.AddPod(pod) } schedulerCache.AddPodGroupV1beta1(&schedulingv1.PodGroup{ ObjectMeta: metav1.ObjectMeta{ Name: pgSpec.pg, Namespace: "default", }, Spec: schedulingv1.PodGroupSpec{ Queue: pgSpec.queue, }, }) } trueValue := true ssn := framework.OpenSession(schedulerCache, []conf.Tier{ { Plugins: []conf.PluginOption{ { Name: PluginName, EnabledHierarchy: &trueValue, EnabledQueueOrder: &trueValue, EnabledJobOrder: &trueValue, }, }, }, }, nil) defer framework.CloseSession(ssn) allocateAction := allocate.New() 
allocateAction.Execute(ssn) for _, job := range ssn.Jobs { if test.expected[job.Name] != job.Allocated.String() { t.Fatalf("%s: job %s expected resource %s, but got %s", test.name, job.Name, test.expected[job.Name], job.Allocated) } } } }
explode_data.jsonl/53869
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2893 }
[ 2830, 3393, 70288, 37, 1155, 353, 8840, 836, 8, 341, 16463, 839, 26849, 9195, 27907, 340, 30589, 4202, 445, 85, 497, 330, 19, 1138, 30589, 4202, 445, 1127, 1609, 83, 535, 67, 615, 497, 330, 1866, 1138, 1903, 1669, 2606, 7121, 5475, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func Test_NewReaderWithInvalidFile(t *testing.T) { createDir() meta, err := NewMeta(metaDir, metaDir, testlogpath, ModeDir, "", defautFileRetention) if err != nil { t.Error(err) } createInvalidSuffixFile(dir) defer destroyFile() sf, err := NewSeqFile(meta, dir, false, []string{".pid"}, `test-logkit.log-*ss`, WhenceOldest) if err != nil { t.Error(err) } if sf.currFile != "" { t.Errorf("exp emtpy file, but got %s", sf.currFile) } }
explode_data.jsonl/19693
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 39582, 5062, 2354, 7928, 1703, 1155, 353, 8840, 836, 8, 341, 39263, 6184, 741, 84004, 11, 1848, 1669, 1532, 12175, 45119, 6184, 11, 8823, 6184, 11, 1273, 839, 2343, 11, 14562, 6184, 11, 7342, 707, 2717, 1703, 86329, 340, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCopy(t *testing.T) { s, err := Run() ok(t, err) defer s.Close() c, err := proto.Dial(s.Addr()) ok(t, err) defer c.Close() t.Run("basic", func(t *testing.T) { s.Set("key1", "value") // should return 1 after a successful copy operation: must1(t, c, "COPY", "key1", "key2") s.CheckGet(t, "key2", "value") equals(t, "string", s.Type("key2")) }) // should return 0 when trying to copy a nonexistent key: t.Run("nonexistent key", func(t *testing.T) { must0(t, c, "COPY", "nosuch", "to") }) // should return 0 when trying to overwrite an existing key: t.Run("existing key", func(t *testing.T) { s.Set("existingkey", "value") s.Set("newkey", "newvalue") must0(t, c, "COPY", "newkey", "existingkey") // existing key value should remain unchanged: s.CheckGet(t, "existingkey", "value") }) t.Run("destination db", func(t *testing.T) { s.Set("akey1", "value") must1(t, c, "COPY", "akey1", "akey2", "DB", "2") s.Select(2) s.CheckGet(t, "akey2", "value") equals(t, "string", s.Type("akey2")) }) s.Select(0) t.Run("replace", func(t *testing.T) { s.Set("rkey1", "value") s.Set("rkey2", "another") must1(t, c, "COPY", "rkey1", "rkey2", "REPLACE") s.CheckGet(t, "rkey2", "value") equals(t, "string", s.Type("rkey2")) }) t.Run("direct", func(t *testing.T) { s.Set("d1", "value") ok(t, s.Copy(0, "d1", 0, "d2")) equals(t, "string", s.Type("d2")) s.CheckGet(t, "d2", "value") }) t.Run("errors", func(t *testing.T) { mustDo(t, c, "COPY", proto.Error(errWrongNumber("copy")), ) mustDo(t, c, "COPY", "foo", proto.Error(errWrongNumber("copy")), ) mustDo(t, c, "COPY", "foo", "bar", "baz", proto.Error(msgSyntaxError), ) }) }
explode_data.jsonl/44824
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 809 }
[ 2830, 3393, 12106, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 6452, 741, 59268, 1155, 11, 1848, 340, 16867, 274, 10421, 741, 1444, 11, 1848, 1669, 18433, 98462, 1141, 93626, 2398, 59268, 1155, 11, 1848, 340, 16867, 272, 10421, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test404WhenExplictlyMappedFileDoesNotExist(t *testing.T) { tmpdir, err := ioutil.TempDir("", "test_static_modifier_explicit_path_mapping_") if err != nil { t.Fatalf("ioutil.TempDir(): got %v, want no error", err) } //if err := os.MkdirAll(path.Join(tmpdir, "explicit/path"), 0777); err != nil { // t.Fatalf("os.Mkdir(): got %v, want no error", err) //} //if err := ioutil.WriteFile(path.Join(tmpdir, "explicit/path", "sfmtest.txt"), []byte("test file"), 0777); err != nil { // t.Fatalf("ioutil.WriteFile(): got %v, want no error", err) //} req, err := http.NewRequest("GET", "/sfmtest.txt", nil) if err != nil { t.Fatalf("NewRequest(): got %v, want no error", err) } _, remove, err := martian.TestContext(req, nil, nil) if err != nil { t.Fatalf("TestContext(): got %v, want no error", err) } defer remove() res := proxyutil.NewResponse(http.StatusOK, nil, req) mod := NewStaticModifier(tmpdir) if err := mod.ModifyRequest(req); err != nil { t.Fatalf("ModifyRequest(): got %v, want no error", err) } mod.SetExplicitPathMappings(map[string]string{"/sfmtest.txt": "/explicit/path/sfmtest.txt"}) if err := mod.ModifyResponse(res); err != nil { t.Fatalf("ModifyResponse(): got %v, want no error", err) } if got, want := res.StatusCode, http.StatusNotFound; got != want { t.Errorf("res.StatusCode: got %v, want %v", got, want) } }
explode_data.jsonl/41182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 541 }
[ 2830, 3393, 19, 15, 19, 4498, 8033, 21242, 398, 83345, 1703, 21468, 45535, 1155, 353, 8840, 836, 8, 341, 20082, 3741, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 1944, 25360, 61773, 14214, 6026, 2638, 26930, 62, 1138, 743, 1848, 961...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestConfigsUsed(t *testing.T) { defer restoreDflts(dfltFiles, dfltDirs) tmpdir := t.TempDir() cntFile, err := os.CreateTemp(tmpdir, "nf_conntrack_count") require.NoError(t, err) t.Cleanup(func() { require.NoError(t, cntFile.Close()) }) maxFile, err := os.CreateTemp(tmpdir, "nf_conntrack_max") require.NoError(t, err) t.Cleanup(func() { require.NoError(t, maxFile.Close()) }) dfltDirs = []string{tmpdir} cntFname := path.Base(cntFile.Name()) maxFname := path.Base(maxFile.Name()) dfltFiles = []string{cntFname, maxFname} count := 1234321 max := 9999999 require.NoError(t, os.WriteFile(cntFile.Name(), []byte(strconv.Itoa(count)), 0660)) require.NoError(t, os.WriteFile(maxFile.Name(), []byte(strconv.Itoa(max)), 0660)) c := &Conntrack{} acc := &testutil.Accumulator{} require.NoError(t, c.Gather(acc)) fix := func(s string) string { return strings.Replace(s, "nf_", "ip_", 1) } acc.AssertContainsFields(t, inputName, map[string]interface{}{ fix(cntFname): float64(count), fix(maxFname): float64(max), }) }
explode_data.jsonl/37566
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 443 }
[ 2830, 3393, 84905, 22743, 1155, 353, 8840, 836, 8, 341, 16867, 14952, 35, 1489, 2576, 1500, 79209, 10809, 11, 294, 79209, 97384, 340, 20082, 3741, 1669, 259, 65009, 6184, 2822, 60553, 1703, 11, 1848, 1669, 2643, 7251, 12151, 10368, 3741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntegrationOBFS4ClientFactoryError(t *testing.T) { ctx := context.Background() config := obfs4config() config.transportsGet = func(name string) obfs4base.Transport { txp := transports.Get(name) if name == "obfs4" && txp != nil { txp = &faketransport{txp: txp} } return txp } results := OBFS4Connect(ctx, config) if results.Error.Error() != "mocked ClientFactory error" { t.Fatal("not the error we expected") } }
explode_data.jsonl/53551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 52464, 20608, 8485, 19, 2959, 4153, 1454, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 25873, 1669, 1508, 3848, 19, 1676, 741, 25873, 8020, 3394, 1949, 284, 2915, 3153, 914, 8, 1508, 3848, 19, 3152, 87669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestVector2(t *testing.T) { vd := &tygo.ProtoBuf{Buffer: make([]byte, v.ByteSize())} v.Serialize(vd) vd.Reset() v3 := &Vector2{} if err := v3.Deserialize(vd); err == nil { CompareVector2(t.Errorf, v, v3, "") } else { t.Errorf("TestVector2 Deserialize error: %v", err) } }
explode_data.jsonl/15372
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 3781, 17, 1155, 353, 8840, 836, 8, 341, 5195, 67, 1669, 609, 1881, 3346, 7763, 983, 15064, 90, 4095, 25, 1281, 10556, 3782, 11, 348, 32119, 1695, 2140, 532, 5195, 32859, 3747, 67, 340, 5195, 67, 36660, 741, 5195, 18, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOperationRequestStreamOperations(t *testing.T) { hmock := httptest.NewClient() client := &Client{ AuroraURL: "https://localhost/", HTTP: hmock, } // All operations operationRequest := OperationRequest{} ctx, cancel := context.WithCancel(context.Background()) hmock.On( "GET", "https://localhost/operations?cursor=now", ).ReturnString(200, operationStreamResponse) operationStream := make([]operations.Operation, 1) err := client.StreamOperations(ctx, operationRequest, func(op operations.Operation) { operationStream[0] = op cancel() }) if assert.NoError(t, err) { assert.Equal(t, operationStream[0].GetType(), "create_account") } // Account payments operationRequest = OperationRequest{ForAccount: "GAIH3ULLFQ4DGSECF2AR555KZ4KNDGEKN4AFI4SU2M7B43MGK3QJZNSR"} ctx, cancel = context.WithCancel(context.Background()) hmock.On( "GET", "https://localhost/accounts/GAIH3ULLFQ4DGSECF2AR555KZ4KNDGEKN4AFI4SU2M7B43MGK3QJZNSR/payments?cursor=now", ).ReturnString(200, operationStreamResponse) err = client.StreamPayments(ctx, operationRequest, func(op operations.Operation) { operationStream[0] = op cancel() }) if assert.NoError(t, err) { payment, ok := operationStream[0].(operations.CreateAccount) assert.Equal(t, ok, true) assert.Equal(t, payment.Funder, "GAIH3ULLFQ4DGSECF2AR555KZ4KNDGEKN4AFI4SU2M7B43MGK3QJZNSR") } // test connection error operationRequest = OperationRequest{} ctx, cancel = context.WithCancel(context.Background()) hmock.On( "GET", "https://localhost/operations?cursor=now", ).ReturnString(500, operationStreamResponse) err = client.StreamOperations(ctx, operationRequest, func(op operations.Operation) { operationStream[0] = op cancel() }) if assert.Error(t, err) { assert.Contains(t, err.Error(), "got bad HTTP status code 500") } }
explode_data.jsonl/67648
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 693 }
[ 2830, 3393, 8432, 1900, 3027, 35120, 1155, 353, 8840, 836, 8, 341, 9598, 16712, 1669, 54320, 70334, 7121, 2959, 741, 25291, 1669, 609, 2959, 515, 197, 22985, 324, 6215, 3144, 25, 330, 2428, 1110, 8301, 35075, 197, 197, 9230, 25, 981, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTrie_InsertAndMatchPrefix(t *testing.T) { trie := NewTrie() t.Log("INSERT prefix=by week") trie.Insert(Prefix("by week"), 2) t.Log("INSERT prefix=by") trie.Insert(Prefix("by"), 1) if !trie.Match(Prefix("by")) { t.Error("MATCH prefix=by, expected=true, got=false") } }
explode_data.jsonl/2358
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 51, 7231, 76417, 3036, 8331, 14335, 1155, 353, 8840, 836, 8, 341, 197, 8927, 1669, 1532, 51, 7231, 741, 3244, 5247, 445, 12698, 9252, 28, 1694, 2003, 1138, 197, 8927, 23142, 5304, 5060, 445, 1694, 2003, 3975, 220, 17, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAttachDetach(t *testing.T) { if testBackend == "lldb" && runtime.GOOS == "linux" { bs, _ := ioutil.ReadFile("/proc/sys/kernel/yama/ptrace_scope") if bs == nil || strings.TrimSpace(string(bs)) != "0" { t.Logf("can not run TestAttachDetach: %v\n", bs) return } } if testBackend == "rr" { return } var buildFlags protest.BuildFlags if buildMode == "pie" { buildFlags |= protest.BuildModePIE } fixture := protest.BuildFixture("testnextnethttp", buildFlags) cmd := exec.Command(fixture.Path) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr assertNoError(cmd.Start(), t, "starting fixture") // wait for testnextnethttp to start listening t0 := time.Now() for { conn, err := net.Dial("tcp", "127.0.0.1:9191") if err == nil { conn.Close() break } time.Sleep(50 * time.Millisecond) if time.Since(t0) > 10*time.Second { t.Fatal("fixture did not start") } } var p *proc.Target var err error switch testBackend { case "native": p, err = native.Attach(cmd.Process.Pid, []string{}) case "lldb": path := "" if runtime.GOOS == "darwin" { path = fixture.Path } p, err = gdbserial.LLDBAttach(cmd.Process.Pid, path, []string{}) default: err = fmt.Errorf("unknown backend %q", testBackend) } assertNoError(err, t, "Attach") go func() { time.Sleep(1 * time.Second) http.Get("http://127.0.0.1:9191") }() assertNoError(p.Continue(), t, "Continue") assertLineNumber(p, t, 11, "Did not continue to correct location,") assertNoError(p.Detach(false), t, "Detach") if runtime.GOOS != "darwin" { // Debugserver sometimes will leave a zombie process after detaching, this // seems to be a bug with debugserver. resp, err := http.Get("http://127.0.0.1:9191/nobp") assertNoError(err, t, "Page request after detach") bs, err := ioutil.ReadAll(resp.Body) assertNoError(err, t, "Reading /nobp page") if out := string(bs); !strings.Contains(out, "hello, world!") { t.Fatalf("/nobp page does not contain \"hello, world!\": %q", out) } } cmd.Process.Kill() }
explode_data.jsonl/56283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 832 }
[ 2830, 3393, 30485, 89306, 1155, 353, 8840, 836, 8, 341, 743, 1273, 29699, 621, 330, 32459, 65, 1, 1009, 15592, 97574, 3126, 621, 330, 14210, 1, 341, 197, 93801, 11, 716, 1669, 43144, 78976, 4283, 15782, 41836, 58343, 26491, 3029, 14, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncodeStringValue(t *testing.T) { testCases := []struct { Val interface{} Expected string }{ { Val: "Testing 123", Expected: "Testing 123", }, { Val: strPtr("Testing 123"), Expected: "Testing 123", }, } for i, test := range testCases { val, err := EncodeValue(test.Val) if err != nil { t.Fatalf("Case [%d]: Got unexpected error %s!", i, err) } switch cast := val.ValueType.(type) { case *proto.Data_Value_StringValue: if test.Expected != cast.StringValue { t.Errorf("Case [%d]: Expected %s, got %s", i, test.Expected, cast.StringValue) } default: t.Errorf("Case [%d]: Got unexpected type back %+v!", i, cast) } } }
explode_data.jsonl/30504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 305 }
[ 2830, 3393, 32535, 82696, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 197, 2208, 414, 3749, 16094, 197, 197, 18896, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 2208, 25, 414, 330, 16451, 220, 16, 17, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRolling_deployInitial(t *testing.T) { initialStrategyInvoked := false strategy := &RollingDeploymentStrategy{ rcClient: fake.NewSimpleClientset().Core(), eventClient: fake.NewSimpleClientset().Core(), initialStrategy: &testStrategy{ deployFn: func(from *corev1.ReplicationController, to *corev1.ReplicationController, desiredReplicas int, updateAcceptor strat.UpdateAcceptor) error { initialStrategyInvoked = true return nil }, }, rollingUpdate: func(config *RollingUpdaterConfig) error { t.Fatalf("unexpected call to rollingUpdate") return nil }, getUpdateAcceptor: getUpdateAcceptor, apiRetryPeriod: 1 * time.Millisecond, apiRetryTimeout: 10 * time.Millisecond, } config := appstest.OkDeploymentConfig(1) config.Spec.Strategy = appstest.OkRollingStrategy() deployment, _ := appsinternalutil.MakeDeploymentV1(config) strategy.out, strategy.errOut = &bytes.Buffer{}, &bytes.Buffer{} err := strategy.Deploy(nil, deployment, 2) if err != nil { t.Fatalf("unexpected error: %v", err) } if !initialStrategyInvoked { t.Fatalf("expected initial strategy to be invoked") } }
explode_data.jsonl/64614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 408 }
[ 2830, 3393, 32355, 287, 91890, 6341, 1155, 353, 8840, 836, 8, 341, 85270, 19816, 15174, 10823, 1669, 895, 271, 11355, 10228, 1669, 609, 32355, 287, 75286, 19816, 515, 197, 30295, 2959, 25, 262, 12418, 7121, 16374, 2959, 746, 1005, 5386, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunCommand(t *testing.T) { c, err := NewContainer(ContainerName) if err != nil { t.Errorf(err.Error()) } argsThree := []string{"/bin/sh", "-c", "exit 0"} ok, err := c.RunCommand(argsThree, DefaultAttachOptions) if err != nil { t.Errorf(err.Error()) } if ok != true { t.Errorf("Expected success") } argsThree = []string{"/bin/sh", "-c", "exit 1"} ok, err = c.RunCommand(argsThree, DefaultAttachOptions) if err != nil { t.Errorf(err.Error()) } if ok != false { t.Errorf("Expected failure") } }
explode_data.jsonl/2794
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 6727, 4062, 1155, 353, 8840, 836, 8, 341, 1444, 11, 1848, 1669, 1532, 4502, 75145, 675, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 3964, 6141, 2398, 197, 630, 31215, 19641, 1669, 3056, 917, 90, 3115, 6863, 14688, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestQuotaInspectCommand_Good(t *testing.T) { t.Parallel() // Create a server srv, client, url := testServer(t, true, nil) defer srv.Shutdown() ui := new(cli.MockUi) cmd := &QuotaInspectCommand{Meta: Meta{Ui: ui}} // Create a quota to delete qs := testQuotaSpec() _, err := client.Quotas().Register(qs, nil) assert.Nil(t, err) // Delete a namespace if code := cmd.Run([]string{"-address=" + url, qs.Name}); code != 0 { t.Fatalf("expected exit 0, got: %d; %v", code, ui.ErrorWriter.String()) } out := ui.OutputWriter.String() if !strings.Contains(out, "Usages") || !strings.Contains(out, qs.Name) { t.Fatalf("expected quota, got: %s", out) } }
explode_data.jsonl/69913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 2183, 6089, 58533, 4062, 2646, 1386, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 4230, 264, 3538, 198, 1903, 10553, 11, 2943, 11, 2515, 1669, 1273, 5475, 1155, 11, 830, 11, 2092, 340, 16867, 43578, 108...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHandleRDSResponseWithoutLDSWatch(t *testing.T) { fakeServer, sCleanup := fakexds.StartServer(t) client, cCleanup := fakeServer.GetClientConn(t) defer func() { cCleanup() sCleanup() }() v2c := newV2Client(client, goodNodeProto, func(int) time.Duration { return 0 }) if v2c.handleRDSResponse(goodRDSResponse1) == nil { t.Fatal("v2c.handleRDSResponse() succeeded, should have failed") } }
explode_data.jsonl/51250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 6999, 49, 5936, 2582, 26040, 43, 5936, 14247, 1155, 353, 8840, 836, 8, 341, 1166, 726, 5475, 11, 274, 67335, 1669, 282, 585, 327, 5356, 12101, 5475, 1155, 340, 25291, 11, 272, 67335, 1669, 12418, 5475, 2234, 2959, 9701, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAgentTerminate(t *testing.T) { a, dir := newTestAgent(t) err := a.start("-data-dir", dir) if err != nil { t.Fatal(err) } err = a.terminate() if err != nil { t.Fatal(err) } if _, err := os.Stat(dir); !os.IsNotExist(err) { t.Fatal(err) } }
explode_data.jsonl/75546
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 16810, 62519, 1155, 353, 8840, 836, 8, 341, 11323, 11, 5419, 1669, 501, 2271, 16810, 1155, 692, 9859, 1669, 264, 4962, 13645, 691, 45283, 497, 5419, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 98...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeleteVolume(t *testing.T) { d, err := NewFakeDriver(t) if err != nil { t.Fatalf("Error getting driver: %v", err) } tests := []struct { desc string req *csi.DeleteVolumeRequest expectedResp *csi.DeleteVolumeResponse expectedErrCode codes.Code }{ { desc: "success standard", req: &csi.DeleteVolumeRequest{ VolumeId: testVolumeID, }, expectedResp: &csi.DeleteVolumeResponse{}, }, { desc: "fail with no volume id", req: &csi.DeleteVolumeRequest{ VolumeId: "", }, expectedResp: nil, expectedErrCode: codes.InvalidArgument, }, { desc: "fail with the invalid diskURI", req: &csi.DeleteVolumeRequest{ VolumeId: "123", }, expectedResp: &csi.DeleteVolumeResponse{}, }, } for _, test := range tests { ctx, cancel := context.WithCancel(context.TODO()) defer cancel() id := test.req.VolumeId disk := compute.Disk{ ID: &id, } d.cloud.DisksClient.(*mockdiskclient.MockInterface).EXPECT().Get(gomock.Eq(ctx), gomock.Any(), gomock.Any()).Return(disk, nil).AnyTimes() d.cloud.DisksClient.(*mockdiskclient.MockInterface).EXPECT().Delete(gomock.Eq(ctx), gomock.Any(), gomock.Any()).Return(nil).AnyTimes() result, err := d.DeleteVolume(context.Background(), test.req) if err != nil { checkTestError(t, test.expectedErrCode, err) } if !reflect.DeepEqual(result, test.expectedResp) { t.Errorf("input request: %v, DeleteVolume result: %v, expected: %v", test.req, result, test.expectedResp) } } }
explode_data.jsonl/59380
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 634 }
[ 2830, 3393, 6435, 18902, 1155, 353, 8840, 836, 8, 341, 2698, 11, 1848, 1669, 1532, 52317, 11349, 1155, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1454, 3709, 5579, 25, 1018, 85, 497, 1848, 340, 197, 630, 78216, 1669, 3056,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRulesSafeCode(t *testing.T) { tempDir := t.TempDir() testcases := []*testutil.RuleTestCase{ { Name: "HS-LEAKS-1", Rule: NewAWSManagerID(), Src: SampleSafeHSLEAKS1, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-1", ".test")), }, { Name: "HS-LEAKS-2", Rule: NewAWSSecretKey(), Src: SampleSafeHSLEAKS2, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-2", ".test")), }, { Name: "HS-LEAKS-3", Rule: NewAWSMWSKey(), Src: SampleSafeHSLEAKS3, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-3", ".test")), }, { Name: "HS-LEAKS-4", Rule: NewFacebookSecretKey(), Src: SampleSafeHSLEAKS4, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-4", ".test")), }, { Name: "HS-LEAKS-5", Rule: NewFacebookClientID(), Src: SampleSafeHSLEAKS5, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-5", ".test")), }, { Name: "HS-LEAKS-6", Rule: NewTwitterSecretKey(), Src: SampleSafeHSLEAKS6, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-6", ".test")), }, { Name: "HS-LEAKS-7", Rule: NewTwitterClientID(), Src: SampleSafeHSLEAKS7, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-7", ".test")), }, { Name: "HS-LEAKS-8", Rule: NewGithub(), Src: SampleSafeHSLEAKS8, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-8", ".test")), }, { Name: "HS-LEAKS-9", Rule: NewLinkedInClientID(), Src: SampleSafeHSLEAKS9, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-9", ".test")), }, { Name: "HS-LEAKS-10", Rule: NewLinkedInSecretKey(), Src: SampleSafeHSLEAKS10, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-10", ".test")), }, { Name: "HS-LEAKS-11", Rule: NewSlack(), Src: SampleSafeHSLEAKS11, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-11", ".test")), }, { Name: "HS-LEAKS-12", Rule: NewAsymmetricPrivateKey(), Src: SampleSafeHSLEAKS12, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-12", ".test")), }, { Name: "HS-LEAKS-13", Rule: 
NewGoogleAPIKey(), Src: SampleSafeHSLEAKS13, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-13", ".test")), }, { Name: "HS-LEAKS-14", Rule: NewGoogleGCPServiceAccount(), Src: SampleSafeHSLEAKS14, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-14", ".test")), }, { Name: "HS-LEAKS-15", Rule: NewHerokuAPIKey(), Src: SampleSafeHSLEAKS15, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-15", ".test")), }, { Name: "HS-LEAKS-16", Rule: NewMailChimpAPIKey(), Src: SampleSafeHSLEAKS16, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-16", ".test")), }, { Name: "HS-LEAKS-17", Rule: NewMailgunAPIKey(), Src: SampleSafeHSLEAKS17, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-17", ".test")), }, { Name: "HS-LEAKS-18", Rule: NewPayPalBraintreeAccessToken(), Src: SampleSafeHSLEAKS18, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-18", ".test")), }, { Name: "HS-LEAKS-19", Rule: NewPicaticAPIKey(), Src: SampleSafeHSLEAKS19, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-19", ".test")), }, { Name: "HS-LEAKS-20", Rule: NewSendGridAPIKey(), Src: SampleSafeHSLEAKS20, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-20", ".test")), }, { Name: "HS-LEAKS-21", Rule: NewStripeAPIKey(), Src: SampleSafeHSLEAKS21, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-21", ".test")), }, { Name: "HS-LEAKS-22", Rule: NewSquareAccessToken(), Src: SampleSafeHSLEAKS22, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-22", ".test")), }, { Name: "HS-LEAKS-23", Rule: NewSquareOAuthSecret(), Src: SampleSafeHSLEAKS23, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-23", ".test")), }, { Name: "HS-LEAKS-24", Rule: NewTwilioAPIKey(), Src: SampleSafeHSLEAKS24, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-24", ".test")), }, { Name: "HS-LEAKS-25", Rule: NewHardCodedCredentialGeneric(), Src: SampleSafeHSLEAKS25, Filename: 
filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-25", ".test")), }, { Name: "HS-LEAKS-26", Rule: NewHardCodedPassword(), Src: SampleSafeHSLEAKS26, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-26", ".test")), }, { Name: "HS-LEAKS-27", Rule: NewPasswordExposedInHardcodedURL(), Src: SampleSafeHSLEAKS27, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-27", ".test")), }, { Name: "HS-LEAKS-28", Rule: NewWPConfig(), Src: SampleSafeHSLEAKS28, Filename: filepath.Join(tempDir, fmt.Sprintf("%s%s", "HS-LEAKS-28", ".test")), }, } testutil.TestSafeCode(t, testcases) }
explode_data.jsonl/64911
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2858 }
[ 2830, 3393, 26008, 25663, 2078, 1155, 353, 8840, 836, 8, 341, 16280, 6184, 1669, 259, 65009, 6184, 741, 18185, 23910, 1669, 29838, 1944, 1314, 63961, 16458, 515, 197, 197, 515, 298, 21297, 25, 257, 330, 11961, 12, 867, 11907, 50, 12, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGPGKey_String(t *testing.T) { v := GPGKey{ ID: Int64(0), PrimaryKeyID: Int64(0), KeyID: String(""), PublicKey: String(""), CanSign: Bool(false), CanEncryptComms: Bool(false), CanEncryptStorage: Bool(false), CanCertify: Bool(false), } want := `github.GPGKey{ID:0, PrimaryKeyID:0, KeyID:"", PublicKey:"", CanSign:false, CanEncryptComms:false, CanEncryptStorage:false, CanCertify:false}` if got := v.String(); got != want { t.Errorf("GPGKey.String = %v, want %v", got, want) } }
explode_data.jsonl/33235
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 38, 11383, 1592, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 479, 11383, 1592, 515, 197, 29580, 25, 394, 1333, 21, 19, 7, 15, 1326, 197, 197, 25981, 915, 25, 414, 1333, 21, 19, 7, 15, 1326, 197, 55242, 915, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDetachedSignature(t *testing.T) { kring, _ := ReadKeyRing(readerFromHex(testKeys1And2Hex)) testDetachedSignature(t, kring, readerFromHex(detachedSignatureHex), signedInput, "binary", testKey1KeyId) testDetachedSignature(t, kring, readerFromHex(detachedSignatureTextHex), signedInput, "text", testKey1KeyId) testDetachedSignature(t, kring, readerFromHex(detachedSignatureV3TextHex), signedInput, "v3", testKey1KeyId) incorrectSignedInput := signedInput + "X" _, err := CheckDetachedSignature(kring, bytes.NewBufferString(incorrectSignedInput), readerFromHex(detachedSignatureHex)) if err == nil { t.Fatal("CheckDetachedSignature returned without error for bad signature") } if err == errors.ErrUnknownIssuer { t.Fatal("CheckDetachedSignature returned ErrUnknownIssuer when the signer was known, but the signature invalid") } }
explode_data.jsonl/2283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 17076, 3854, 25088, 1155, 353, 8840, 836, 8, 341, 197, 9855, 287, 11, 716, 1669, 4457, 1592, 43466, 21987, 3830, 20335, 8623, 8850, 16, 3036, 17, 20335, 1171, 18185, 17076, 3854, 25088, 1155, 11, 595, 12640, 11, 6604, 3830, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnsubAfterClose(t *testing.T) { ps := New(1) ch := ps.Sub("t1") defer func() { ps.Unsub(ch, "t1") ps.Shutdown() }() ps.Close("t1") checkContents(t, ch, []string{}) }
explode_data.jsonl/44254
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 1806, 1966, 6025, 7925, 1155, 353, 8840, 836, 8, 341, 35009, 1669, 1532, 7, 16, 340, 23049, 1669, 4726, 12391, 445, 83, 16, 1138, 16867, 2915, 368, 341, 197, 35009, 10616, 1966, 7520, 11, 330, 83, 16, 1138, 197, 35009, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestValidateRouteField(t *testing.T) { validRouteFields := []string{ "coffee", "default/coffee", } for _, rf := range validRouteFields { allErrs := validateRouteField(rf, field.NewPath("route")) if len(allErrs) > 0 { t.Errorf("validRouteField(%q) returned errors %v for valid input", rf, allErrs) } } invalidRouteFields := []string{ "-", "/coffee", "-/coffee", } for _, rf := range invalidRouteFields { allErrs := validateRouteField(rf, field.NewPath("route")) if len(allErrs) == 0 { t.Errorf("validRouteField(%q) returned no errors for invalid input", rf) } } }
explode_data.jsonl/65837
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 17926, 4899, 1877, 1155, 353, 8840, 836, 8, 341, 56322, 4899, 8941, 1669, 3056, 917, 515, 197, 197, 1, 78117, 756, 197, 197, 86191, 64942, 53125, 756, 197, 630, 2023, 8358, 36026, 1669, 2088, 2697, 4899, 8941, 341, 197, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCreateSwitch(t *testing.T) { type args struct { ctx context.Context req service.CreateSwitchRequest } tests := []struct { name string args args wantSwitchId string wantErr bool }{ { name: "创建交换机", args: args{ ctx: nil, req: service.CreateSwitchRequest{ SwitchName: "第一台交换机", ZoneId: "cn-qingdao-b", VpcId: "vpc-m5ey3pofeclswmv796tgd", CidrBlock: "172.16.0.0/24", }, }, wantSwitchId: "", wantErr: false, }} for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { gotSwitchId, err := service.CreateSwitch(tt.args.ctx, tt.args.req) if (err != nil) != tt.wantErr { t.Errorf("CreateSwitch() error = %v, wantErr %v", err, tt.wantErr) return } if gotSwitchId == tt.wantSwitchId { t.Errorf("CreateSwitch() gotSwitchId = %v, want %v", gotSwitchId, tt.wantSwitchId) } }) } }
explode_data.jsonl/72640
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 4021, 16837, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 20985, 2266, 9328, 198, 197, 24395, 2473, 7251, 16837, 1900, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 31215, 260, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPodSecurityContextAdmission(t *testing.T) { handler := NewSecurityContextDeny(nil) pod := api.Pod{ Spec: api.PodSpec{ Containers: []api.Container{ {}, }, }, } fsGroup := int64(1001) tests := []struct { securityContext api.PodSecurityContext errorExpected bool }{ { securityContext: api.PodSecurityContext{}, errorExpected: false, }, { securityContext: api.PodSecurityContext{ SupplementalGroups: []int64{1234}, }, errorExpected: true, }, { securityContext: api.PodSecurityContext{ FSGroup: &fsGroup, }, errorExpected: true, }, } for _, test := range tests { pod.Spec.SecurityContext = &test.securityContext err := handler.Admit(admission.NewAttributesRecord(&pod, api.Kind("Pod"), "foo", "name", api.Resource("pods"), "", "ignored", nil)) if test.errorExpected && err == nil { t.Errorf("Expected error for security context %+v but did not get an error", test.securityContext) } if !test.errorExpected && err != nil { t.Errorf("Unexpected error %v for security context %+v", err, test.securityContext) } } }
explode_data.jsonl/5861
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 23527, 15352, 1972, 2589, 2728, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 1532, 15352, 1972, 23619, 88, 27907, 340, 3223, 347, 1669, 6330, 88823, 515, 197, 7568, 992, 25, 6330, 88823, 8327, 515, 298, 197, 74632, 25, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestConfigSourceParserProvider(t *testing.T) { tests := []struct { parserProvider parserprovider.ParserProvider wantErr error name string factories []Factory }{ { name: "success", }, { name: "wrapped_parser_provider_get_error", parserProvider: &mockParserProvider{ ErrOnGet: true, }, wantErr: &errOnParserProviderGet{}, }, { name: "duplicated_factory_type", factories: []Factory{ &mockCfgSrcFactory{}, &mockCfgSrcFactory{}, }, wantErr: &errDuplicatedConfigSourceFactory{}, }, { name: "new_manager_builder_error", factories: []Factory{ &mockCfgSrcFactory{ ErrOnCreateConfigSource: errors.New("new_manager_builder_error forced error"), }, }, parserProvider: &fileParserProvider{ FileName: path.Join("testdata", "basic_config.yaml"), }, wantErr: &errConfigSourceCreation{}, }, { name: "manager_resolve_error", parserProvider: &fileParserProvider{ FileName: path.Join("testdata", "manager_resolve_error.yaml"), }, wantErr: fmt.Errorf("error not wrapped by specific error type: %w", configsource.ErrSessionClosed), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { factories := tt.factories if factories == nil { factories = []Factory{ &mockCfgSrcFactory{}, } } pp := NewConfigSourceParserProvider( zap.NewNop(), component.DefaultBuildInfo(), factories..., ) require.NotNil(t, pp) // Do not use the parserprovider.Default() to simplify the test setup. 
cspp := pp.(*configSourceParserProvider) cspp.pp = tt.parserProvider if cspp.pp == nil { cspp.pp = &mockParserProvider{} } cp, err := pp.Get() if tt.wantErr == nil { require.NoError(t, err) require.NotNil(t, cp) } else { assert.IsType(t, tt.wantErr, err) assert.Nil(t, cp) return } var watchForUpdatedError error wg := sync.WaitGroup{} wg.Add(1) go func() { defer wg.Done() watchForUpdatedError = pp.(parserprovider.Watchable).WatchForUpdate() }() cspp.csm.WaitForWatcher() closeErr := pp.(parserprovider.Closeable).Close(context.Background()) assert.NoError(t, closeErr) wg.Wait() assert.Equal(t, configsource.ErrSessionClosed, watchForUpdatedError) }) } }
explode_data.jsonl/21319
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1019 }
[ 2830, 3393, 2648, 3608, 6570, 5179, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 55804, 5179, 6729, 19979, 58430, 5179, 198, 197, 50780, 7747, 286, 1465, 198, 197, 11609, 1843, 914, 198, 197, 1166, 52893, 414, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseSliceInt64(t *testing.T) { var tcs = []struct { name string input []string expected []int64 }{ { name: tt.Name{ Given: "slice of string", When: "the value is valid", Then: "return valid slice of int64", }.Construct(), input: []string{"1", "2"}, expected: []int64{1, 2}, }, { name: tt.Name{ Given: "slice of string", When: "the value is invalid", Then: "return invalid slice of int64", }.Construct(), input: []string{"x", "y"}, expected: []int64{0, 0}, }, } for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { actual := ParseSliceInt64(tc.input) tt.Equal(t, tc.expected, actual) }) } }
explode_data.jsonl/9692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 328 }
[ 2830, 3393, 14463, 33236, 1072, 21, 19, 1155, 353, 8840, 836, 8, 341, 2405, 259, 4837, 284, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 22427, 262, 3056, 917, 198, 197, 42400, 3056, 396, 21, 19, 198, 197, 59403, 197, 197, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGbkIO(t *testing.T) { tmpDataDir, err := ioutil.TempDir("", "data-*") if err != nil { t.Error(err) } defer os.RemoveAll(tmpDataDir) gbk := Read("../../data/puc19.gbk") tmpGbkFilePath := filepath.Join(tmpDataDir, "puc19.gbk") Write(gbk, tmpGbkFilePath) writeTestGbk := Read(tmpGbkFilePath) if diff := cmp.Diff(gbk, writeTestGbk, cmpopts.IgnoreFields(poly.Feature{}, "ParentSequence")); diff != "" { t.Errorf("Parsing the output of Build() does not produce the same output as parsing the original file read with Read(). Got this diff:\n%s", diff) } // Test multiline Genbank features pichia := Read("../../data/pichia_chr1_head.gb") var multilineOutput string for _, feature := range pichia.Features { multilineOutput = feature.GbkLocationString } if multilineOutput != "join(<459260..459456,459556..459637,459685..459739,459810..>460126)" { t.Errorf("Failed to parse multiline genbank feature string") } }
explode_data.jsonl/74808
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 365 }
[ 2830, 3393, 38, 40029, 3810, 1155, 353, 8840, 836, 8, 341, 20082, 1043, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 691, 44903, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 532, 16867, 2643, 84427, 10368, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPopulateCluster_AnonymousAuth(t *testing.T) { c := buildMinimalCluster() c.Spec.KubernetesVersion = "1.15.0" cloud, err := BuildCloud(c) if err != nil { t.Fatalf("error from BuildCloud: %v", err) } err = PerformAssignments(c, cloud) if err != nil { t.Fatalf("error from PerformAssignments: %v", err) } full, err := mockedPopulateClusterSpec(c) if err != nil { t.Fatalf("Unexpected error from PopulateCluster: %v", err) } if full.Spec.KubeAPIServer.AnonymousAuth == nil { t.Fatalf("AnonymousAuth not specified") } if fi.BoolValue(full.Spec.KubeAPIServer.AnonymousAuth) != false { t.Fatalf("Unexpected AnonymousAuth: %v", fi.BoolValue(full.Spec.KubeAPIServer.AnonymousAuth)) } }
explode_data.jsonl/75048
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 11598, 6334, 28678, 1566, 6280, 89392, 5087, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1936, 88328, 28678, 741, 1444, 36473, 11352, 29827, 5637, 284, 330, 16, 13, 16, 20, 13, 15, 1837, 197, 12361, 11, 1848, 1669, 7854, 160...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestNewestSeek(t *testing.T) { m := newMockD() defer close(m.recvChan) ds := initializeDeliverHandler() go ds.Handle(m) m.recvChan <- makeSeek(systemChainID, &ab.SeekInfo{Start: seekNewest, Stop: seekNewest, Behavior: ab.SeekInfo_BLOCK_UNTIL_READY}) select { case deliverReply := <-m.sendChan: if deliverReply.GetBlock() == nil { t.Fatalf("Received an error on the reply channel") } if deliverReply.GetBlock().Header.Number != uint64(ledgerSize-1) { t.Fatalf("Expected only the most recent block") } case <-time.After(time.Second): t.Fatalf("Timed out waiting to get all blocks") } }
explode_data.jsonl/36258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 3564, 477, 39350, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 501, 11571, 35, 741, 16867, 3265, 1255, 40433, 46019, 692, 83336, 1669, 9468, 16532, 1524, 3050, 741, 30680, 11472, 31421, 1255, 692, 2109, 40433, 46019, 9119, 1281, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetReadingsByValueDescriptorOverLimit(t *testing.T) { reset() dbClient = nil _, err := getReadingsByValueDescriptor("", math.MaxInt32, logger.NewMockClient()) if err == nil { t.Errorf("Expected error getting readings by value descriptor") } }
explode_data.jsonl/48179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 1949, 4418, 819, 1359, 1130, 11709, 1918, 16527, 1155, 353, 8840, 836, 8, 341, 70343, 741, 20939, 2959, 284, 2092, 271, 197, 6878, 1848, 1669, 633, 4418, 819, 1359, 1130, 11709, 19814, 6888, 14535, 1072, 18, 17, 11, 5925, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHeaderHash(t *testing.T) { t.Parallel() fabBlock, err := getBlock("./mock/sampleblock.pb") assert.NoError(t, err) block, _ := FromFabricBlock(fabBlock) hash := block.HeaderHash() assert.Equal( t, "1652fcac96482da896909760e3df4758195fcad4672a54123e586c9a26afde0e", hex.EncodeToString(hash), ) }
explode_data.jsonl/40043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 4047, 6370, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1166, 370, 4713, 11, 1848, 1669, 633, 4713, 13988, 16712, 69851, 4574, 37916, 1138, 6948, 35699, 1155, 11, 1848, 692, 47996, 11, 716, 1669, 5542, 81731, 4713...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddRequiredHeadersToRedirectedRequests(t *testing.T) { for _, urls := range [][]string{ {"http://docker.io", "https://docker.com"}, {"https://foo.docker.io:7777", "http://bar.docker.com"}, {"https://foo.docker.io", "https://example.com"}, } { reqFrom, _ := http.NewRequest("GET", urls[0], nil) reqFrom.Header.Add("Content-Type", "application/json") reqFrom.Header.Add("Authorization", "super_secret") reqTo, _ := http.NewRequest("GET", urls[1], nil) addRequiredHeadersToRedirectedRequests(reqTo, []*http.Request{reqFrom}) if len(reqTo.Header) != 1 { t.Fatalf("Expected 1 headers, got %d", len(reqTo.Header)) } if reqTo.Header.Get("Content-Type") != "application/json" { t.Fatal("'Content-Type' should be 'application/json'") } if reqTo.Header.Get("Authorization") != "" { t.Fatal("'Authorization' should be empty") } } for _, urls := range [][]string{ {"https://docker.io", "https://docker.com"}, {"https://foo.docker.io:7777", "https://bar.docker.com"}, } { reqFrom, _ := http.NewRequest("GET", urls[0], nil) reqFrom.Header.Add("Content-Type", "application/json") reqFrom.Header.Add("Authorization", "super_secret") reqTo, _ := http.NewRequest("GET", urls[1], nil) addRequiredHeadersToRedirectedRequests(reqTo, []*http.Request{reqFrom}) if len(reqTo.Header) != 2 { t.Fatalf("Expected 2 headers, got %d", len(reqTo.Header)) } if reqTo.Header.Get("Content-Type") != "application/json" { t.Fatal("'Content-Type' should be 'application/json'") } if reqTo.Header.Get("Authorization") != "super_secret" { t.Fatal("'Authorization' should be 'super_secret'") } } }
explode_data.jsonl/13115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 645 }
[ 2830, 3393, 2212, 8164, 10574, 1249, 17725, 291, 35295, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 29984, 1669, 2088, 52931, 917, 515, 197, 197, 4913, 1254, 1110, 28648, 4245, 497, 330, 2428, 1110, 28648, 905, 7115, 197, 197, 4913, 2428,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestFormatter_PrintEmpty(t *testing.T) { g := NewWithT(t) msgs := diag.Messages{} logOutput, _ := Print(msgs, LogFormat, false) g.Expect(logOutput).To(Equal("")) jsonOutput, _ := Print(msgs, JSONFormat, false) g.Expect(jsonOutput).To(Equal("[]")) yamlOutput, _ := Print(msgs, YAMLFormat, false) g.Expect(yamlOutput).To(Equal("[]\n")) }
explode_data.jsonl/54033
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 14183, 45788, 3522, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 21169, 82, 1669, 39717, 54445, 31483, 6725, 5097, 11, 716, 1669, 8213, 8119, 82, 11, 2835, 4061, 11, 895, 340, 3174, 81893, 12531, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInMemorySeedPhrase(t *testing.T) { // make the storage with reasonable defaults cstore := NewInMemory() algo := hd.Secp256k1 n1, n2 := "lost-key", "found-again" // make sure key works with initial password info, mnemonic, err := cstore.NewMnemonic(n1, English, sdk.FullFundraiserPath, algo) require.Nil(t, err, "%+v", err) require.Equal(t, n1, info.GetName()) require.NotEmpty(t, mnemonic) // now, let us delete this key err = cstore.Delete(n1) require.Nil(t, err, "%+v", err) _, err = cstore.Key(n1) require.NotNil(t, err) // let us re-create it from the mnemonic-phrase params := *hd.NewFundraiserParams(0, sdk.CoinType, 0) hdPath := params.String() newInfo, err := cstore.NewAccount(n2, mnemonic, DefaultBIP39Passphrase, hdPath, algo) require.NoError(t, err) require.Equal(t, n2, newInfo.GetName()) require.Equal(t, info.GetPubKey().Address(), newInfo.GetPubKey().Address()) require.Equal(t, info.GetPubKey(), newInfo.GetPubKey()) }
explode_data.jsonl/73450
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 641, 10642, 41471, 46806, 1155, 353, 8840, 836, 8, 341, 197, 322, 1281, 279, 5819, 448, 13276, 16674, 198, 1444, 4314, 1669, 1532, 641, 10642, 2822, 69571, 3346, 1669, 17907, 808, 757, 79, 17, 20, 21, 74, 16, 198, 9038, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDefaultVersion(t *testing.T) { cluster := &MySQLCluster{} cluster.EnsureDefaults() if cluster.Spec.Version != defaultVersion { t.Errorf("Expected default version to be %s but got %s", defaultVersion, cluster.Spec.Version) } }
explode_data.jsonl/22108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 3675, 5637, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 609, 59224, 28678, 16094, 197, 18855, 22834, 19098, 16273, 2822, 743, 10652, 36473, 35842, 961, 1638, 5637, 341, 197, 3244, 13080, 445, 18896, 1638, 2319, 311, 387, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestHttpPost_Perform(t *testing.T) { cases := []struct { name string status int want string wantErrored bool response string }{ {"success", 200, "results!", false, `results!`}, {"success but error in body", 200, `{"error": "results!"}`, false, `{"error": "results!"}`}, {"success with HTML", 200, `<html>results!</html>`, false, `<html>results!</html>`}, {"not found", 400, "inputVal", true, `<html>so bad</html>`}, {"server error", 500, "inputVal", true, `big error`}, } for _, tt := range cases { test := tt t.Run(test.name, func(t *testing.T) { t.Parallel() input := cltest.RunResultWithValue("inputVal") wantedBody := `{"value":"inputVal"}` mock, cleanup := cltest.NewHTTPMockServer(t, test.status, "POST", test.response, func(body string) { assert.Equal(t, wantedBody, body) }) defer cleanup() hpa := adapters.HTTPPost{URL: cltest.MustParseWebURL(mock.URL)} result := hpa.Perform(input, nil) val, err := result.Get("value") assert.Nil(t, err) assert.Equal(t, test.want, val.String()) assert.Equal(t, true, val.Exists()) assert.Equal(t, test.wantErrored, result.HasError()) assert.Equal(t, false, result.Status.PendingBridge()) }) } }
explode_data.jsonl/39120
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 511 }
[ 2830, 3393, 23214, 53918, 627, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 23847, 414, 526, 198, 197, 50780, 286, 914, 198, 197, 50780, 36560, 1151, 1807, 198, 197, 21735, 262, 914,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOpenShiftTraits(t *testing.T) { env := createTestEnv(t, v1alpha1.IntegrationPlatformClusterOpenShift, "camel:core") res := processTestEnv(t, env) assert.NotEmpty(t, env.ExecutedTraits) assert.NotNil(t, env.GetTrait(ID("deployment"))) assert.Nil(t, env.GetTrait(ID("service"))) assert.Nil(t, env.GetTrait(ID("route"))) assert.NotNil(t, env.GetTrait(ID("owner"))) assert.NotNil(t, res.GetConfigMap(func(cm *corev1.ConfigMap) bool { return cm.Name == TestProperties })) assert.NotNil(t, res.GetDeployment(func(deployment *appsv1.Deployment) bool { return deployment.Name == TestDeploymentName })) }
explode_data.jsonl/54521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 5002, 24841, 42820, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 1855, 2271, 14359, 1155, 11, 348, 16, 7141, 16, 7371, 17376, 17296, 28678, 5002, 24841, 11, 330, 93321, 25, 2153, 1138, 10202, 1669, 1882, 2271, 14359, 1155, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAzureFindValidAccessTokenForTenant_InvalidManagementDomain(t *testing.T) { expirationDate := time.Now().Add(1 * time.Hour) tenantID := "c056adac-c6a6-4ddf-ab20-0f26d47f7eea" expectedToken := cli.Token{ ExpiresOn: expirationDate.Format("2006-01-02 15:04:05.999999"), AccessToken: "7cabcf30-8dca-43f9-91e6-fd56dfb8632f", TokenType: "9b10b986-7a61-4542-8d5a-9fcd96112585", Resource: "https://portal.azure.com/", Authority: tenantID, } tokens := []cli.Token{expectedToken} token, err := findValidAccessTokenForTenant(tokens, tenantID) if err == nil { t.Fatalf("Expected an error but didn't get one") } if token != nil { t.Fatalf("Expected Token to be nil but got: %+v", token) } }
explode_data.jsonl/60953
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 78107, 9885, 4088, 37649, 2461, 71252, 62, 7928, 22237, 13636, 1155, 353, 8840, 836, 8, 341, 48558, 19629, 1916, 1669, 882, 13244, 1005, 2212, 7, 16, 353, 882, 73550, 340, 197, 43919, 915, 1669, 330, 66, 15, 20, 21, 329, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWsBalanceUpdate(t *testing.T) { t.Parallel() pressXToJSON := []byte(`{"stream":"jTfvpakT2yT0hVIo5gYWVihZhdM2PrBgJUZ5PyfZ4EVpCkx4Uoxk5timcrQc","data":{ "e": "balanceUpdate", "E": 1573200697110, "a": "BTC", "d": "100.00000000", "T": 1573200697068 }}`) err := b.wsHandleData(pressXToJSON) if err != nil { t.Error(err) } }
explode_data.jsonl/76687
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 74733, 21190, 4289, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 1873, 55, 1249, 5370, 1669, 3056, 3782, 5809, 4913, 4027, 3252, 73, 51, 69, 15260, 585, 51, 17, 88, 51, 15, 71, 25308, 78, 20, 70, 98117, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestParsersMultilineTimeout configures an input with a multiline pattern
// parser (max_lines 3, 100ms timeout) and verifies that buffered lines are
// flushed as events once the timeout elapses, with registry offsets tracking
// the total bytes written after each batch.
// NOTE(review): this block's original multi-line formatting (including raw
// string fixtures and an inline // comment) was flattened onto one line by
// the dataset export; the code is kept byte-identical rather than
// reconstructed, since the original raw-string line breaks cannot be
// recovered from this view.
func TestParsersMultilineTimeout(t *testing.T) { env := newInputTestingEnvironment(t) testlogName := "test.log" inp := env.mustCreateInput(map[string]interface{}{ "paths": []string{env.abspath(testlogName)}, "prospector.scanner.check_interval": "1ms", "parsers": []map[string]interface{}{ map[string]interface{}{ "multiline": map[string]interface{}{ "type": "pattern", "pattern": "^\\[", "negate": true, "match": "after", "max_lines": 3, "timeout": "100ms", // set to lower value to speed up test }, }, }, }) testlines := []byte(`[2015] hello world First Line Second Line `) env.mustWriteLinesToFile(testlogName, testlines) ctx, cancelInput := context.WithCancel(context.Background()) env.startInput(ctx, inp) env.waitUntilEventCount(1) env.requireOffsetInRegistry(testlogName, len(testlines)) moreLines := []byte(` This should not be third This should not be fourth [2016] Hello world First line again `) env.mustAppendLinesToFile(testlogName, moreLines) env.requireEventsReceived([]string{ `[2015] hello world First Line Second Line`, }) env.waitUntilEventCount(3) env.requireOffsetInRegistry(testlogName, len(testlines)+len(moreLines)) env.requireEventsReceived([]string{`[2015] hello world First Line Second Line`, ` This should not be third This should not be fourth`, `[2016] Hello world First line again`, }) cancelInput() env.waitUntilInputStops() }
explode_data.jsonl/14920
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 598 }
[ 2830, 3393, 47, 40488, 40404, 26560, 7636, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 501, 2505, 16451, 12723, 1155, 692, 18185, 839, 675, 1669, 330, 1944, 1665, 698, 17430, 79, 1669, 6105, 69419, 4021, 2505, 9147, 14032, 31344, 67066, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteUser(t *testing.T) { apiResponse, err := client.UserApi.DeleteUser(context.Background(), "gopher").Execute() if err != nil { t.Fatalf("Error while deleting user: %v", err) } if apiResponse.StatusCode != 200 { t.Log(apiResponse) } }
explode_data.jsonl/14057
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 6435, 1474, 1155, 353, 8840, 836, 8, 341, 54299, 2582, 11, 1848, 1669, 2943, 7344, 6563, 18872, 1474, 5378, 19047, 1507, 330, 70, 16940, 1827, 17174, 2822, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1454, 1393, 33011,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRouterParam(t *testing.T) { e := New() r := e.router r.Add(http.MethodGet, "/users/:id", func(c Context) error { return nil }) c := e.NewContext(nil, nil).(*context) r.Find(http.MethodGet, "/users/1", c) assert.Equal(t, "1", c.Param("id")) }
explode_data.jsonl/47115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 9523, 2001, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1532, 741, 7000, 1669, 384, 22125, 198, 7000, 1904, 19886, 20798, 1949, 11, 3521, 4218, 11315, 307, 497, 2915, 1337, 9608, 8, 1465, 341, 197, 853, 2092, 198, 197, 3518,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestConfigGzipList loads a gzip suffix list (".html", "css", "text/plain")
// from YAML configuration and checks that ShouldGzip accepts text/plain,
// text/html and text/css but rejects image/png.
// NOTE(review): the multi-line YAML raw string was flattened by the dataset
// export; kept byte-identical because the original line breaks inside the
// backtick literal cannot be recovered from this view.
func TestConfigGzipList(t *testing.T) { var data = ` gzip: - .html - css - text/plain ` newTestConfig(data, func(c Config, seq sequence) { c.loadGzip(seq) assert.Equal(t, true, c.ShouldGzip("text/plain")) assert.Equal(t, true, c.ShouldGzip("text/html")) assert.Equal(t, true, c.ShouldGzip("text/css")) assert.Equal(t, false, c.ShouldGzip("image/png")) }) }
explode_data.jsonl/8668
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 2648, 38, 9964, 852, 1155, 353, 8840, 836, 8, 341, 2405, 821, 284, 22074, 70996, 510, 220, 481, 659, 1551, 198, 220, 481, 15877, 198, 220, 481, 1467, 36971, 198, 3989, 8638, 2271, 2648, 2592, 11, 2915, 1337, 5532, 11, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBundleSourceDockerfile builds a fake cluster holding two ImageStreams
// ("stable" and "pipeline" in target-namespace, each with published tags for
// the metering images and ghostunnel) and checks that bundleSourceDockerfile
// generates exactly the expected Dockerfile: one RUN sed substitution per
// source image, rewriting quay.io references to the in-cluster registry.
// NOTE(review): the expected-Dockerfile raw string literal spans many lines
// in the original; the dataset export flattened this block, so the code is
// preserved byte-identical rather than reformatted.
func TestBundleSourceDockerfile(t *testing.T) { var expectedDockerfile = `FROM pipeline:src RUN find . -type f -regex ".*\.\(yaml\|yml\)" -exec sed -i s?quay.io/openshift/origin-metering-ansible-operator:4.6?some-reg/target-namespace/pipeline@metering-ansible-operator?g {} + RUN find . -type f -regex ".*\.\(yaml\|yml\)" -exec sed -i s?quay.io/openshift/origin-metering-reporting-operator:4.6?some-reg/target-namespace/pipeline@metering-reporting-operator?g {} + RUN find . -type f -regex ".*\.\(yaml\|yml\)" -exec sed -i s?quay.io/openshift/origin-metering-presto:4.6?some-reg/target-namespace/stable@metering-presto?g {} + RUN find . -type f -regex ".*\.\(yaml\|yml\)" -exec sed -i s?quay.io/openshift/origin-metering-hive:4.6?some-reg/target-namespace/stable@metering-hive?g {} + RUN find . -type f -regex ".*\.\(yaml\|yml\)" -exec sed -i s?quay.io/openshift/origin-metering-hadoop:4.6?some-reg/target-namespace/stable@metering-hadoop?g {} + RUN find . -type f -regex ".*\.\(yaml\|yml\)" -exec sed -i s?quay.io/openshift/origin-ghostunnel:4.6?some-reg/target-namespace/stable@ghostunnel?g {} +` client := &buildClient{Client: fakectrlruntimeclient.NewFakeClient( &imagev1.ImageStream{ ObjectMeta: metav1.ObjectMeta{ Namespace: "target-namespace", Name: api.StableImageStream, }, Status: imagev1.ImageStreamStatus{ PublicDockerImageRepository: "some-reg/target-namespace/stable", Tags: []imagev1.NamedTagEventList{{ Tag: "metering-presto", Items: []imagev1.TagEvent{{ Image: "metering-presto", }}, }, { Tag: "metering-hive", Items: []imagev1.TagEvent{{ Image: "metering-hive", }}, }, { Tag: "metering-hadoop", Items: []imagev1.TagEvent{{ Image: "metering-hadoop", }}, }, { Tag: "ghostunnel", Items: []imagev1.TagEvent{{ Image: "ghostunnel", }}, }}, }, }, &imagev1.ImageStream{ ObjectMeta: metav1.ObjectMeta{ Namespace: "target-namespace", Name: api.PipelineImageStream, }, Status: imagev1.ImageStreamStatus{ PublicDockerImageRepository: "some-reg/target-namespace/pipeline", Tags: 
[]imagev1.NamedTagEventList{{ Tag: "metering-ansible-operator", Items: []imagev1.TagEvent{{ Image: "metering-ansible-operator", }}, }, { Tag: "metering-reporting-operator", Items: []imagev1.TagEvent{{ Image: "metering-reporting-operator", }}, }}, }, })} s := bundleSourceStep{ config: api.BundleSourceStepConfiguration{ Substitutions: subs, }, jobSpec: &api.JobSpec{}, client: client, } s.jobSpec.SetNamespace("target-namespace") generatedDockerfile, err := s.bundleSourceDockerfile() if err != nil { t.Fatalf("Unexpected error: %v", err) } if expectedDockerfile != generatedDockerfile { t.Errorf("Generated bundle source dockerfile does not equal expected; generated dockerfile: %s", generatedDockerfile) } }
explode_data.jsonl/30262
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1294 }
[ 2830, 3393, 8409, 3608, 35, 13659, 1192, 1155, 353, 8840, 836, 8, 341, 2405, 3601, 35, 13659, 1192, 284, 1565, 30093, 15301, 25, 3548, 198, 47390, 1477, 659, 481, 1313, 282, 481, 26387, 330, 4908, 59, 7110, 7, 41466, 59, 91, 88, 101...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAt(t *testing.T) { assert := require.New(t) var input interface{} var out interface{} var ok bool input = []int{1, 3, 5} out, ok = At(input, 0) assert.True(ok) assert.Equal(1, out) out, ok = At(input, 1) assert.True(ok) assert.Equal(3, out) out, ok = At(input, 2) assert.True(ok) assert.Equal(5, out) out, ok = At(input, 99999) assert.False(ok) assert.Nil(out) }
explode_data.jsonl/45554
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 1655, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 1373, 7121, 1155, 340, 2405, 1946, 3749, 16094, 2405, 700, 3749, 16094, 2405, 5394, 1807, 271, 22427, 284, 3056, 396, 90, 16, 11, 220, 18, 11, 220, 20, 532, 13967, 11, 5394...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBenchlistRemove registers repeated failures for three of five
// validators until they are benched, then advances the benchlist's mock
// clock past every bench expiry and asserts each validator is eventually
// unbenched when the timer fires.
// NOTE(review): the interleaving of b.lock.Lock/Unlock with b.clock.Set and
// the timer-driven removal is order-sensitive, and the original formatting
// was flattened by the dataset export — kept byte-identical; a restyle could
// not be confidently verified to preserve the locking sequence.
func TestBenchlistRemove(t *testing.T) { vdrs := validators.NewSet() vdr0 := validators.GenerateRandomValidator(1000) vdr1 := validators.GenerateRandomValidator(1000) vdr2 := validators.GenerateRandomValidator(1000) vdr3 := validators.GenerateRandomValidator(1000) vdr4 := validators.GenerateRandomValidator(1000) // Total weight is 5100 errs := wrappers.Errs{} errs.Add( vdrs.AddWeight(vdr0.ID(), vdr0.Weight()), vdrs.AddWeight(vdr1.ID(), vdr1.Weight()), vdrs.AddWeight(vdr2.ID(), vdr2.Weight()), vdrs.AddWeight(vdr3.ID(), vdr3.Weight()), vdrs.AddWeight(vdr4.ID(), vdr4.Weight()), ) if errs.Errored() { t.Fatal(errs.Err) } threshold := 3 duration := 2 * time.Second maxPortion := 0.76 // can bench 3 of the 5 validators benchIntf, err := NewBenchlist( logging.NoLog{}, vdrs, threshold, minimumFailingDuration, duration, maxPortion, "", prometheus.NewRegistry(), ) if err != nil { t.Fatal(err) } b := benchIntf.(*benchlist) defer b.timer.Stop() now := time.Now() b.lock.Lock() b.clock.Set(now) b.lock.Unlock() // Register [threshold-1] failures for 3 validators for _, vdr := range []validators.Validator{vdr0, vdr1, vdr2} { for i := 0; i < threshold-1; i++ { b.RegisterFailure(vdr.ID()) } } // Advance the time past the min failing duration and register another failure // for each now = now.Add(minimumFailingDuration).Add(time.Second) b.lock.Lock() b.clock.Set(now) b.lock.Unlock() for _, vdr := range []validators.Validator{vdr0, vdr1, vdr2} { b.RegisterFailure(vdr.ID()) } // All 3 should be benched b.lock.Lock() assert.True(t, b.isBenched(vdr0.ID())) assert.True(t, b.isBenched(vdr1.ID())) assert.True(t, b.isBenched(vdr2.ID())) assert.Equal(t, 3, b.benchedQueue.Len()) assert.Equal(t, 3, b.benchlistSet.Len()) assert.Len(t, b.failureStreaks, 0) // Ensure the benched queue root has the min end time minEndTime := b.benchedQueue[0].benchedUntil benchedIDs := []ids.ShortID{vdr0.ID(), vdr1.ID(), vdr2.ID()} for _, benchedVdr := range b.benchedQueue { assert.Contains(t, benchedIDs, 
benchedVdr.validatorID) assert.True(t, !benchedVdr.benchedUntil.Before(minEndTime)) } // Set the benchlist's clock past when all validators should be unbenched // so that when its timer fires, it can remove them b.clock.Set(b.clock.Time().Add(duration)) b.lock.Unlock() // Make sure each validator is eventually removed assert.Eventually( t, func() bool { return !b.IsBenched(vdr0.ID()) }, duration+time.Second, // extra time.Second as grace period 100*time.Millisecond, ) assert.Eventually( t, func() bool { return !b.IsBenched(vdr1.ID()) }, duration+time.Second, 100*time.Millisecond, ) assert.Eventually( t, func() bool { return !b.IsBenched(vdr2.ID()) }, duration+time.Second, 100*time.Millisecond, ) }
explode_data.jsonl/25539
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1187 }
[ 2830, 3393, 33, 19762, 1607, 13021, 1155, 353, 8840, 836, 8, 341, 5195, 93246, 1669, 38588, 7121, 1649, 741, 5195, 3612, 15, 1669, 38588, 57582, 13999, 14256, 7, 16, 15, 15, 15, 340, 5195, 3612, 16, 1669, 38588, 57582, 13999, 14256, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLatestSignedRootDBClosed(t *testing.T) { ndb, done := openTestDBOrDie() cleanTestDB(t, ndb) tree, err := createTree(ndb, testonly.LogTree) if err != nil { t.Fatalf("createTree: %v", err) } s := NewLogStorage(ndb, nil) done(context.Background()) tx, err := s.SnapshotForTree(context.Background(), tree) require.EqualError(t, err, "sql: database is closed") require.Nil(t, tx) }
explode_data.jsonl/30696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 31992, 49312, 8439, 3506, 26884, 1155, 353, 8840, 836, 8, 341, 197, 303, 65, 11, 2814, 1669, 1787, 2271, 3506, 2195, 18175, 2822, 1444, 2675, 2271, 3506, 1155, 11, 15581, 65, 692, 51968, 11, 1848, 1669, 1855, 6533, 52976, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestFunctionParserLimits runs the expression parser over inputs that are
// valid up to a point and asserts, per case, exactly which trailing text is
// left unparsed (the "remaining" field). Cases cover trailing comments,
// stray brackets, path literals, match expressions and operator line breaks;
// the deprecated flag routes a case through ParseInterpolation instead of
// queryParser.
// NOTE(review): several inputs are raw strings whose embedded newlines were
// flattened by the dataset export — kept byte-identical since the original
// line breaks inside the literals cannot be recovered from this view.
func TestFunctionParserLimits(t *testing.T) { tests := map[string]struct { input string remaining string deprecated bool }{ "nothing": { input: `json("foo") + meta("bar")`, remaining: ``, }, "space before": { input: ` json("foo") + meta("bar")`, remaining: ``, }, "space before 2": { input: ` json("foo") + meta("bar")`, remaining: ``, }, "unfinished comment": { input: `json("foo") + meta("bar") # Here's a comment`, remaining: ` # Here's a comment`, }, "extra text": { input: `json("foo") and this`, remaining: ` and this`, }, "extra text 2": { input: `json("foo") + meta("bar") and this`, remaining: ` and this`, }, "extra text 3": { input: `json("foo")+meta("bar")and this`, remaining: `and this`, }, "extra text 4": { input: `json("foo")+meta("bar") and this`, remaining: ` and this`, }, "squiggly bracket": { input: `json("foo")}`, remaining: `}`, }, "normal bracket": { input: `json("foo"))`, remaining: `)`, }, "normal bracket 2": { input: `json("foo"))))`, remaining: `)))`, }, "normal bracket 3": { input: `json("foo")) + json("bar")`, remaining: `) + json("bar")`, }, "path literals": { input: `this.foo bar baz`, remaining: ` bar baz`, }, "path literals 2": { input: `this.foo . bar baz`, remaining: ` . 
bar baz`, }, "brackets at root": { input: `(json().foo | "fallback").from_all()`, remaining: ``, }, "brackets after root": { input: `this.root.(json().foo | "fallback").from_all()`, remaining: ``, }, "brackets after root 2": { input: `this.root.(json().foo | "fallback").from_all().bar.baz`, remaining: ``, }, "this at root": { input: `this.foo.bar and then this`, remaining: ` and then this`, }, "path literal at root": { input: `foo.bar and then this`, remaining: ` and then this`, }, "match expression": { input: `match null { "foo" == "bar" => "baz" 5 > 10 => "or this" } not this`, remaining: "\nnot this", }, "operators and line breaks": { input: `(5 * 8) + 6 - 5 and also this`, remaining: " and also this", }, } for name, test := range tests { test := test t.Run(name, func(t *testing.T) { t.Parallel() var res Result if test.deprecated { res = ParseInterpolation(GlobalContext())([]rune(test.input)) } else { res = queryParser(Context{ Functions: query.AllFunctions, Methods: query.AllMethods, })([]rune(test.input)) } require.Nil(t, res.Err) assert.Equal(t, test.remaining, string(res.Remaining)) }) } }
explode_data.jsonl/5977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1257 }
[ 2830, 3393, 5152, 6570, 94588, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 22427, 414, 914, 198, 197, 197, 43436, 220, 914, 198, 197, 197, 38189, 1807, 198, 197, 59403, 197, 197, 1, 41212, 788, 341, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestHandleMemExceeded creates two pods that each request 90 memory units
// on a node with capacity 100, makes the second pod newer by creation
// timestamp, and verifies the kubelet rejects the newer pod (its status in
// the status map must be PodFailed) while admitting the older one.
// NOTE(review): the original multi-line formatting, including inline //
// comments, was flattened onto one line by the dataset export; the code is
// kept byte-identical rather than reconstructed.
func TestHandleMemExceeded(t *testing.T) { testKubelet := newTestKubelet(t) kl := testKubelet.kubelet testKubelet.fakeCadvisor.On("MachineInfo").Return(&cadvisorapi.MachineInfo{MemoryCapacity: 100}, nil) testKubelet.fakeCadvisor.On("DockerImagesFsInfo").Return(cadvisorapiv2.FsInfo{}, nil) testKubelet.fakeCadvisor.On("RootFsInfo").Return(cadvisorapiv2.FsInfo{}, nil) spec := api.PodSpec{Containers: []api.Container{{Resources: api.ResourceRequirements{ Requests: api.ResourceList{ "memory": resource.MustParse("90"), }, }}}} pods := []*api.Pod{ { ObjectMeta: api.ObjectMeta{ UID: "123456789", Name: "newpod", Namespace: "foo", }, Spec: spec, }, { ObjectMeta: api.ObjectMeta{ UID: "987654321", Name: "oldpod", Namespace: "foo", }, Spec: spec, }, } // Make sure the Pods are in the reverse order of creation time. pods[1].CreationTimestamp = unversioned.NewTime(time.Now()) pods[0].CreationTimestamp = unversioned.NewTime(time.Now().Add(1 * time.Second)) // The newer pod should be rejected. notfittingPod := pods[0] kl.HandlePodAdditions(pods) // Check pod status stored in the status map. status, found := kl.statusManager.GetPodStatus(notfittingPod.UID) if !found { t.Fatalf("status of pod %q is not found in the status map", notfittingPod.UID) } if status.Phase != api.PodFailed { t.Fatalf("expected pod status %q. Got %q.", api.PodFailed, status.Phase) } }
explode_data.jsonl/43327
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 594 }
[ 2830, 3393, 6999, 18816, 840, 94206, 1155, 353, 8840, 836, 8, 341, 18185, 42, 3760, 1149, 1669, 501, 2271, 42, 3760, 1149, 1155, 340, 197, 10561, 1669, 1273, 42, 3760, 1149, 5202, 3760, 1149, 198, 18185, 42, 3760, 1149, 94624, 34, 817...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSetDisabledTags(t *testing.T) { tracer := mocktracer.New() tracing := newProxyTracing(&OpenTracingParams{ Tracer: tracer, ExcludeTags: []string{ SkipperRouteIDTag, }, }) span := tracer.StartSpan("test") defer span.Finish() tracing.setTag(span, HTTPStatusCodeTag, 200) tracing.setTag(span, ComponentTag, "skipper") tracing.setTag(span, SkipperRouteIDTag, "long_route_id") mockSpan := span.(*mocktracer.MockSpan) tags := mockSpan.Tags() _, ok := tags[HTTPStatusCodeTag] _, ok2 := tags[ComponentTag] _, ok3 := tags[SkipperRouteIDTag] if !ok || !ok2 { t.Errorf("could not set tags although they were not configured to be excluded") } if ok3 { t.Errorf("a tag was set although it was configured to be excluded") } }
explode_data.jsonl/50656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 1649, 25907, 15930, 1155, 353, 8840, 836, 8, 341, 25583, 9584, 1669, 7860, 94941, 7121, 741, 25583, 4527, 1669, 501, 16219, 1282, 4527, 2099, 5002, 1282, 4527, 4870, 515, 197, 197, 1282, 9584, 25, 64306, 345, 197, 197, 95239...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestManager_GenerateNewFlow(t *testing.T) { log.SetLevel(log.DebugLevel) config := tpflow.Configs{FlowStorageDir: "../var/flow_storage"} man, err := NewManager(config) if err != nil { t.Error(err) } flow := man.GenerateNewFlow() data, _ := json.Marshal(flow) man.UpdateFlowFromBinJson(flow.Id, data) }
explode_data.jsonl/28848
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 2043, 2646, 13220, 3564, 18878, 1155, 353, 8840, 836, 8, 341, 6725, 4202, 4449, 12531, 20345, 4449, 340, 25873, 1669, 18101, 4965, 10753, 82, 90, 18878, 5793, 6184, 25, 7005, 947, 14, 4965, 23310, 16707, 197, 1515, 11, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestControl_PrintStorageFormatResponse runs table-driven cases over mock
// storage-format responses (host errors, SCM/NVMe failures, multi-host
// groupings) and diffs the rendered error table plus format summary against
// the expected output string for each case.
// NOTE(review): the expPrintStr literals are whitespace-sensitive multi-line
// raw strings whose original line breaks were flattened by the dataset
// export — kept byte-identical; a restyle could not reproduce the literals
// faithfully.
func TestControl_PrintStorageFormatResponse(t *testing.T) { for name, tc := range map[string]struct { resp *control.StorageFormatResp expPrintStr string }{ "empty response": { resp: &control.StorageFormatResp{}, }, "server error": { resp: &control.StorageFormatResp{ HostErrorsResp: control.MockHostErrorsResp(t, &control.MockHostError{Hosts: "host1", Error: "failed"}), }, expPrintStr: ` Errors: Hosts Error ----- ----- host1 failed `, }, "2 SCM, 2 NVMe; first SCM fails": { resp: control.MockFormatResp(t, control.MockFormatConf{ Hosts: 1, ScmPerHost: 2, ScmFailures: control.MockFailureMap(0), NvmePerHost: 2, }), expPrintStr: ` Errors: Hosts Error ----- ----- host1 /mnt/1 format failed Format Summary: Hosts SCM Devices NVMe Devices ----- ----------- ------------ host1 1 1 `, }, "2 SCM, 2 NVMe; second NVMe fails": { resp: control.MockFormatResp(t, control.MockFormatConf{ Hosts: 1, ScmPerHost: 2, NvmePerHost: 2, NvmeFailures: control.MockFailureMap(1), }), expPrintStr: ` Errors: Hosts Error ----- ----- host1 NVMe device 2 format failed Format Summary: Hosts SCM Devices NVMe Devices ----- ----------- ------------ host1 2 1 `, }, "2 SCM, 2 NVMe": { resp: control.MockFormatResp(t, control.MockFormatConf{ Hosts: 1, ScmPerHost: 2, NvmePerHost: 2, }), expPrintStr: ` Format Summary: Hosts SCM Devices NVMe Devices ----- ----------- ------------ host1 2 2 `, }, "2 Hosts, 2 SCM, 2 NVMe; first SCM fails": { resp: control.MockFormatResp(t, control.MockFormatConf{ Hosts: 2, ScmPerHost: 2, ScmFailures: control.MockFailureMap(0), NvmePerHost: 2, }), expPrintStr: ` Errors: Hosts Error ----- ----- host[1-2] /mnt/1 format failed Format Summary: Hosts SCM Devices NVMe Devices ----- ----------- ------------ host[1-2] 1 1 `, }, "2 Hosts, 2 SCM, 2 NVMe": { resp: control.MockFormatResp(t, control.MockFormatConf{ Hosts: 2, ScmPerHost: 2, NvmePerHost: 2, }), expPrintStr: ` Format Summary: Hosts SCM Devices NVMe Devices ----- ----------- ------------ host[1-2] 2 2 `, }, } { t.Run(name, func(t 
*testing.T) { var bld strings.Builder if err := PrintResponseErrors(tc.resp, &bld); err != nil { t.Fatal(err) } if err := PrintStorageFormatMap(tc.resp.HostStorage, &bld); err != nil { t.Fatal(err) } if diff := cmp.Diff(strings.TrimLeft(tc.expPrintStr, "\n"), bld.String()); diff != "" { t.Fatalf("unexpected format string (-want, +got):\n%s\n", diff) } }) } }
explode_data.jsonl/70553
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1446 }
[ 2830, 3393, 3273, 45788, 5793, 4061, 2582, 1155, 353, 8840, 836, 8, 341, 2023, 829, 11, 17130, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 34653, 286, 353, 2865, 43771, 4061, 36555, 198, 197, 48558, 8994, 2580, 914, 198, 197, 59403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNewCatchUpClient(t *testing.T) { lg, err := logger.New(&logger.Config{ Level: "debug", OutputPath: []string{"stdout"}, ErrOutputPath: []string{"stderr"}, Encoding: "console", }) require.NoError(t, err) h := comm.NewCatchUpClient(lg, nil) require.NotNil(t, h) }
explode_data.jsonl/70491
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 3564, 57760, 2324, 2959, 1155, 353, 8840, 836, 8, 341, 8810, 70, 11, 1848, 1669, 5925, 7121, 2099, 9786, 10753, 515, 197, 197, 4449, 25, 260, 330, 8349, 756, 197, 80487, 1820, 25, 262, 3056, 917, 4913, 36358, 7115, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInitializeSessionTimers(t *testing.T) { dir1, s1 := testServer(t) defer os.RemoveAll(dir1) defer s1.Shutdown() testutil.WaitForLeader(t, s1.RPC, "dc1") state := s1.fsm.State() state.EnsureNode(1, structs.Node{"foo", "127.0.0.1"}) session := &structs.Session{ ID: generateUUID(), Node: "foo", TTL: "10s", } if err := state.SessionCreate(100, session); err != nil { t.Fatalf("err: %v", err) } // Reset the session timers err := s1.initializeSessionTimers() if err != nil { t.Fatalf("err: %v", err) } // Check that we have a timer _, ok := s1.sessionTimers[session.ID] if !ok { t.Fatalf("missing session timer") } }
explode_data.jsonl/73910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 283 }
[ 2830, 3393, 9928, 5283, 20217, 388, 1155, 353, 8840, 836, 8, 341, 48532, 16, 11, 274, 16, 1669, 1273, 5475, 1155, 340, 16867, 2643, 84427, 14161, 16, 340, 16867, 274, 16, 10849, 18452, 2822, 18185, 1314, 99153, 52621, 1155, 11, 274, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRoute_GetReadinessTimeoutSeconds(t *testing.T) { route := Route{} readinessTimeoutSeconds := 60 route.readinessTimeoutSeconds = &readinessTimeoutSeconds assert.Equal(t, 60, *route.GetReadinessTimeoutSeconds()) }
explode_data.jsonl/67793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 4899, 13614, 4418, 1880, 7636, 15343, 1155, 353, 8840, 836, 8, 341, 7000, 2133, 1669, 9572, 16094, 37043, 1880, 7636, 15343, 1669, 220, 21, 15, 198, 7000, 2133, 4125, 1880, 7636, 15343, 284, 609, 878, 1880, 7636, 15343, 271,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSingerTls(t *testing.T) { cert, err := signHost(&GoproxyCa, []string{"example.com", "1.1.1.1", "localhost"}) orFatal("singHost", err, t) cert.Leaf, err = x509.ParseCertificate(cert.Certificate[0]) orFatal("ParseCertificate", err, t) expected := "key verifies with Go" server := httptest.NewUnstartedServer(ConstantHanlder(expected)) defer server.Close() server.TLS = &tls.Config{Certificates: []tls.Certificate{*cert, GoproxyCa}} server.TLS.BuildNameToCertificate() server.StartTLS() certpool := x509.NewCertPool() certpool.AddCert(GoproxyCa.Leaf) tr := &http.Transport{ TLSClientConfig: &tls.Config{RootCAs: certpool}, } asLocalhost := strings.Replace(server.URL, "127.0.0.1", "localhost", -1) req, err := http.NewRequest("GET", asLocalhost, nil) orFatal("NewRequest", err, t) resp, err := tr.RoundTrip(req) orFatal("RoundTrip", err, t) txt, err := ioutil.ReadAll(resp.Body) orFatal("ioutil.ReadAll", err, t) if string(txt) != expected { t.Errorf("Expected '%s' got '%s'", expected, string(txt)) } browser := getBrowser(os.Args) if browser != "" { exec.Command(browser, asLocalhost).Run() time.Sleep(10 * time.Second) } }
explode_data.jsonl/80664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 468 }
[ 2830, 3393, 50, 5137, 51, 4730, 1155, 353, 8840, 836, 8, 341, 1444, 529, 11, 1848, 1669, 1841, 9296, 2099, 38, 45926, 4130, 22571, 11, 3056, 917, 4913, 8687, 905, 497, 330, 16, 13, 16, 13, 16, 13, 16, 497, 330, 8301, 23625, 81166,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInsertCsvWithTypes(t *testing.T) { cache := newTestCache(t) input := []TestData{{S: "Foo", I: 123, F: 1.5, B: true}} cases := [][]keyValProperty{ {{"I", "string", "123"}}, {{"F", "string", "1.5"}, {"B", "string", "true"}}, {{"I", "enum", "123"}}, {{"F", "enum", "1.5"}, {"B", "enum", "true"}}, } for _, format := range []string{"kv", "json"} { for _, tc := range cases { t.Run(fmt.Sprintf("Types %s", toKeyVals(tc, format)), func(t *testing.T) { cache.insertCsv("FOO", map[string]string{"X-QCache-types": toKeyVals(tc, format)}, input) output := make([]map[string]interface{}, 0) cache.queryJson("FOO", nil, "{}", "GET", &output) assertEqual(t, 1, len(output)) for _, kv := range tc { if len(output) > 0 { sVal, ok := output[0][kv.key].(string) assertTrue(t, ok) assertEqual(t, kv.expected, sVal) } } }) } } }
explode_data.jsonl/8983
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 13780, 94826, 2354, 4173, 1155, 353, 8840, 836, 8, 341, 52680, 1669, 501, 2271, 8233, 1155, 340, 22427, 1669, 3056, 83920, 2979, 50, 25, 330, 40923, 497, 358, 25, 220, 16, 17, 18, 11, 434, 25, 220, 16, 13, 20, 11, 425,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestApplicationContext_NameEquivalence(t *testing.T) { t.Run("", func(t *testing.T) { c := gs.New() c.Object(DefaultRegistry) c.Provide(NewRegistry) err := c.Refresh() assert.Error(t, err, "duplicate beans") }) t.Run("", func(t *testing.T) { c := gs.New() bd := c.Object(&registryFactory{}) c.Provide(func(f *registryFactory) Registry { return f.Create() }, bd) c.Provide(NewRegistryInterface) err := c.Refresh() assert.Error(t, err, "duplicate beans") }) }
explode_data.jsonl/17430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 19736, 19015, 24509, 88790, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 19814, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1444, 1669, 28081, 7121, 741, 197, 1444, 8348, 87874, 15603, 340, 197, 1444, 7763, 19448, 35063, 15603, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteSubaccountName(t *testing.T) { t.Parallel() if err := f.DeleteSubaccount(context.Background(), ""); !errors.Is(err, errSubaccountNameMustBeSpecified) { t.Errorf("expected %v, but received: %s", errSubaccountNameMustBeSpecified, err) } if !areTestAPIKeysSet() || !canManipulateRealOrders { t.Skip("skipping test, either api keys or canManipulateRealOrders isn't set") } _, err := f.CreateSubaccount(context.Background(), "subzero") if err != nil { t.Fatal(err) } if err := f.DeleteSubaccount(context.Background(), "subzero"); err != nil { t.Error(err) } }
explode_data.jsonl/15248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 6435, 3136, 4608, 675, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 1848, 1669, 282, 18872, 3136, 4608, 5378, 19047, 1507, 93754, 753, 7650, 4506, 3964, 11, 1848, 3136, 4608, 675, 31776, 3430, 8327, 1870, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidatePoliciesFails(t *testing.T) { tests := []struct { policies []v1.PolicyReference msg string }{ { policies: []v1.PolicyReference{ { Name: "", }, }, msg: "missing name", }, { policies: []v1.PolicyReference{ { Name: "-invalid", }, }, msg: "invalid name", }, { policies: []v1.PolicyReference{ { Name: "my-policy", Namespace: "-invalid", }, }, msg: "invalid namespace", }, { policies: []v1.PolicyReference{ { Name: "my-policy", Namespace: "default", }, { Name: "my-policy", Namespace: "default", }, }, msg: "duplicated policies", }, { policies: []v1.PolicyReference{ { Name: "my-policy", Namespace: "default", }, { Name: "my-policy", }, }, msg: "duplicated policies with inferred namespace", }, } for _, test := range tests { allErrs := validatePolicies(test.policies, field.NewPath("policies"), "default") if len(allErrs) == 0 { t.Errorf("validatePolicies() returned no errors for invalid input for the case of %s", test.msg) } } }
explode_data.jsonl/65821
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 585 }
[ 2830, 3393, 17926, 47, 42038, 37, 6209, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 3223, 42038, 3056, 85, 16, 1069, 8018, 8856, 198, 197, 21169, 414, 914, 198, 197, 59403, 197, 197, 515, 298, 3223, 42038, 25, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConvertibleTo(t *testing.T) { for _, test := range []struct { v, t Type want bool }{ {Typ[Int], Typ[Int], true}, {Typ[Int], Typ[Float32], true}, {newDefined(Typ[Int]), Typ[Int], true}, {newDefined(new(Struct)), new(Struct), true}, {newDefined(Typ[Int]), new(Struct), false}, {Typ[UntypedInt], Typ[Int], true}, } { if got := ConvertibleTo(test.v, test.t); got != test.want { t.Errorf("ConvertibleTo(%v, %v) = %t, want %t", test.v, test.t, got, test.want) } } }
explode_data.jsonl/56576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 88816, 1249, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 5195, 11, 259, 3990, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 90, 12834, 36261, 1125, 17518, 36261, 1125, 830, 1583, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_combinationSum4(t *testing.T) { type args struct { nums []int target int } tests := []struct { name string args args want int }{ {"", args{[]int{1, 2, 3}, 4}, 7}, {"", args{[]int{9}, 3}, 0}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := combinationSum4(tt.args.nums, tt.args.target); got != tt.want { t.Errorf("combinationSum4() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/70399
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 34454, 2554, 9190, 19, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 22431, 82, 256, 3056, 396, 198, 197, 28861, 526, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRuneMarshalUnmarshal(t *testing.T) { tt := &TestType{} r := 'r' rp := &r tt.Rune = r tt.RunePtr = &r tt.RunePtrPtr = &rp tt.RuneSlice = []rune{'a', 'b', '😳'} bts, err := tt.MarshalMsg(nil) if err != nil { t.Errorf("%v", err) } var out TestType if _, err := (&out).UnmarshalMsg(bts); err != nil { t.Errorf("%v", err) } if r != out.Rune { t.Errorf("rune mismatch: expected %c found %c", r, out.Rune) } if r != *out.RunePtr { t.Errorf("rune ptr mismatch: expected %c found %c", r, *out.RunePtr) } if r != **out.RunePtrPtr { t.Errorf("rune ptr ptr mismatch: expected %c found %c", r, **out.RunePtrPtr) } if !reflect.DeepEqual(tt.RuneSlice, out.RuneSlice) { t.Errorf("rune slice mismatch") } }
explode_data.jsonl/76328
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 352 }
[ 2830, 3393, 49, 2886, 55438, 1806, 27121, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 609, 2271, 929, 16094, 7000, 1669, 364, 81, 1248, 7000, 79, 1669, 609, 81, 198, 3244, 83, 2013, 2886, 284, 435, 198, 3244, 83, 2013, 2886, 5348,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRequest_StateSession(t *testing.T) { tests := map[string]struct { request *alice.Request want interface{} }{ "when state is empty 0": { request: getReq(0), want: nil, }, "when state is empty 1":{ request: getReq(1), want: nil, }, "when state is empty 2":{ request: getReq(2), want: nil, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { req := tt.request got := req.StateSession(""); if !assert.Equal(t, tt.want, got) { t.Errorf("Request.StateSession() = %v, want %v", got, tt.want) } }) } t.Run("when state is struct", func(t *testing.T) { req := getReq(3) want := make(map[string]interface{}) want["int_value"] = 42 assert.Equal(t, 42.0, req.StateSession("int_value")) assert.Equal(t, "exampleString", req.StateSession("string_value")) assert.Equal(t, []interface{}{1.0,2.0,3.0,4.0}, req.StateSession("array_value")) assert.Equal(t, map[string]interface{}{"one":"one"}, req.StateSession("struct_value")) stateJson, err := req.StateSessionAsJson() if assert.NoError(t, err) { assert.Equal(t, `{"array_value":[1,2,3,4],"int_value":42,"string_value":"exampleString","struct_value":{"one":"one"}}`, stateJson) } }) }
explode_data.jsonl/18236
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 541 }
[ 2830, 3393, 1900, 24773, 5283, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 23555, 353, 63195, 9659, 198, 197, 50780, 262, 3749, 16094, 197, 59403, 197, 197, 1, 9309, 1584, 374, 4287, 220, 15, 788, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRevokeProposalRule(t *testing.T) { env, exec, stateDB, kvdb := InitEnv() // PropRule testPropRule(t, env, exec, stateDB, kvdb, true) //RevokeProposalRule revokeProposalRule(t, env, exec, stateDB, kvdb, false) }
explode_data.jsonl/34706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 693, 7621, 98637, 11337, 1155, 353, 8840, 836, 8, 341, 57538, 11, 3883, 11, 1584, 3506, 11, 16178, 1999, 1669, 15690, 14359, 741, 197, 322, 3911, 11337, 198, 18185, 2008, 11337, 1155, 11, 6105, 11, 3883, 11, 1584, 3506, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEth_NewBlockFilter(t *testing.T) { rpcRes := call(t, "eth_newBlockFilter", []string{}) var ID string err := json.Unmarshal(rpcRes.Result, &ID) require.NoError(t, err) }
explode_data.jsonl/851
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 65390, 39582, 4713, 5632, 1155, 353, 8840, 836, 8, 341, 7000, 3992, 1061, 1669, 1618, 1155, 11, 330, 769, 5921, 4713, 5632, 497, 3056, 917, 6257, 692, 2405, 3034, 914, 198, 9859, 1669, 2951, 38097, 2601, 3992, 1061, 18456, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestLogger_print(t *testing.T) { t.Parallel() t.Run("success(severity<l.config.SeverityThreshold)", func(t *testing.T) { t.Parallel() // prepare l := Must(New(devnull, WithSeverityThreshold(1))) // run l.write(time.Now(), DEFAULT, testLogEntryMessage) }) t.Run("success(UppercaseSeverity)", func(t *testing.T) { t.Parallel() // prepare l := Must(New(devnull)) // run l.write(time.Now(), DEFAULT, testLogEntryMessage) }) t.Run("success(LowercaseSeverity)", func(t *testing.T) { t.Parallel() // prepare l := Must(New(devnull, WithUseUppercaseSeverity(false))) // run l.write(time.Now(), DEFAULT, testLogEntryMessage) }) t.Run("success(UseShortCaller)", func(t *testing.T) { t.Parallel() // prepare l := Must(New(devnull)) // run l.write(time.Now(), DEFAULT, testLogEntryMessage) }) t.Run("success(UseLongCaller)", func(t *testing.T) { t.Parallel() // prepare l := Must(New(devnull, WithUseShortCaller(false))) // run l.write(time.Now(), DEFAULT, testLogEntryMessage) }) t.Run("success(UseHostnameField=true)", func(t *testing.T) { t.Parallel() // prepare l := Must(New(devnull, WithUseHostnameField(true))) // run l.write(time.Now(), DEFAULT, testLogEntryMessage) }) t.Run("success(UseHostnameField=false)", func(t *testing.T) { t.Parallel() // prepare l := Must(New(devnull)) // run l.write(time.Now(), DEFAULT, testLogEntryMessage) }) t.Run("success(NoField)", func(t *testing.T) { t.Parallel() // prepare buf := bytes.NewBuffer(nil) l := Must(New(buf)) l.config = &Config{} // run l.write(time.Now(), DEFAULT, testLogEntryMessage) // check const expect = `{}` actual := buf.String() FailIfNotEqual(t, expect, actual) }) t.Run("error(typeNone)", func(t *testing.T) { t.Parallel() // prepare buf := bytes.NewBuffer(nil) l := Must(New(buf, WithUseCallerField(false))) // run l.write(testTimestampValue, DEFAULT, testLogEntryMessage, Field{key: "noneField"}) // check const expect = `{"timestamp":"2021-01-01T10:23:45.6789+09:00","severity":"DEFAULT","message":"` + testLogEntryMessageJSONEscape + 
`","noneField":"ERROR: TYPE NONE"}` + defaultLineSeparator actualErr := buf.String() FailIfNotEqual(t, expect, actualErr) }) t.Run("error(undefinedField)", func(t *testing.T) { t.Parallel() // prepare buf := bytes.NewBuffer(nil) l := Must(New(buf, WithUseCallerField(false))) // run l.write(testTimestampValue, DEFAULT, testLogEntryMessage, Field{key: "undefinedField", t: math.MaxUint8}) // check const expect = `{"timestamp":"2021-01-01T10:23:45.6789+09:00","severity":"DEFAULT","message":"` + testLogEntryMessageJSONEscape + `","undefinedField":"ERROR: UNDEFINED TYPE: 255"}` + defaultLineSeparator actual := buf.String() FailIfNotEqual(t, expect, actual) }) t.Run("success(OutputMatch)", func(t *testing.T) { t.Parallel() // prepare buf := bytes.NewBuffer(nil) l := Must(New(buf, WithTimestampFieldFormat(""), WithUseHostnameField(true))) // run l.write(testTimestampValue, INFO, testLogEntryMessage) // check expect := regexp.MustCompile(`^{"timestamp":1609464225.6789,"severity":"INFO","hostname":".+","caller":"[^"]+:[0-9]+","message":".+"}` + l.config.LineSeparator) actual := buf.String() FailIfNotRegexpMatchString(t, expect, actual) }) t.Run("error(Write)", func(t *testing.T) { t.Parallel() // prepare buf := bytes.NewBuffer(nil) replacer := Must(NewWithConfig(buf, NewConfig())) replacer.config.UseCallerField = false backup := defaultLogger t.Cleanup(func() { defaultLogger = backup }) defaultLogger = replacer // run noSuchFile, _ := os.OpenFile("/tmp/no/such/file", os.O_RDWR, 0o600) l := Must(New(noSuchFile)) l.write(testTimestampValue, DEFAULT, testLogEntryMessage) // check const expect = `{"timestamp":"2021-01-01T10:23:45.6789+09:00","severity":"ERROR","message":"rec.(*Logger).write: writer=(*os.File)(nil): Write: invalid argument","error":"rec.(*Logger).write: writer=(*os.File)(nil): Write: invalid argument"}` + defaultLineSeparator actual := buf.String() FailIfNotEqual(t, expect, actual) }) }
explode_data.jsonl/71323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1641 }
[ 2830, 3393, 7395, 10064, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 16708, 445, 5630, 10478, 26472, 84783, 5423, 808, 2054, 487, 37841, 11583, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3244, 41288, 7957, 2822, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBrokenConfig(t *testing.T) { for _, test := range []struct { name string cfg *Config }{ {"nil-configuraion", nil}, {"empty-configuration", &Config{}}, {"missing-cert", &Config{BaseURL: "dontcare", TLSClientConfig: TLSClientConfig{CAFile: "missing"}}}, {"broken-cert", &Config{BaseURL: "dontcare", TLSClientConfig: TLSClientConfig{CAFile: "testdata/broken.pem"}}}, } { t.Run(fmt.Sprintf("%v", test.cfg), func(t *testing.T) { _, err := NewAdapter(test.cfg, IngressAPIVersionNetworking, testIngressFilter, testSecurityGroup, testSSLPolicy, aws.LoadBalancerTypeApplication, DefaultClusterLocalDomain, false) if err == nil { t.Error("expected an error") } }) } }
explode_data.jsonl/6734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 90526, 2648, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 11609, 914, 198, 197, 50286, 220, 353, 2648, 198, 197, 59403, 197, 197, 4913, 8385, 25130, 5690, 290, 497, 2092, 1583, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDriver(t *testing.T) { st := assert.New(t) st.Equal("ini", Driver.Name()) // st.IsType(new(Encoder), JSONDriver.GetEncoder()) c := config.NewEmpty("test") st.False(c.HasDecoder(config.Ini)) c.AddDriver(Driver) st.True(c.HasDecoder(config.Ini)) st.True(c.HasEncoder(config.Ini)) _, err := Encoder(map[string]interface{}{"k": "v"}) st.Nil(err) _, err = Encoder("invalid") st.Error(err) }
explode_data.jsonl/26377
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 11349, 1155, 353, 8840, 836, 8, 341, 18388, 1669, 2060, 7121, 1155, 692, 18388, 12808, 445, 6591, 497, 14577, 2967, 2398, 197, 322, 357, 4506, 929, 1755, 7, 19921, 701, 4718, 11349, 2234, 19921, 12367, 1444, 1669, 2193, 7121...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSelect_Raw(t *testing.T) { shardMapper := ShardMapper{ MapShardsFn: func(sources influxql.Sources, _ influxql.TimeRange) query.ShardGroup { return &ShardGroup{ Fields: map[string]influxql.DataType{ "f": influxql.Float, "i": influxql.Integer, "u": influxql.Unsigned, "s": influxql.String, "b": influxql.Boolean, }, CreateIteratorFn: func(ctx context.Context, m *influxql.Measurement, opt query.IteratorOptions) (query.Iterator, error) { if m.Name != "cpu" { t.Fatalf("unexpected source: %s", m.Name) } if !reflect.DeepEqual(opt.Aux, []influxql.VarRef{ {Val: "b", Type: influxql.Boolean}, {Val: "f", Type: influxql.Float}, {Val: "i", Type: influxql.Integer}, {Val: "s", Type: influxql.String}, {Val: "u", Type: influxql.Unsigned}, }) { t.Fatalf("unexpected auxiliary fields: %v", opt.Aux) } return &FloatIterator{Points: []query.FloatPoint{ {Name: "cpu", Time: 0 * Second, Aux: []interface{}{ true, float64(20), int64(20), "a", uint64(20)}}, {Name: "cpu", Time: 5 * Second, Aux: []interface{}{ false, float64(10), int64(10), "b", uint64(10)}}, {Name: "cpu", Time: 9 * Second, Aux: []interface{}{ true, float64(19), int64(19), "c", uint64(19)}}, }}, nil }, } }, } stmt := MustParseSelectStatement(`SELECT f, i, u, s, b FROM cpu`) stmt.OmitTime = true cur, err := query.Select(context.Background(), stmt, &shardMapper, query.SelectOptions{}) if err != nil { t.Errorf("parse error: %s", err) } else if a, err := ReadCursor(cur); err != nil { t.Fatalf("unexpected error: %s", err) } else if diff := cmp.Diff([]query.Row{ { Time: 0 * Second, Series: query.Series{ Name: "cpu", }, Values: []interface{}{float64(20), int64(20), uint64(20), "a", true}, }, { Time: 5 * Second, Series: query.Series{ Name: "cpu", }, Values: []interface{}{float64(10), int64(10), uint64(10), "b", false}, }, { Time: 9 * Second, Series: query.Series{ Name: "cpu", }, Values: []interface{}{float64(19), int64(19), uint64(19), "c", true}, }, }, a); diff != "" { t.Errorf("unexpected points:\n%s", diff) } }
explode_data.jsonl/39404
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1016 }
[ 2830, 3393, 3379, 2568, 672, 1155, 353, 8840, 836, 8, 341, 36196, 567, 10989, 1669, 95366, 10989, 515, 197, 26873, 2016, 2347, 24911, 25, 2915, 1141, 2360, 52852, 1470, 808, 2360, 11, 716, 52852, 1470, 16299, 6046, 8, 3239, 10849, 567, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParseResourceParameterType(t *testing.T) { t.Parallel() result, errs := ParseProgram(` fun test(x: @X) {} `) require.Empty(t, errs) utils.AssertEqualWithDiff(t, []ast.Declaration{ &ast.FunctionDeclaration{ Identifier: ast.Identifier{ Identifier: "test", Pos: ast.Position{Offset: 13, Line: 2, Column: 12}, }, ReturnTypeAnnotation: &ast.TypeAnnotation{ IsResource: false, Type: &ast.NominalType{ Identifier: ast.Identifier{ Identifier: "", Pos: ast.Position{Offset: 23, Line: 2, Column: 22}, }, }, StartPos: ast.Position{Offset: 23, Line: 2, Column: 22}, }, ParameterList: &ast.ParameterList{ Parameters: []*ast.Parameter{ { Label: "", Identifier: ast.Identifier{ Identifier: "x", Pos: ast.Position{Offset: 18, Line: 2, Column: 17}, }, TypeAnnotation: &ast.TypeAnnotation{ IsResource: true, Type: &ast.NominalType{ Identifier: ast.Identifier{ Identifier: "X", Pos: ast.Position{Offset: 22, Line: 2, Column: 21}, }, }, StartPos: ast.Position{Offset: 21, Line: 2, Column: 20}, }, Range: ast.Range{ StartPos: ast.Position{Offset: 18, Line: 2, Column: 17}, EndPos: ast.Position{Offset: 22, Line: 2, Column: 21}, }, }, }, Range: ast.Range{ StartPos: ast.Position{Offset: 17, Line: 2, Column: 16}, EndPos: ast.Position{Offset: 23, Line: 2, Column: 22}, }, }, FunctionBlock: &ast.FunctionBlock{ Block: &ast.Block{ Range: ast.Range{ StartPos: ast.Position{Offset: 25, Line: 2, Column: 24}, EndPos: ast.Position{Offset: 26, Line: 2, Column: 25}, }, }, }, StartPos: ast.Position{Offset: 9, Line: 2, Column: 8}, }, }, result.Declarations(), ) }
explode_data.jsonl/35993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 953 }
[ 2830, 3393, 14463, 4783, 4971, 929, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 9559, 11, 70817, 1669, 14775, 10690, 61528, 286, 2464, 1273, 2075, 25, 569, 55, 8, 5613, 197, 24183, 17957, 11180, 1155, 11, 70817, 692, 80206, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExportImportPubKey(t *testing.T) { // make the storage with reasonable defaults cstore := NewInMemory() // CreateMnemonic a private-public key pair and ensure consistency notPasswd := "n9y25ah7" info, _, err := cstore.CreateMnemonic("john", English, notPasswd, Secp256k1) require.Nil(t, err) require.NotEqual(t, info, "") require.Equal(t, info.GetName(), "john") addr := info.GetPubKey().Address() john, err := cstore.Get("john") require.NoError(t, err) require.Equal(t, john.GetName(), "john") require.Equal(t, john.GetPubKey().Address(), addr) // Export the public key only armor, err := cstore.ExportPubKey("john") require.NoError(t, err) // Import it under a different name err = cstore.ImportPubKey("john-pubkey-only", armor) require.NoError(t, err) // Ensure consistency john2, err := cstore.Get("john-pubkey-only") require.NoError(t, err) // Compare the public keys require.True(t, john.GetPubKey().Equals(john2.GetPubKey())) // Ensure the original key hasn't changed john, err = cstore.Get("john") require.NoError(t, err) require.Equal(t, john.GetPubKey().Address(), addr) require.Equal(t, john.GetName(), "john") // Ensure keys cannot be overwritten err = cstore.ImportPubKey("john-pubkey-only", armor) require.NotNil(t, err) }
explode_data.jsonl/10672
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 16894, 11511, 29162, 1592, 1155, 353, 8840, 836, 8, 341, 197, 322, 1281, 279, 5819, 448, 13276, 16674, 198, 1444, 4314, 1669, 1532, 641, 10642, 2822, 197, 322, 4230, 44, 70775, 264, 869, 56471, 1376, 6716, 323, 5978, 28137, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWithQueryParametersCatchesNilURL(t *testing.T) { _, err := Prepare(&http.Request{}, WithQueryParameters(map[string]interface{}{"foo": "bar"})) if err == nil { t.Fatalf("autorest: WithQueryParameters failed to catch a nil URL") } }
explode_data.jsonl/20976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 2354, 2859, 9706, 34, 9118, 19064, 3144, 1155, 353, 8840, 836, 8, 972, 197, 6878, 1848, 1669, 31166, 2099, 1254, 9659, 22655, 3085, 2859, 9706, 9147, 14032, 31344, 6257, 4913, 7975, 788, 330, 2257, 9207, 5784, 743, 1848, 621...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestAddScalar_Forward2(t *testing.T) { t.Run("float32", testAddScalarForward2[float32]) t.Run("float64", testAddScalarForward2[float64]) }
explode_data.jsonl/57479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 59 }
[ 2830, 3393, 2212, 20639, 84368, 1606, 17, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 3649, 18, 17, 497, 1273, 2212, 20639, 25925, 17, 95381, 18, 17, 2546, 3244, 16708, 445, 3649, 21, 19, 497, 1273, 2212, 20639, 25925, 17, 95381, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestTypes(t *testing.T) { table := []interface{}{ &TestType1{}, &ExternalInternalSame{}, } for _, item := range table { // Try a few times, since runTest uses random values. for i := 0; i < *fuzzIters; i++ { runTest(t, item) } } }
explode_data.jsonl/29641
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 4173, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 3056, 4970, 67066, 197, 197, 5, 2271, 929, 16, 38837, 197, 197, 5, 25913, 11569, 19198, 38837, 197, 532, 2023, 8358, 1509, 1669, 2088, 1965, 341, 197, 197, 322, 9735, 264, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMemberRollWithWrongNameIsRejected(t *testing.T) { roll := newMemberRoll("not-default", "istio-system") validator, _, _ := createMemberRollValidatorTestFixture(smcp) response := validator.Handle(ctx, createCreateRequest(roll)) assert.False(response.Allowed, "Expected validator to reject ServiceMeshMemberRoll with wrong name", t) }
explode_data.jsonl/70262
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 9366, 32355, 2354, 29185, 675, 3872, 77693, 1155, 353, 8840, 836, 8, 341, 197, 1100, 1669, 501, 9366, 32355, 445, 1921, 13672, 497, 330, 380, 815, 36648, 5130, 197, 16112, 11, 8358, 716, 1669, 1855, 9366, 32355, 14256, 69356...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWatchCancelDisconnected(t *testing.T) { integration2.BeforeTest(t) clus := integration2.NewClusterV3(t, &integration2.ClusterConfig{Size: 1}) defer clus.Terminate(t) cli := clus.Client(0) ctx, cancel := context.WithCancel(context.Background()) // add more watches than can be resumed before the cancel wch := cli.Watch(ctx, "abc") clus.Members[0].Stop(t) cancel() select { case <-wch: case <-time.After(time.Second): t.Fatal("took too long to cancel disconnected watcher") } }
explode_data.jsonl/28947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 14247, 9269, 77021, 1155, 353, 8840, 836, 8, 341, 2084, 17376, 17, 31153, 2271, 1155, 340, 197, 4163, 1669, 17590, 17, 7121, 28678, 53, 18, 1155, 11, 609, 60168, 17, 72883, 2648, 90, 1695, 25, 220, 16, 3518, 16867, 1185, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIssue85(t *testing.T) { anon := struct{ Datasets []Dataset }{} testJsonFromStruct(t, anon, `{ "struct { Datasets ||swagger.Dataset }": { "id": "struct { Datasets ||swagger.Dataset }", "required": [ "Datasets" ], "properties": { "Datasets": { "type": "array", "description": "", "items": { "$ref": "swagger.Dataset" }, "format": "" } } }, "swagger.Dataset": { "id": "swagger.Dataset", "required": [ "Names" ], "properties": { "Names": { "type": "array", "description": "", "items": { "$ref": "string" }, "format": "" } } } }`) }
explode_data.jsonl/39932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 42006, 23, 20, 1155, 353, 8840, 836, 8, 341, 197, 58910, 1669, 2036, 90, 422, 76649, 3056, 33363, 335, 16094, 18185, 5014, 3830, 9422, 1155, 11, 74812, 11, 1565, 515, 220, 330, 1235, 314, 422, 76649, 1369, 74755, 79356, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetMyTeamsUnread(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() Client := th.Client user := th.BasicUser Client.Login(user.Email, user.Password) teams, resp := Client.GetTeamsUnreadForUser(user.Id, "") CheckNoError(t, resp) require.NotEqual(t, len(teams), 0, "should have results") teams, resp = Client.GetTeamsUnreadForUser(user.Id, th.BasicTeam.Id) CheckNoError(t, resp) require.Empty(t, teams, "should not have results") _, resp = Client.GetTeamsUnreadForUser("fail", "") CheckBadRequestStatus(t, resp) _, resp = Client.GetTeamsUnreadForUser(model.NewId(), "") CheckForbiddenStatus(t, resp) Client.Logout() _, resp = Client.GetTeamsUnreadForUser(user.Id, "") CheckUnauthorizedStatus(t, resp) }
explode_data.jsonl/70734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 1949, 5050, 60669, 1806, 878, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 19060, 1669, 270, 48868, 1474, 198, 71724, 32499, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1