text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
// TestTokenAuthenticator exercises bootstrap-token authentication against a
// table of secrets: valid tokens (with and without extra groups), a malformed
// extra group, a wrong secret name, a missing usage flag, a wrong token, a
// deleted (DeletionTimestamp set) token, expired/not-expired tokens, and a
// token ID of the wrong length. wantNotFound cases must yield found == false.
func TestTokenAuthenticator(t *testing.T) { now := metav1.Now() tests := []struct { name string secrets []*api.Secret token string wantNotFound bool wantUser *user.DefaultInfo }{ { name: "valid token", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." + tokenSecret, wantUser: &user.DefaultInfo{ Name: "system:bootstrap:" + tokenID, Groups: []string{"system:bootstrappers"}, }, }, { name: "valid token with extra group", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), bootstrapapi.BootstrapTokenExtraGroupsKey: []byte("system:bootstrappers:foo"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." + tokenSecret, wantUser: &user.DefaultInfo{ Name: "system:bootstrap:" + tokenID, Groups: []string{"system:bootstrappers", "system:bootstrappers:foo"}, }, }, { name: "invalid group", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), bootstrapapi.BootstrapTokenExtraGroupsKey: []byte("foo"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." 
+ tokenSecret, wantNotFound: true, }, { name: "invalid secret name", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: "bad-name", }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." + tokenSecret, wantNotFound: true, }, { name: "no usage", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." + tokenSecret, wantNotFound: true, }, { name: "wrong token", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: "barfoo" + "." + tokenSecret, wantNotFound: true, }, { name: "deleted token", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, DeletionTimestamp: &now, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." 
+ tokenSecret, wantNotFound: true, }, { name: "expired token", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), bootstrapapi.BootstrapTokenExpirationKey: []byte("2009-11-10T23:00:00Z"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." + tokenSecret, wantNotFound: true, }, { name: "not expired token", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + tokenID, }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte(tokenID), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), bootstrapapi.BootstrapTokenExpirationKey: []byte("2109-11-10T23:00:00Z"), }, Type: "bootstrap.kubernetes.io/token", }, }, token: tokenID + "." + tokenSecret, wantUser: &user.DefaultInfo{ Name: "system:bootstrap:" + tokenID, Groups: []string{"system:bootstrappers"}, }, }, { name: "token id wrong length", secrets: []*api.Secret{ { ObjectMeta: metav1.ObjectMeta{ Name: bootstrapapi.BootstrapTokenSecretPrefix + "foo", }, Data: map[string][]byte{ bootstrapapi.BootstrapTokenIDKey: []byte("foo"), bootstrapapi.BootstrapTokenSecretKey: []byte(tokenSecret), bootstrapapi.BootstrapTokenUsageAuthentication: []byte("true"), }, Type: "bootstrap.kubernetes.io/token", }, }, // Token ID must be 6 characters. token: "foo" + "." 
+ tokenSecret, wantNotFound: true, }, } for _, test := range tests { func() { a := NewTokenAuthenticator(&lister{test.secrets}) u, found, err := a.AuthenticateToken(test.token) if err != nil { t.Errorf("test %q returned an error: %v", test.name, err) return } if !found { if !test.wantNotFound { t.Errorf("test %q expected to get user", test.name) } return } if test.wantNotFound { t.Errorf("test %q expected to not get a user", test.name) return } gotUser := u.(*user.DefaultInfo) if !reflect.DeepEqual(gotUser, test.wantUser) { t.Errorf("test %q want user=%#v, got=%#v", test.name, test.wantUser, gotUser) } }() } }
explode_data.jsonl/65924
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3263 }
[ 2830, 3393, 3323, 5087, 61393, 1155, 353, 8840, 836, 8, 341, 80922, 1669, 77520, 16, 13244, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 271, 197, 84686, 52710, 29838, 2068, 74779, 198, 197, 43947, 256, 914, 271, 197, 50780, 103...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestByteSubscriptions_keys(t *testing.T) { tests := []struct { name string sub ByteSubscriptions want []string }{ { "0,8", ByteSubscriptions{ "0000": &PartialSubscription{0, "0", ByteSubscriptions{}}, "1000": &PartialSubscription{0, "8", ByteSubscriptions{}}, }, []string{"0000", "1000"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := tt.sub.Keys(); !reflect.DeepEqual(got, tt.want) { t.Errorf("ByteSubscriptions.keys() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/38238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 7153, 3136, 29966, 12631, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 28624, 220, 10906, 3136, 29966, 198, 197, 50780, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 197, 1, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestReconcileServiceInstanceUpdateAsynchronous verifies the reconcile path
// when the broker answers UpdateInstance with Async: true and an operation
// key: the instance status is moved to "async update in progress", exactly
// one broker action (the UpdateInstanceRequest below) is issued, only the
// namespace-lookup kube action occurs, and exactly one polling-queue entry
// is recorded for the instance.
func TestReconcileServiceInstanceUpdateAsynchronous(t *testing.T) { key := osb.OperationKey(testOperation) fakeKubeClient, fakeCatalogClient, fakeClusterServiceBrokerClient, testController, sharedInformers := newTestController(t, fakeosb.FakeClientConfiguration{ UpdateInstanceReaction: &fakeosb.UpdateInstanceReaction{ Response: &osb.UpdateInstanceResponse{ Async: true, OperationKey: &key, }, }, }) addGetNamespaceReaction(fakeKubeClient) sharedInformers.ClusterServiceBrokers().Informer().GetStore().Add(getTestClusterServiceBroker()) sharedInformers.ClusterServiceClasses().Informer().GetStore().Add(getTestClusterServiceClass()) sharedInformers.ClusterServicePlans().Informer().GetStore().Add(getTestClusterServicePlan()) instance := getTestServiceInstanceWithClusterRefs() instance.Generation = 2 instance.Status.ReconciledGeneration = 1 instance.Status.ObservedGeneration = 1 instance.Status.ProvisionStatus = v1beta1.ServiceInstanceProvisionStatusProvisioned instance.Status.DeprovisionStatus = v1beta1.ServiceInstanceDeprovisionStatusRequired instance.Status.ExternalProperties = &v1beta1.ServiceInstancePropertiesState{ ClusterServicePlanExternalName: "old-plan-name", ClusterServicePlanExternalID: "old-plan-id", } instanceKey := testNamespace + "/" + testServiceInstanceName if testController.instancePollingQueue.NumRequeues(instanceKey) != 0 { t.Fatalf("Expected polling queue to not have any record of test instance") } if err := reconcileServiceInstance(t, testController, instance); err != nil { t.Fatalf("unexpected error: %v", err) } instance = assertServiceInstanceUpdateInProgressIsTheOnlyCatalogClientAction(t, fakeCatalogClient, instance) fakeCatalogClient.ClearActions() fakeKubeClient.ClearActions() if err := reconcileServiceInstance(t, testController, instance); err != nil { t.Fatalf("This should not fail : %v", err) } brokerActions := fakeClusterServiceBrokerClient.Actions() assertNumberOfBrokerActions(t, brokerActions, 1) expectedPlanID := testClusterServicePlanGUID 
assertUpdateInstance(t, brokerActions[0], &osb.UpdateInstanceRequest{ AcceptsIncomplete: true, InstanceID: testServiceInstanceGUID, ServiceID: testClusterServiceClassGUID, PlanID: &expectedPlanID, Context: testContext}) actions := fakeCatalogClient.Actions() assertNumberOfActions(t, actions, 1) updatedServiceInstance := assertUpdateStatus(t, actions[0], instance) assertServiceInstanceAsyncStartInProgress(t, updatedServiceInstance, v1beta1.ServiceInstanceOperationUpdate, testOperation, testClusterServicePlanName, testClusterServicePlanGUID, instance) // verify no kube resources created. // One single action comes from getting namespace uid kubeActions := fakeKubeClient.Actions() if e, a := 1, len(kubeActions); e != a { t.Fatalf("Unexpected number of actions: expected %v, got %v", e, a) } if testController.instancePollingQueue.NumRequeues(instanceKey) != 1 { t.Fatalf("Expected polling queue to have a record of seeing test instance once") } }
explode_data.jsonl/58196
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 973 }
[ 2830, 3393, 693, 40446, 457, 1860, 2523, 4289, 2121, 27834, 1155, 353, 8840, 836, 8, 341, 23634, 1669, 2643, 65, 56297, 1592, 8623, 8432, 340, 1166, 726, 42, 3760, 2959, 11, 12418, 41606, 2959, 11, 12418, 28678, 1860, 65545, 2959, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDefaultErrorHandlerDoesNotWriteIfAlreadyWritten(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mrw := NewMockResponseWriter(ctrl) mrw.EXPECT().Len().Return(1) mc := NewMockContext(ctrl) mc.EXPECT().Response().Return(mrw) defaultErrorHandler(mc, errors.New("hello, world")) }
explode_data.jsonl/18943
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 3675, 66673, 21468, 2623, 7985, 2679, 38370, 35624, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 2109, 31768, 1669, 1532, 11571, 2582, 6492, 62100, 340, 2109...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParamSubstitutionWithArtifact(t *testing.T) { wf := test.LoadE2EWorkflow("functional/param-sub-with-artifacts.yaml") woc := newWoc(*wf) woc.operate() wf, err := woc.controller.wfclientset.ArgoprojV1alpha1().Workflows("").Get(wf.ObjectMeta.Name, metav1.GetOptions{}) assert.NoError(t, err) assert.Equal(t, wf.Status.Phase, wfv1.NodeRunning) pods, err := woc.controller.kubeclientset.CoreV1().Pods("").List(metav1.ListOptions{}) assert.NoError(t, err) assert.Equal(t, len(pods.Items), 1) }
explode_data.jsonl/54376
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 2001, 3136, 10446, 2354, 85578, 1155, 353, 8840, 836, 8, 341, 6692, 69, 1669, 1273, 13969, 36, 17, 36, 62768, 445, 49228, 14, 903, 17967, 26189, 37821, 26401, 33406, 1138, 6692, 509, 1669, 501, 54, 509, 4071, 43083, 340, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMonthDays(t *testing.T) { type args struct { year int month time.Month } tests := []struct { name string args args want int }{ {"1月", args{2020, time.January}, 31}, {"闰年2月", args{2020, time.February}, 29}, {"非闰年2月", args{2019, time.February}, 28}, {"3月", args{2020, time.March}, 31}, {"4月", args{2020, time.April}, 30}, {"5月", args{2020, time.May}, 31}, {"6月", args{2020, time.June}, 30}, {"7月", args{2020, time.July}, 31}, {"8月", args{2020, time.August}, 31}, {"9月", args{2020, time.September}, 30}, {"10月", args{2020, time.October}, 31}, {"11月", args{2020, time.November}, 30}, {"12月", args{2020, time.December}, 31}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := MonthDays(tt.args.year, tt.args.month); got != tt.want { t.Errorf("MonthDays() = %v, want %v", got, tt.want) } }) } require.Panics(t, func() { MonthDays(2020, 13) }) }
explode_data.jsonl/45308
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 11318, 20557, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 197, 3157, 220, 526, 198, 197, 197, 10249, 882, 48383, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDaoTagCount(t *testing.T) { var ( c = context.TODO() tid = int64(0) ) convey.Convey("TagCount", t, func(ctx convey.C) { res, err := d.TagCount(c, tid) ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(res, convey.ShouldNotBeNil) }) }) }
explode_data.jsonl/36694
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 12197, 5668, 2507, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 256, 284, 2266, 90988, 741, 197, 3244, 307, 284, 526, 21, 19, 7, 15, 340, 197, 340, 37203, 5617, 4801, 5617, 445, 5668, 2507, 497, 259, 11, 2915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAgent_Members(t *testing.T) { t.Parallel() a := NewTestAgent(t.Name(), "") defer a.Shutdown() req, _ := http.NewRequest("GET", "/v1/agent/members", nil) obj, err := a.srv.AgentMembers(nil, req) if err != nil { t.Fatalf("Err: %v", err) } val := obj.([]serf.Member) if len(val) == 0 { t.Fatalf("bad members: %v", obj) } if int(val[0].Port) != a.Config.SerfPortLAN { t.Fatalf("not lan: %v", obj) } }
explode_data.jsonl/33599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 16810, 1245, 7062, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 11323, 1669, 1532, 2271, 16810, 1155, 2967, 1507, 14676, 16867, 264, 10849, 18452, 2822, 24395, 11, 716, 1669, 1758, 75274, 445, 3806, 497, 3521, 85, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGenUUIDPreservedWhenNodeCountExceeds4(t *testing.T) { cluster := &logging.ClusterLogging{ Spec: logging.ClusterLoggingSpec{ LogStore: &logging.LogStoreSpec{ Type: "elasticsearch", ElasticsearchSpec: logging.ElasticsearchSpec{ NodeCount: 3, }, }, }, } cr := &ClusterLoggingRequest{ Cluster: cluster, } existing := &elasticsearch.Elasticsearch{} elasticsearchCR := cr.newElasticsearchCR("test-app-name", existing) dataUUID := esutils.GenerateUUID() elasticsearchCR.Spec.Nodes[0].GenUUID = &dataUUID cluster = &logging.ClusterLogging{ Spec: logging.ClusterLoggingSpec{ LogStore: &logging.LogStoreSpec{ Type: "elasticsearch", ElasticsearchSpec: logging.ElasticsearchSpec{ NodeCount: 4, }, }, }, } cr = &ClusterLoggingRequest{ Cluster: cluster, } elasticsearchCR2 := cr.newElasticsearchCR("test-app-name", existing) diffCR, different := isElasticsearchCRDifferent(elasticsearchCR, elasticsearchCR2) if !different { t.Errorf("Expected that difference would be found due to node count change") } if diffCR.Spec.Nodes[0].GenUUID == nil || *diffCR.Spec.Nodes[0].GenUUID != dataUUID { t.Errorf("Expected that original GenUUID would be preserved as %v but was %v", dataUUID, diffCR.Spec.Nodes[0].GenUUID) } }
explode_data.jsonl/72383
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 9967, 24754, 14367, 2771, 4498, 1955, 2507, 840, 4635, 82, 19, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 609, 25263, 72883, 34575, 515, 197, 7568, 992, 25, 8392, 72883, 34575, 8327, 515, 298, 24201, 6093, 25, 609, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestOutboundListenerConflict_Unordered(t *testing.T) { // Ensure that the order is preserved when all the times match. The first service in the list wins. testOutboundListenerConflict(t, buildService("test1.com", wildcardIP, protocol.HTTP, tzero), buildService("test2.com", wildcardIP, protocol.TCP, tzero), buildService("test3.com", wildcardIP, protocol.TCP, tzero)) }
explode_data.jsonl/61264
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 2662, 10891, 2743, 57974, 40687, 10544, 1155, 353, 8840, 836, 8, 341, 197, 322, 29279, 429, 279, 1973, 374, 33583, 979, 678, 279, 3039, 2432, 13, 576, 1156, 2473, 304, 279, 1140, 14816, 624, 18185, 2662, 10891, 2743, 57974, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewRestConfigFromKubeconfigSecretRefErrors(t *testing.T) { testcases := []struct { name string secret *secrets }{ { name: "empty map in the secret/kubeconfig doesn't exist as a key", secret: &secrets{ secret: &v1.Secret{ Data: map[string][]byte{ "unknown key": []byte(""), }, }, }, }, { name: "k8s secret client returned an error", secret: &secrets{ err: errors.New("an error."), }, }, } for _, tc := range testcases { t.Run(tc.name, func(t *testing.T) { config, err := upgrade.NewRestConfigFromKubeconfigSecretRef(tc.secret, "") if err == nil { t.Fatal("expected an error but did not get one") } if config != nil { t.Fatalf("expected config to be nil but it is not: %v", config) } }) } }
explode_data.jsonl/65359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 3564, 12416, 2648, 3830, 42, 3760, 1676, 19773, 3945, 13877, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 197, 20474, 353, 325, 52710, 198, 197, 59403, 197, 197, 515, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUpgradeFromPrev(t *testing.T) { upgradedDbInfo := getUpgradedDbInfo(t, false, baseExtensionState) pristineDbInfo := getPristineDbInfo(t, false, baseExtensionState) if !reflect.DeepEqual(pristineDbInfo, upgradedDbInfo) { PrintDbSnapshotDifferences(t, pristineDbInfo, upgradedDbInfo) } }
explode_data.jsonl/79300
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 43861, 3830, 33528, 1155, 353, 8840, 836, 8, 341, 59810, 23343, 7994, 1731, 1669, 633, 2324, 23343, 7994, 1731, 1155, 11, 895, 11, 2331, 12049, 1397, 340, 25653, 53065, 7994, 1731, 1669, 57720, 2819, 482, 7994, 1731, 1155, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAndCloseOpen(t *testing.T) { dir, err := ioutil.TempDir("", "tarwriter_test") if err != nil { assert.FailNow(t, "Cannot create temp dir", err.Error()) } tempFilePath := filepath.Join(dir, "test_file.tar") defer os.RemoveAll(dir) w := tarfile.NewWriter(tempFilePath) defer w.Close() err = w.Open() assert.Nil(t, err) if _, err := os.Stat(w.PathToTarFile); os.IsNotExist(err) { assert.Fail(t, "Tar file does not exist at %s", w.PathToTarFile) } err = w.Close() assert.Nil(t, err) }
explode_data.jsonl/75874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 3036, 7925, 5002, 1155, 353, 8840, 836, 8, 341, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 26737, 18189, 4452, 1138, 743, 1848, 961, 2092, 341, 197, 6948, 57243, 7039, 1155, 11, 330, 17444, 1855, 2730, 5419, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDecodeAssuredCallbackMissingTarget(t *testing.T) { decoded := false testDecode := func(resp http.ResponseWriter, req *http.Request) { c, err := decodeAssuredCallback(ctx, req) require.Nil(t, c) require.Error(t, err) require.Equal(t, "'Assured-Callback-Target' header required for callback", err.Error()) decoded = true } req, err := http.NewRequest(http.MethodPost, "/callback", bytes.NewBuffer([]byte(`{"done": true}`))) req.Header.Set(AssuredCallbackKey, "call-key") require.NoError(t, err) router := mux.NewRouter() router.HandleFunc("/callback", testDecode).Methods(http.MethodPost) resp := httptest.NewRecorder() router.ServeHTTP(resp, req) require.True(t, decoded, "decode method was not hit") }
explode_data.jsonl/20259
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 32564, 5615, 3073, 7494, 25080, 6397, 1155, 353, 8840, 836, 8, 341, 197, 62913, 1669, 895, 198, 18185, 32564, 1669, 2915, 20267, 1758, 37508, 11, 4232, 353, 1254, 9659, 8, 341, 197, 1444, 11, 1848, 1669, 16895, 5615, 3073, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestScanJSONB covers ScanJSONB/DecodeJSONB over a variety of JSON payloads
// into testStruct: full objects, objects with missing keys, null/bool/number
// values for the "v" field, an empty object, and json.Unmarshal into slices
// of *testStruct with the same value variations.
func TestScanJSONB(t *testing.T) { { a := testStruct{} err := ScanJSONB(&a, []byte(`{"x": 5, "z": "Hello", "v": 1}`)) assert.NoError(t, err) assert.Equal(t, "Hello", a.Z) assert.Equal(t, float64(1), a.V.V) assert.Equal(t, 5, a.X) } { a := testStruct{} err := DecodeJSONB(&a, []byte(`{"x": 5, "z": "Hello", "v": null}`)) assert.NoError(t, err) assert.Equal(t, "Hello", a.Z) assert.Equal(t, nil, a.V.V) assert.Equal(t, 5, a.X) } { a := testStruct{} err := ScanJSONB(&a, []byte(`{"x": 5, "z": "Hello"}`)) assert.NoError(t, err) assert.Equal(t, "Hello", a.Z) assert.Equal(t, nil, a.V.V) assert.Equal(t, 5, a.X) } { a := testStruct{} err := ScanJSONB(&a, []byte(`{"v": "Hello"}`)) assert.NoError(t, err) assert.Equal(t, "Hello", a.V.V) } { a := testStruct{} err := ScanJSONB(&a, []byte(`{"v": true}`)) assert.NoError(t, err) assert.Equal(t, true, a.V.V) } { a := testStruct{} err := ScanJSONB(&a, []byte(`{}`)) assert.NoError(t, err) assert.Equal(t, nil, a.V.V) } { a := []*testStruct{} err := json.Unmarshal([]byte(`[{}]`), &a) assert.NoError(t, err) assert.Equal(t, 1, len(a)) assert.Nil(t, a[0].V.V) } { a := []*testStruct{} err := json.Unmarshal([]byte(`[{"v": true}]`), &a) assert.NoError(t, err) assert.Equal(t, 1, len(a)) assert.Equal(t, true, a[0].V.V) } { a := []*testStruct{} err := json.Unmarshal([]byte(`[{"v": null}]`), &a) assert.NoError(t, err) assert.Equal(t, 1, len(a)) assert.Nil(t, a[0].V.V) } { a := []*testStruct{} err := json.Unmarshal([]byte(`[{"v": 12.34}]`), &a) assert.NoError(t, err) assert.Equal(t, 1, len(a)) assert.Equal(t, 12.34, a[0].V.V) } }
explode_data.jsonl/77556
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 916 }
[ 2830, 3393, 26570, 5370, 33, 1155, 353, 8840, 836, 8, 341, 197, 515, 197, 11323, 1669, 1273, 9422, 16094, 197, 9859, 1669, 31108, 5370, 33, 2099, 64, 11, 3056, 3782, 5809, 4913, 87, 788, 220, 20, 11, 330, 89, 788, 330, 9707, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSSH_CustomUser(t *testing.T) { withTestClient(t, func(config *CmdConfig, tm *tcMocks) { tm.sshRunner.EXPECT().Run().Return(nil) tc := config.Doit.(*doctl.TestConfig) tc.SSHFn = func(user, host, keyPath string, port int, opts ssh.Options) runner.Runner { assert.Equal(t, "foobar", user) return tm.sshRunner } tm.droplets.EXPECT().List().Return(testDropletList, nil) config.Doit.Set(config.NS, doctl.ArgSSHUser, "foobar") config.Args = append(config.Args, testDroplet.Name) err := RunSSH(config) assert.NoError(t, err) }) }
explode_data.jsonl/6480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 62419, 57402, 1474, 1155, 353, 8840, 836, 8, 341, 46948, 2271, 2959, 1155, 11, 2915, 8754, 353, 15613, 2648, 11, 17333, 353, 10413, 72577, 8, 341, 197, 3244, 76, 514, 927, 19486, 22402, 7285, 1005, 6727, 1005, 5598, 27907, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEngineNoQuery(t *testing.T) { config := DefaultConfig() sender := &common.SenderTest{} sender.T = t config.Sender = sender sender.Default(true) sender.CantGetAcceptedFrontier = false gBlk := &Blk{ id: GenerateID(), status: choices.Accepted, } vm := &VMTest{} vm.T = t vm.LastAcceptedF = func() ids.ID { return gBlk.ID() } config.VM = vm te := &Transitive{} te.Initialize(config) te.finishBootstrapping() blk := &Blk{ parent: gBlk, id: GenerateID(), status: choices.Processing, bytes: []byte{1}, } te.insert(blk) }
explode_data.jsonl/3566
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 4571, 2753, 2859, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 7899, 2648, 2822, 1903, 1659, 1669, 609, 5464, 808, 1659, 2271, 16094, 1903, 1659, 836, 284, 259, 198, 25873, 808, 1659, 284, 4646, 271, 1903, 1659, 13275, 3715, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReplaceHook_DeleteError(t *testing.T) { controller := gomock.NewController(t) defer controller.Finish() hooks := []*scm.Hook{ { ID: "1", Target: "https://drone.company.com/hook", }, } hookInput := &scm.HookInput{ Target: "https://drone.company.com/hook", } remote := mockscm.NewMockRepositoryService(controller) remote.EXPECT().ListHooks(gomock.Any(), "octocat/hello-world", gomock.Any()).Return(hooks, nil, nil) remote.EXPECT().DeleteHook(gomock.Any(), "octocat/hello-world", "1").Return(nil, io.EOF) client := new(scm.Client) client.Repositories = remote err := replaceHook(context.Background(), client, "octocat/hello-world", hookInput) if err == nil { t.Errorf("Expect error if hook deletion fails") } }
explode_data.jsonl/40652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 23107, 31679, 57418, 1454, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 6461, 991, 18176, 2822, 9598, 14685, 1669, 29838, 2388, 76, 3839, 1941, 515, 197, 197, 515, 298, 29580, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_scsi_GetDMChildren drives Scsi.GetDMChildren through three cases via
// a mocked filepath glob over the device-mapper slaves sysfs path: a glob
// error (want error), a successful glob (want both device names), and an
// empty glob result (want nil, no error). Each case's stateSetter mutates
// the shared mock before the call, so case order and the shared `mock`
// value are load-bearing.
func Test_scsi_GetDMChildren(t *testing.T) { type args struct { ctx context.Context dm string } ctx := context.Background() defaultArgs := args{ctx: ctx, dm: mh.ValidDMName} ctrl := gomock.NewController(t) defer ctrl.Finish() sysPath := fmt.Sprintf("/sys/block/%s/slaves/", mh.ValidDMName) mock := mh.MockHelper{ Ctrl: ctrl, FilePathGlobCallPattern: sysPath + "*", FilePathGlobOKReturn: []string{ sysPath + mh.ValidDeviceName, sysPath + mh.ValidDeviceName2}, } tests := []struct { name string fields scsiFields stateSetter func(fields scsiFields) args args want []string wantErr bool }{ { name: "glob err", fields: getDefaultSCSIFields(ctrl), stateSetter: func(fields scsiFields) { mock.FilePathGlobErr(fields.filePath) }, args: defaultArgs, want: nil, wantErr: true, }, { name: "found", fields: getDefaultSCSIFields(ctrl), stateSetter: func(fields scsiFields) { mock.FilePathGlobOK(fields.filePath) }, args: defaultArgs, want: []string{mh.ValidDeviceName, mh.ValidDeviceName2}, wantErr: false, }, { name: "not found", fields: getDefaultSCSIFields(ctrl), stateSetter: func(fields scsiFields) { mock.FilePathGlobOKReturn = nil mock.FilePathGlobOK(fields.filePath) }, args: defaultArgs, want: nil, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { s := &Scsi{ fileReader: tt.fields.fileReader, filePath: tt.fields.filePath, os: tt.fields.os, osexec: tt.fields.osexec, singleCall: tt.fields.singleCall, } tt.stateSetter(tt.fields) got, err := s.GetDMChildren(tt.args.ctx, tt.args.dm) if (err != nil) != tt.wantErr { t.Errorf("GetDMChildren() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("GetDMChildren() got = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/65989
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 979 }
[ 2830, 3393, 643, 63229, 13614, 8395, 11539, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 20985, 2266, 9328, 198, 197, 2698, 76, 220, 914, 198, 197, 630, 20985, 1669, 2266, 19047, 2822, 11940, 4117, 1669, 2827, 90, 3773, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRepoLoadsAllDependantTemplates(t *testing.T) { var b bytes.Buffer repo := NewRepository(nil) err := repo.AddFile("multiple", multipleDefinitions) assert.NoError(t, err) err = repo.AddFile("dependant", dependantTemplate) assert.NoError(t, err) templ, err := repo.Get("dependant") assert.Nil(t, err) err = templ.Execute(&b, nil) assert.Nil(t, err) assert.Equal(t, "T1D1", b.String()) }
explode_data.jsonl/49689
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 25243, 78517, 2403, 7839, 20372, 51195, 1155, 353, 8840, 836, 8, 1476, 2405, 293, 5820, 22622, 198, 17200, 5368, 1669, 1532, 4624, 27907, 692, 9859, 1669, 15867, 1904, 1703, 445, 35673, 497, 5248, 50542, 340, 6948, 35699, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPeerDistanceHeap_Distance(t *testing.T) { rng := rand.New(rand.NewSource(int64(0))) target := id.NewPseudoRandom(rng) cp := NewClosestPeers(target, 8) for _, p := range peer.NewTestPeers(rng, 8) { assert.True(t, target.Distance(p.ID()).Cmp(cp.Distance(p)) == 0) } }
explode_data.jsonl/41123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 30888, 14778, 27909, 1557, 3924, 1155, 353, 8840, 836, 8, 341, 7000, 968, 1669, 10382, 7121, 37595, 7121, 3608, 1548, 21, 19, 7, 15, 5929, 28861, 1669, 877, 7121, 47, 21952, 13999, 87597, 340, 52018, 1669, 1532, 7925, 267, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_mapper_MapRulesSpecialCharNamespace exercises mapper.MapRules with a
// rule set whose namespace contains special characters: the mapped file must
// first be created on the in-memory filesystem, then removed again when the
// rule set becomes empty. The two subtests are order-dependent (the second
// deletes what the first created).
func Test_mapper_MapRulesSpecialCharNamespace(t *testing.T) {
	l := log.NewLogfmtLogger(os.Stdout)
	l = level.NewFilter(l, level.AllowInfo())
	setupRuleSets()
	// afero's in-memory FS keeps the test off the real disk.
	m := &mapper{
		Path:   "/rules",
		FS:     afero.NewMemMapFs(),
		logger: l,
	}
	t.Run("create special characters rulegroup", func(t *testing.T) {
		updated, files, err := m.MapRules(testUser, specialCharactersRuleSet)
		require.NoError(t, err)
		require.True(t, updated)
		require.Len(t, files, 1)
		// The special characters must map to the expected file path.
		require.Equal(t, specialCharFilePath, files[0])
		exists, err := afero.Exists(m.FS, specialCharFilePath)
		require.NoError(t, err)
		require.True(t, exists)
	})
	t.Run("delete special characters rulegroup", func(t *testing.T) {
		// Mapping an empty set of rule groups must delete the file created above.
		updated, files, err := m.MapRules(testUser, map[string][]rulefmt.RuleGroup{})
		require.NoError(t, err)
		require.True(t, updated)
		require.Len(t, files, 0)
		exists, err := afero.Exists(m.FS, specialCharFilePath)
		require.NoError(t, err)
		require.False(t, exists)
	})
}
explode_data.jsonl/34365
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 398 }
[ 2830, 3393, 76664, 56992, 26008, 20366, 4768, 22699, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 1487, 7121, 2201, 12501, 7395, 9638, 83225, 340, 8810, 284, 2188, 7121, 5632, 2333, 11, 2188, 29081, 1731, 2398, 84571, 11337, 30175, 741, 2109...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCollection_EnsureIndex exercises ensureIndex against a live test
// collection: nil input, a unique index, compound and descending-key indexes,
// a duplicate-definition error, and finally that the unique index actually
// rejects duplicate documents.
func TestCollection_EnsureIndex(t *testing.T) {
	ast := require.New(t)
	cli := initClient("test")
	defer cli.Close(context.Background())
	defer cli.DropCollection(context.Background())
	// nil index models: expected to be a no-op.
	cli.ensureIndex(context.Background(), nil)
	indexOpts := officialOpts.Index()
	indexOpts.SetUnique(true)
	// Unique single-key index on id1.
	cli.ensureIndex(context.Background(), []options.IndexModel{{Key: []string{"id1"}, IndexOptions: indexOpts}})
	// Compound index on id2+id3.
	cli.ensureIndex(context.Background(), []options.IndexModel{{Key: []string{"id2", "id3"}}})
	// Compound index with a descending key ("-id5").
	cli.ensureIndex(context.Background(), []options.IndexModel{{Key: []string{"id4", "-id5"}}})
	// Re-declaring id1 (without the unique option) conflicts with the
	// existing unique index and must fail.
	ast.Error(cli.ensureIndex(context.Background(), []options.IndexModel{{Key: []string{"id1"}}}))
	// Verify the unique index is enforced: inserting the same id1 twice
	// must produce a duplicate-key error.
	var err error
	doc := bson.M{
		"id1": 1,
	}
	_, err = cli.InsertOne(context.Background(), doc)
	ast.NoError(err)
	coll, err := cli.CloneCollection()
	ast.NoError(err)
	_, err = coll.InsertOne(context.Background(), doc)
	ast.Equal(true, IsDup(err))
}
explode_data.jsonl/18367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 6482, 62, 64439, 1552, 1155, 353, 8840, 836, 8, 341, 88836, 1669, 1373, 7121, 1155, 340, 86448, 1669, 2930, 2959, 445, 1944, 1138, 16867, 21348, 10421, 5378, 19047, 2398, 16867, 21348, 21688, 6482, 5378, 19047, 12367, 86448, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDaoUpReportState(t *testing.T) { var ( c = context.TODO() id = int64(0) state = int32(0) ) convey.Convey("UpReportState", t, func(ctx convey.C) { affect, err := d.UpReportState(c, id, state) ctx.Convey("Then err should be nil.affect should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(affect, convey.ShouldNotBeNil) }) }) }
explode_data.jsonl/51296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 12197, 2324, 10361, 1397, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 257, 284, 2266, 90988, 741, 197, 15710, 262, 284, 526, 21, 19, 7, 15, 340, 197, 24291, 284, 526, 18, 17, 7, 15, 340, 197, 340, 37203, 5617,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetSNMPConnection_v2(t *testing.T) { s := &Snmp{ Agents: []string{"1.2.3.4:567", "1.2.3.4", "udp://127.0.0.1"}, ClientConfig: snmp.ClientConfig{ Timeout: config.Duration(3 * time.Second), Retries: 4, Version: 2, Community: "foo", }, } err := s.Init() require.NoError(t, err) gsc, err := s.getConnection(0) require.NoError(t, err) gs := gsc.(snmp.GosnmpWrapper) require.Equal(t, "1.2.3.4", gs.Target) require.EqualValues(t, 567, gs.Port) require.Equal(t, gosnmp.Version2c, gs.Version) require.Equal(t, "foo", gs.Community) require.Equal(t, "udp", gs.Transport) gsc, err = s.getConnection(1) require.NoError(t, err) gs = gsc.(snmp.GosnmpWrapper) require.Equal(t, "1.2.3.4", gs.Target) require.EqualValues(t, 161, gs.Port) require.Equal(t, "udp", gs.Transport) gsc, err = s.getConnection(2) require.NoError(t, err) gs = gsc.(snmp.GosnmpWrapper) require.Equal(t, "127.0.0.1", gs.Target) require.EqualValues(t, 161, gs.Port) require.Equal(t, "udp", gs.Transport) }
explode_data.jsonl/50784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 498 }
[ 2830, 3393, 1949, 18966, 5781, 4526, 2273, 17, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 20720, 1307, 515, 197, 197, 91804, 25, 3056, 917, 4913, 16, 13, 17, 13, 18, 13, 19, 25, 20, 21, 22, 497, 330, 16, 13, 17, 13, 18, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRedis_HdelHlen(t *testing.T) { runOnRedis(t, func(client *Redis) { assert.Nil(t, client.Hset("a", "aa", "aaa")) assert.Nil(t, client.Hset("a", "bb", "bbb")) _, err := NewRedis(client.Addr, "").Hlen("a") assert.NotNil(t, err) num, err := client.Hlen("a") assert.Nil(t, err) assert.Equal(t, 2, num) val, err := client.Hdel("a", "aa") assert.Nil(t, err) assert.True(t, val) vals, err := client.Hvals("a") assert.Nil(t, err) assert.ElementsMatch(t, []string{"bbb"}, vals) }) }
explode_data.jsonl/39159
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 48137, 2039, 9588, 39, 2892, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 6948, 59678, 1155, 11, 2943, 3839, 746, 445, 64, 497, 330, 5305, 497, 330, 32646, 5455, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequest(t *testing.T) { t.Run("Context", func(t *testing.T) { t.Run("Default", testRequestContextDefault) t.Run("Custom", testRequestContextCustom) }) t.Run("WithContext", func(t *testing.T) { t.Run("Nil", testRequestWithContextNil) t.Run("Custom", testRequestWithContextCustom) }) }
explode_data.jsonl/14997
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 1900, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1972, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3244, 16708, 445, 3675, 497, 1273, 1900, 1972, 3675, 340, 197, 3244, 16708, 445, 10268, 497, 1273, 1900, 1972, 10268...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestServerGetRecursive starts a read-only Go SFTP server and pulls the
// current working directory from it recursively with an sftp client, then
// compares the downloaded tree against the original on disk.
func TestServerGetRecursive(t *testing.T) {
	listenerGo, hostGo, portGo := testServer(t, GolangSFTP, READONLY)
	defer listenerGo.Close()
	dirRemote, err := os.Getwd()
	if err != nil {
		t.Fatal(err)
	}
	// Random scratch directory for the download; removed on exit.
	tmpDirLocal := "/tmp/" + randName()
	defer os.RemoveAll(tmpDirLocal)
	t.Logf("get recursive: local %v remote %v", tmpDirLocal, dirRemote)
	// pull this directory (source code etc) recursively from the server;
	// the script first creates the local dir, then issues a recursive,
	// permission-preserving get.
	if output, err := runSftpClient(t, "lmkdir "+tmpDirLocal+"\r\nget -r -P "+dirRemote+"/ "+tmpDirLocal+"/", "/", hostGo, portGo); err != nil {
		t.Fatalf("runSftpClient failed: %v, output\n%v\n", err, output)
	}
	// The remote dir lands under tmpDirLocal/<basename>; compare trees.
	compareDirectoriesRecursive(t, dirRemote, path.Join(tmpDirLocal, path.Base(dirRemote)))
}
explode_data.jsonl/6585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 272 }
[ 2830, 3393, 5475, 1949, 78542, 1155, 353, 8840, 836, 8, 341, 14440, 798, 10850, 11, 3468, 10850, 11, 2635, 10850, 1669, 1273, 5475, 1155, 11, 479, 37287, 50, 56468, 11, 18666, 31092, 340, 16867, 11446, 10850, 10421, 2822, 48532, 24703, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestJsonDirsPresenter builds a two-package catalog from a directory source,
// renders it through the JSON presenter, and compares the output against a
// golden file (regenerated when the -update flag is set).
func TestJsonDirsPresenter(t *testing.T) {
	var buffer bytes.Buffer
	catalog := pkg.NewCatalog()
	// populate catalog with test data: one Python package with full metadata...
	catalog.Add(pkg.Package{
		ID:      "package-1-id",
		Name:    "package-1",
		Version: "1.0.1",
		Type:    pkg.PythonPkg,
		FoundBy: "the-cataloger-1",
		Locations: []source.Location{
			{RealPath: "/some/path/pkg1"},
		},
		Language:     pkg.Python,
		MetadataType: pkg.PythonPackageMetadataType,
		Licenses:     []string{"MIT"},
		Metadata: pkg.PythonPackageMetadata{
			Name:    "package-1",
			Version: "1.0.1",
		},
		PURL: "a-purl-2",
		CPEs: []pkg.CPE{
			must(pkg.NewCPE("cpe:2.3:*:some:package:2:*:*:*:*:*:*:*")),
		},
	})
	// ...and one Debian package.
	catalog.Add(pkg.Package{
		ID:      "package-2-id",
		Name:    "package-2",
		Version: "2.0.1",
		Type:    pkg.DebPkg,
		FoundBy: "the-cataloger-2",
		Locations: []source.Location{
			{RealPath: "/some/path/pkg1"},
		},
		MetadataType: pkg.DpkgMetadataType,
		Metadata: pkg.DpkgMetadata{
			Package: "package-2",
			Version: "2.0.1",
		},
		PURL: "a-purl-2",
		CPEs: []pkg.CPE{
			must(pkg.NewCPE("cpe:2.3:*:some:package:2:*:*:*:*:*:*:*")),
		},
	})
	// nil distro: the presenter must tolerate an unknown distro.
	var d *distro.Distro
	s, err := source.NewFromDirectory("/some/path")
	if err != nil {
		t.Fatal(err)
	}
	pres := NewPresenter(catalog, s.Metadata, d)
	// run presenter
	err = pres.Present(&buffer)
	if err != nil {
		t.Fatal(err)
	}
	actual := buffer.Bytes()
	// With -update, rewrite the golden file instead of comparing.
	if *update {
		testutils.UpdateGoldenFileContents(t, actual)
	}
	var expected = testutils.GetGoldenFileContents(t)
	// On mismatch, show a readable diff rather than two JSON blobs.
	if !bytes.Equal(expected, actual) {
		dmp := diffmatchpatch.New()
		diffs := dmp.DiffMain(string(expected), string(actual), true)
		t.Errorf("mismatched output:\n%s", dmp.DiffPrettyText(diffs))
	}
}
explode_data.jsonl/13597
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 782 }
[ 2830, 3393, 5014, 97384, 33849, 1155, 353, 8840, 836, 8, 341, 2405, 4147, 5820, 22622, 271, 1444, 7750, 1669, 24793, 7121, 41606, 2822, 197, 322, 30446, 16403, 448, 1273, 821, 198, 1444, 7750, 1904, 68780, 49834, 515, 197, 29580, 25, 41...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestMarshal_roundtripFromBytes decodes a hand-built BSON document into D
// and re-encodes it, asserting the output is byte-for-byte identical to the
// input. The literal below spells out the BSON wire layout explicitly.
func TestMarshal_roundtripFromBytes(t *testing.T) {
	before := []byte{
		// length
		0x1c, 0x0, 0x0, 0x0,
		// --- begin array ---
		// type - document
		0x3,
		// key - "foo"
		0x66, 0x6f, 0x6f, 0x0,
		// length
		0x12, 0x0, 0x0, 0x0,
		// type - string
		0x2,
		// key - "bar"
		0x62, 0x61, 0x72, 0x0,
		// value - string length
		0x4, 0x0, 0x0, 0x0,
		// value - "baz"
		0x62, 0x61, 0x7a, 0x0,
		// null terminator
		0x0,
		// --- end array ---
		// null terminator
		0x0,
	}
	var doc D
	require.NoError(t, Unmarshal(before, &doc))
	after, err := Marshal(doc)
	require.NoError(t, err)
	// The round trip must preserve the exact byte layout.
	require.True(t, bytes.Equal(before, after))
}
explode_data.jsonl/12830
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 55438, 29896, 32981, 3830, 7078, 1155, 353, 8840, 836, 8, 341, 63234, 1669, 3056, 3782, 515, 197, 197, 322, 3084, 198, 197, 197, 15, 87, 16, 66, 11, 220, 15, 87, 15, 11, 220, 15, 87, 15, 11, 220, 15, 87, 15, 3554, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetEnv verifies getEnv's environment filtering for both values of
// keepEnvPrefix: FOO_-prefixed variables override their unprefixed
// counterparts when the prefix is stripped (false) or are kept verbatim
// (true), and PATH is always extended with the k0s bin dir. The process
// environment is saved up front and restored at the end.
func TestGetEnv(t *testing.T) {
	// backup environment vars
	oldEnv := os.Environ()
	os.Clearenv()
	os.Setenv("k3", "v3")
	os.Setenv("PATH", "/bin")
	os.Setenv("k2", "v2")
	os.Setenv("FOO_k3", "foo_v3")
	os.Setenv("k4", "v4")
	os.Setenv("FOO_k2", "foo_v2")
	os.Setenv("FOO_HTTPS_PROXY", "a.b.c:1080")
	os.Setenv("HTTPS_PROXY", "1.2.3.4:8888")
	os.Setenv("k1", "v1")
	os.Setenv("FOO_PATH", "/usr/local/bin")
	// keepEnvPrefix=false: FOO_* entries replace the unprefixed ones.
	env := getEnv("/var/lib/k0s", "foo", false)
	// Sorted so the comparison is order-independent.
	sort.Strings(env)
	expected := "[HTTPS_PROXY=a.b.c:1080 PATH=/var/lib/k0s/bin:/usr/local/bin k1=v1 k2=foo_v2 k3=foo_v3 k4=v4]"
	actual := fmt.Sprintf("%s", env)
	if actual != expected {
		t.Errorf("Failed in env processing with keepEnvPrefix=false, expected: %q, actual: %q", expected, actual)
	}
	// keepEnvPrefix=true: FOO_* entries are preserved alongside the originals.
	env = getEnv("/var/lib/k0s", "foo", true)
	sort.Strings(env)
	expected = "[FOO_PATH=/usr/local/bin FOO_k2=foo_v2 FOO_k3=foo_v3 HTTPS_PROXY=a.b.c:1080 PATH=/var/lib/k0s/bin:/bin k1=v1 k2=v2 k3=v3 k4=v4]"
	actual = fmt.Sprintf("%s", env)
	if actual != expected {
		t.Errorf("Failed in env processing with keepEnvPrefix=true, expected: %q, actual: %q", expected, actual)
	}
	//restore environment vars
	os.Clearenv()
	for _, e := range oldEnv {
		kv := strings.SplitN(e, "=", 2)
		os.Setenv(kv[0], kv[1])
	}
}
explode_data.jsonl/52872
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 606 }
[ 2830, 3393, 1949, 14359, 1155, 353, 8840, 836, 8, 341, 197, 322, 15725, 4573, 19942, 198, 61828, 14359, 1669, 2643, 22834, 2772, 2822, 25078, 727, 273, 9151, 85, 741, 25078, 4202, 3160, 445, 74, 18, 497, 330, 85, 18, 1138, 25078, 4202...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestColIdentMarshal(t *testing.T) { str := NewColIdent("Ab") b, err := json.Marshal(str) if err != nil { t.Fatal(err) } got := string(b) want := `"Ab"` if got != want { t.Errorf("json.Marshal()= %s, want %s", got, want) } var out ColIdent if err := json.Unmarshal(b, &out); err != nil { t.Errorf("Unmarshal err: %v, want nil", err) } if !reflect.DeepEqual(out, str) { t.Errorf("Unmarshal: %v, want %v", out, str) } }
explode_data.jsonl/3383
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 6127, 28301, 55438, 1155, 353, 8840, 836, 8, 341, 11355, 1669, 1532, 6127, 28301, 445, 5830, 1138, 2233, 11, 1848, 1669, 2951, 37271, 4199, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 3174, 354, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestBaseIdentity_GetThwart(t *testing.T) { i := getTestBaseIdentity() th := i.GetThwart() if th != 0 { t.Errorf("When isHero is false, GetThwart should return 0. Expected 0, got: %d", th) } i.ChangeForm() th = i.GetThwart() if th != TestThwart { t.Errorf("When isHero is true, GetThwart should return the hero's thwart. Expected %d, got %d", TestThwart, th) } }
explode_data.jsonl/55788
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 3978, 18558, 13614, 1001, 35608, 1155, 353, 8840, 836, 8, 341, 8230, 1669, 633, 2271, 3978, 18558, 2822, 70479, 1669, 600, 2234, 1001, 35608, 741, 743, 270, 961, 220, 15, 341, 197, 3244, 13080, 445, 4498, 374, 30228, 374, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestConnMaxLifetime verifies connection expiry driven by SetConnMaxLifetime.
// Time is virtualized by overriding nowFunc with a fixed base plus an offset,
// so the test advances the clock deterministically. Open/close counts are read
// from the fake driver before and after to assert exactly how many physical
// connections were created and expired.
func TestConnMaxLifetime(t *testing.T) {
	t0 := time.Unix(1000000, 0)
	offset := time.Duration(0)
	// Virtual clock: now = t0 + offset; restored on exit.
	nowFunc = func() time.Time { return t0.Add(offset) }
	defer func() { nowFunc = time.Now }()
	db := newTestDB(t, "magicquery")
	defer closeDB(t, db)
	driver := db.Driver().(*fakeDriver)
	// Force the number of open connections to 0 so we can get an accurate
	// count for the test
	db.clearAllConns(t)
	// Baseline counters; all later assertions are deltas from these.
	driver.mu.Lock()
	opens0 := driver.openCount
	closes0 := driver.closeCount
	driver.mu.Unlock()
	db.SetMaxIdleConns(10)
	db.SetMaxOpenConns(10)
	// Two concurrent transactions force two distinct connections.
	tx, err := db.Begin()
	if err != nil {
		t.Fatal(err)
	}
	offset = time.Second
	tx2, err := db.Begin()
	if err != nil {
		t.Fatal(err)
	}
	tx.Commit()
	tx2.Commit()
	driver.mu.Lock()
	opens := driver.openCount - opens0
	closes := driver.closeCount - closes0
	driver.mu.Unlock()
	// No lifetime set yet: both connections stay pooled, none closed.
	if opens != 2 {
		t.Errorf("opens = %d; want 2", opens)
	}
	if closes != 0 {
		t.Errorf("closes = %d; want 0", closes)
	}
	if g, w := db.numFreeConns(), 2; g != w {
		t.Errorf("free conns = %d; want %d", g, w)
	}
	// Expire first conn: advance the clock past the 10s lifetime so the
	// connection opened at offset 0 is stale, while the one opened at
	// offset 1s is still valid.
	offset = 11 * time.Second
	db.SetConnMaxLifetime(10 * time.Second)
	if err != nil {
		t.Fatal(err)
	}
	tx, err = db.Begin()
	if err != nil {
		t.Fatal(err)
	}
	tx2, err = db.Begin()
	if err != nil {
		t.Fatal(err)
	}
	tx.Commit()
	tx2.Commit()
	driver.mu.Lock()
	opens = driver.openCount - opens0
	closes = driver.closeCount - closes0
	driver.mu.Unlock()
	// One extra open (replacement for the expired conn), one close.
	if opens != 3 {
		t.Errorf("opens = %d; want 3", opens)
	}
	if closes != 1 {
		t.Errorf("closes = %d; want 1", closes)
	}
}
explode_data.jsonl/16008
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 663 }
[ 2830, 3393, 9701, 5974, 74579, 1155, 353, 8840, 836, 8, 341, 3244, 15, 1669, 882, 10616, 941, 7, 16, 15, 15, 15, 15, 15, 15, 11, 220, 15, 340, 40668, 1669, 882, 33795, 7, 15, 692, 80922, 9626, 284, 2915, 368, 882, 16299, 314, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_vAdapter_run drives vAdapter.run against a vcsim simulator and a fake
// KV checkpoint store. Each case controls which CloudEvents sends succeed via
// a stubbed HTTP round tripper, then asserts the checkpoint key recorded after
// run is cancelled. run is executed in a goroutine and stopped via context
// cancellation once a checkpoint is observed (or after a grace period when no
// checkpoint is expected).
func Test_vAdapter_run(t *testing.T) {
	const (
		// number of vcsim events emitted for default VPX model
		vcsimEvents = 26
	)
	now := time.Now().UTC()
	type fields struct {
		StatusCodes []int
		Source      string
		KVStore     kvstore.Interface
		CpConfig    CheckpointConfig
	}
	tests := []struct {
		name              string
		fields            fields
		wantCheckpointKey int32 // key we expect in checkpoint after run returns
		wantRunErr        error // error we expect after run returns
	}{
		{
			name: "no existing checkpoint, no events received",
			fields: fields{
				StatusCodes: nil, // we don't send any events
				Source:      source,
				KVStore:     &fakeKVStore{},
				CpConfig: CheckpointConfig{
					MaxAge: CheckpointDefaultAge,
					Period: time.Millisecond,
				},
			},
			wantCheckpointKey: 0, // we never checkpoint in this test
			wantRunErr:        context.Canceled,
		},
		{
			name: "existing checkpoint, events received and all sends succeed",
			fields: fields{
				StatusCodes: createStatusCodes(vcsimEvents, failNever),
				Source:      source,
				KVStore: &fakeKVStore{
					data: map[string]string{
						// One-hour-old checkpoint, within MaxAge.
						checkpointKey: createCheckpoint(t, now.Add(time.Hour*-1)),
					},
					dataChan: make(chan string, 1),
				},
				CpConfig: CheckpointConfig{
					MaxAge: time.Hour,
					Period: time.Millisecond,
				},
			},
			wantCheckpointKey: 26,
			wantRunErr:        context.Canceled,
		},
		{
			name: "existing checkpoint, events received and first two sends succeeds",
			fields: fields{
				// Sends start failing after the first two events.
				StatusCodes: createStatusCodes(vcsimEvents, 2),
				Source:      source,
				KVStore: &fakeKVStore{
					data: map[string]string{
						checkpointKey: createCheckpoint(t, now.Add(time.Hour*-1)),
					},
					dataChan: make(chan string, 1),
				},
				CpConfig: CheckpointConfig{
					MaxAge: time.Hour,
					Period: time.Millisecond,
				},
			},
			wantCheckpointKey: 2,
			wantRunErr:        context.Canceled,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			simulator.Run(func(ctx context.Context, vim *vim25.Client) error {
				ctx = cecontext.WithTarget(ctx, "fake.example.com")
				// Stub transport returning the per-case status codes.
				roundTripper := &roundTripperTest{statusCodes: tt.fields.StatusCodes}
				opts := []cehttp.Option{
					cehttp.WithRoundTripper(roundTripper),
				}
				p, err := cehttp.New(opts...)
				if err != nil {
					t.Error(err)
				}
				c, err := client.New(p, client.WithTimeNow(), client.WithUUIDs())
				if err != nil {
					t.Error(err)
				}
				logger := zaptest.NewLogger(t, zaptest.WrapOptions(zap.AddCaller()))
				vcClient := govmomi.Client{
					Client:         vim,
					SessionManager: session.NewManager(vim),
				}
				a := &vAdapter{
					Logger:   logger.Sugar(),
					Source:   tt.fields.Source,
					VClient:  &vcClient,
					CEClient: c,
					KVStore:  tt.fields.KVStore,
					CpConfig: tt.fields.CpConfig,
				}
				ctx, cancel := context.WithCancel(ctx)
				defer cancel()
				var (
					wg sync.WaitGroup
					// assertion variables
					cp     checkpoint
					runErr error
				)
				// run components
				wg.Add(1)
				go func() {
					defer wg.Done()
					runErr = a.run(ctx) // will be stopped with cancel()
				}()
				// Watcher: capture the first checkpoint written to the KV
				// store, then cancel run.
				wg.Add(1)
				go func() {
					defer wg.Done()
					select {
					case data := <-tt.fields.KVStore.(*fakeKVStore).dataChan:
						err := json.Unmarshal([]byte(data), &cp)
						if err != nil {
							t.Errorf("unmarshal data from KV store: %v", err)
						}
						cancel() // stop run
					case <-ctx.Done():
					}
				}()
				// for test case(s) where we never send/checkpoint events so test won't hang
				if tt.wantCheckpointKey == 0 {
					wg.Add(1)
					go func() {
						defer wg.Done()
						time.Sleep(time.Millisecond * 100)
						cancel()
					}()
				}
				wg.Wait()
				if !reflect.DeepEqual(runErr, tt.wantRunErr) {
					// hack because govmomi does not wrap context.Canceled err (uses url.Error with
					// random port)
					if runErr != nil && !strings.Contains(runErr.Error(), "context canceled") {
						t.Error("run() unexpected error: ", runErr)
					}
				}
				if tt.wantCheckpointKey != cp.LastEventKey {
					t.Errorf("run() checkpointKey = %v, wantEventKey %v", cp.LastEventKey, tt.wantCheckpointKey)
				}
				return nil
			})
		})
	}
}
explode_data.jsonl/66111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1983 }
[ 2830, 3393, 2273, 5940, 14007, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 197, 322, 1372, 315, 348, 4837, 318, 4357, 46942, 369, 1638, 36010, 55, 1614, 198, 197, 5195, 4837, 318, 7900, 284, 220, 17, 21, 198, 197, 692, 80922, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateAndApplyTransactionPanic(t *testing.T) { if testing.Short() { t.SkipNow() } t.Parallel() // Create invalid update that triggers a panic. update := writeaheadlog.Update{ Name: "invalid name", } // Declare a helper to check for a panic. assertRecover := func() { if r := recover(); r == nil { t.Fatalf("Expected a panic") } } // Run the test for both the method and function sf := newBlankTestFile() func() { defer assertRecover() _ = sf.createAndApplyTransaction(update) }() func() { defer assertRecover() _ = createAndApplyTransaction(sf.wal, update) }() }
explode_data.jsonl/14702
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 4021, 3036, 28497, 8070, 47, 31270, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 7039, 741, 197, 532, 3244, 41288, 7957, 2822, 197, 322, 4230, 8318, 2647, 429, 30754, 264, 21975, 624, 27175, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestChannelGetGroupVersionKind(t *testing.T) { want := schema.GroupVersionKind{ Group: "messaging.cloud.google.com", Version: "v1beta1", Kind: "Channel", } c := &Channel{} got := c.GetGroupVersionKind() if diff := cmp.Diff(want, got); diff != "" { t.Errorf("failed to get expected (-want, +got) = %v", diff) } }
explode_data.jsonl/24163
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 9629, 1949, 2808, 5637, 10629, 1155, 353, 8840, 836, 8, 341, 50780, 1669, 10802, 5407, 5637, 10629, 515, 197, 197, 2808, 25, 256, 330, 76, 32140, 16935, 5713, 905, 756, 197, 77847, 25, 330, 85, 16, 19127, 16, 756, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestPatchPost exercises the PatchPost client API: a post with files, props,
// pinned state and reactions is created once, then patched under a series of
// subtests covering the happy path, malformed requests, permission checks for
// other users, and admin overrides. The subtests share the post created at
// the top and are order-dependent.
func TestPatchPost(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	Client := th.Client
	channel := th.BasicChannel

	// A license is required for some of the patched features.
	th.App.SetLicense(model.NewTestLicense())

	// Upload three files; only the first two are attached to the post, the
	// third is later used to show FileIds cannot be changed via patch.
	fileIds := make([]string, 3)
	data, err := testutils.ReadTestFile("test.png")
	require.Nil(t, err)
	for i := 0; i < len(fileIds); i++ {
		fileResp, resp := Client.UploadFile(data, channel.Id, "test.png")
		CheckNoError(t, resp)
		fileIds[i] = fileResp.FileInfos[0].Id
	}

	post := &model.Post{
		ChannelId:    channel.Id,
		IsPinned:     true,
		Message:      "#hashtag a message",
		Props:        model.StringInterface{"channel_header": "old_header"},
		FileIds:      fileIds[0:2],
		HasReactions: true,
	}
	post, _ = Client.CreatePost(post)

	var rpost *model.Post
	t.Run("new message, props, files, HasReactions bit", func(t *testing.T) {
		patch := &model.PostPatch{}
		patch.IsPinned = model.NewBool(false)
		patch.Message = model.NewString("#otherhashtag other message")
		patch.Props = &model.StringInterface{"channel_header": "new_header"}
		patchFileIds := model.StringArray(fileIds) // one extra file
		patch.FileIds = &patchFileIds
		patch.HasReactions = model.NewBool(false)

		var resp *model.Response
		rpost, resp = Client.PatchPost(post.Id, patch)
		CheckNoError(t, resp)

		assert.False(t, rpost.IsPinned, "IsPinned did not update properly")
		assert.Equal(t, "#otherhashtag other message", rpost.Message, "Message did not update properly")
		assert.Equal(t, *patch.Props, rpost.GetProps(), "Props did not update properly")
		assert.Equal(t, "#otherhashtag", rpost.Hashtags, "Message did not update properly")
		// FileIds are immutable via patch: the extra file must be ignored.
		assert.Equal(t, model.StringArray(fileIds[0:2]), rpost.FileIds, "FileIds should not update")
		assert.False(t, rpost.HasReactions, "HasReactions did not update properly")
	})

	t.Run("add slack attachments", func(t *testing.T) {
		patch2 := &model.PostPatch{}
		attachments := []model.SlackAttachment{
			{
				Text: "Hello World",
			},
		}
		patch2.Props = &model.StringInterface{"attachments": attachments}

		rpost2, resp := Client.PatchPost(post.Id, patch2)
		CheckNoError(t, resp)
		assert.NotEmpty(t, rpost2.GetProp("attachments"))
		// Patching must bump the edit timestamp.
		assert.NotEqual(t, rpost.EditAt, rpost2.EditAt)
	})

	t.Run("invalid requests", func(t *testing.T) {
		// Malformed body must be rejected with a 400.
		r, err := Client.DoApiPut("/posts/"+post.Id+"/patch", "garbage")
		require.EqualError(t, err, ": Invalid or missing post in request body., ")
		require.Equal(t, http.StatusBadRequest, r.StatusCode, "wrong status code")

		patch := &model.PostPatch{}
		_, resp := Client.PatchPost("junk", patch)
		CheckBadRequestStatus(t, resp)
	})

	t.Run("unknown post", func(t *testing.T) {
		patch := &model.PostPatch{}
		_, resp := Client.PatchPost(GenerateTestId(), patch)
		CheckForbiddenStatus(t, resp)
	})

	t.Run("logged out", func(t *testing.T) {
		Client.Logout()
		patch := &model.PostPatch{}
		_, resp := Client.PatchPost(post.Id, patch)
		CheckUnauthorizedStatus(t, resp)
	})

	t.Run("different user", func(t *testing.T) {
		// A regular user may not patch another user's post.
		th.LoginBasic2()
		patch := &model.PostPatch{}
		_, resp := Client.PatchPost(post.Id, patch)
		CheckForbiddenStatus(t, resp)
	})

	t.Run("different user, but team admin", func(t *testing.T) {
		// Team admin alone is not sufficient either.
		th.LoginTeamAdmin()
		patch := &model.PostPatch{}
		_, resp := Client.PatchPost(post.Id, patch)
		CheckForbiddenStatus(t, resp)
	})

	t.Run("different user, but system admin", func(t *testing.T) {
		// System admins may patch anyone's post.
		patch := &model.PostPatch{}
		_, resp := th.SystemAdminClient.PatchPost(post.Id, patch)
		CheckNoError(t, resp)
	})
}
explode_data.jsonl/5246
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1352 }
[ 2830, 3393, 43622, 4133, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 71550, 1669, 270, 48868, 9629, 271, 70479, 5105, 4202, 9827, 7635, 71...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransportTLSHandshakeTimeout checks that a TLS handshake which never
// completes fails with a timeout error. A plain TCP listener accepts the
// connection but never speaks TLS, so the client's handshake must hit
// TLSHandshakeTimeout (250ms) and surface "handshake timeout" as a net.Error
// with Timeout() == true.
func TestTransportTLSHandshakeTimeout(t *testing.T) {
	defer afterTest(t)
	if testing.Short() {
		t.Skip("skipping in short mode")
	}
	ln := newLocalListener(t)
	defer ln.Close()
	testdonec := make(chan struct{})
	defer close(testdonec)
	// Server side: accept one conn and hold it open (no TLS handshake)
	// until the test is done.
	go func() {
		c, err := ln.Accept()
		if err != nil {
			t.Error(err)
			return
		}
		<-testdonec
		c.Close()
	}()
	getdonec := make(chan struct{})
	// Client side: dial the stalled listener over "https" and inspect the
	// resulting error chain.
	go func() {
		defer close(getdonec)
		tr := &Transport{
			Dial: func(_, _ string) (net.Conn, error) {
				// Route every dial to the local stalled listener.
				return net.Dial("tcp", ln.Addr().String())
			},
			TLSHandshakeTimeout: 250 * time.Millisecond,
		}
		cl := &Client{Transport: tr}
		_, err := cl.Get("https://dummy.tld/")
		if err == nil {
			t.Error("expected error")
			return
		}
		// Expect url.Error wrapping a net.Error that reports a timeout.
		ue, ok := err.(*url.Error)
		if !ok {
			t.Errorf("expected url.Error; got %#v", err)
			return
		}
		ne, ok := ue.Err.(net.Error)
		if !ok {
			t.Errorf("expected net.Error; got %#v", err)
			return
		}
		if !ne.Timeout() {
			t.Errorf("expected timeout error; got %v", err)
		}
		if !strings.Contains(err.Error(), "handshake timeout") {
			t.Errorf("expected 'handshake timeout' in error; got %v", err)
		}
	}()
	// Guard against the handshake hanging forever.
	select {
	case <-getdonec:
	case <-time.After(5 * time.Second):
		t.Error("test timeout; TLS handshake hung?")
	}
}
explode_data.jsonl/4913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 558 }
[ 2830, 3393, 27560, 45439, 2314, 29661, 7636, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 304, 2805, 3856, 1138, 197, 532, 197, 2261, 1669, 501, 7319, 2743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPFCount(t *testing.T) { r.Del("hll", "hll2") r.PFAdd("hll", "1", "2") r.PFAdd("hll2", "a", "1") n, err := r.PFCount("hll") if err != nil { t.Error(err.Error()) } if n != 2 { t.Fail() } n, _ = r.PFCount("hll", "hll2") if n != 3 { t.Fail() } }
explode_data.jsonl/81268
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 19701, 2507, 1155, 353, 8840, 836, 8, 341, 7000, 909, 301, 445, 71, 654, 497, 330, 71, 654, 17, 1138, 7000, 1069, 37, 2212, 445, 71, 654, 497, 330, 16, 497, 330, 17, 1138, 7000, 1069, 37, 2212, 445, 71, 654, 17, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeliverServiceBadConfig(t *testing.T) { // Empty endpoints service, err := NewDeliverService(&Config{ Endpoints: []string{}, Gossip: &mocks.MockGossipServiceAdapter{}, CryptoSvc: &mockMCS{}, ABCFactory: DefaultABCFactory, ConnFactory: DefaultConnectionFactory, }) assert.Error(t, err) assert.Nil(t, service) // Nil gossip adapter service, err = NewDeliverService(&Config{ Endpoints: []string{"a"}, Gossip: nil, CryptoSvc: &mockMCS{}, ABCFactory: DefaultABCFactory, ConnFactory: DefaultConnectionFactory, }) assert.Error(t, err) assert.Nil(t, service) // Nil crypto service service, err = NewDeliverService(&Config{ Endpoints: []string{"a"}, Gossip: &mocks.MockGossipServiceAdapter{}, CryptoSvc: nil, ABCFactory: DefaultABCFactory, ConnFactory: DefaultConnectionFactory, }) assert.Error(t, err) assert.Nil(t, service) // Nil ABCFactory service, err = NewDeliverService(&Config{ Endpoints: []string{"a"}, Gossip: &mocks.MockGossipServiceAdapter{}, CryptoSvc: &mockMCS{}, ABCFactory: nil, ConnFactory: DefaultConnectionFactory, }) assert.Error(t, err) assert.Nil(t, service) // Nil connFactory service, err = NewDeliverService(&Config{ Endpoints: []string{"a"}, Gossip: &mocks.MockGossipServiceAdapter{}, CryptoSvc: &mockMCS{}, ABCFactory: DefaultABCFactory, }) assert.Error(t, err) assert.Nil(t, service) }
explode_data.jsonl/10585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 565 }
[ 2830, 3393, 16532, 1524, 1860, 17082, 2648, 1155, 353, 8840, 836, 8, 341, 197, 322, 22228, 36342, 198, 52934, 11, 1848, 1669, 1532, 16532, 1524, 1860, 2099, 2648, 515, 197, 38407, 7706, 25, 256, 3056, 917, 38837, 197, 9600, 41473, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStrArray_Sort(t *testing.T) { gtest.C(t, func(t *gtest.T) { expect1 := []string{"0", "1", "2", "3"} expect2 := []string{"3", "2", "1", "0"} array := garray.NewStrArray() for i := 3; i >= 0; i-- { array.Append(gconv.String(i)) } array.Sort() t.Assert(array.Slice(), expect1) array.Sort(true) t.Assert(array.Slice(), expect2) }) }
explode_data.jsonl/53082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 2580, 1857, 1098, 371, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 24952, 16, 1669, 3056, 917, 4913, 15, 497, 330, 16, 497, 330, 17, 497, 330, 18, 16707, 197, 2495...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEmptyTemplate(t *testing.T) { cases := []struct { defn []string in string want string }{ {[]string{""}, "once", ""}, {[]string{"", ""}, "twice", ""}, {[]string{"{{.}}", "{{.}}"}, "twice", "twice"}, {[]string{"{{/* a comment */}}", "{{/* a comment */}}"}, "comment", ""}, {[]string{"{{.}}", ""}, "twice", ""}, } for i, c := range cases { root := New("root") var ( m *Template err error ) for _, d := range c.defn { m, err = root.New(c.in).Parse(d) if err != nil { t.Fatal(err) } } buf := &bytes.Buffer{} if err := m.Execute(buf, c.in); err != nil { t.Error(i, err) continue } if buf.String() != c.want { t.Errorf("expected string %q: got %q", c.want, buf.String()) } } }
explode_data.jsonl/71993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 352 }
[ 2830, 3393, 3522, 7275, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 7452, 77, 3056, 917, 198, 197, 17430, 256, 914, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 90, 1294, 917, 90, 3014, 2137, 330, 13184...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTypeSystem_ObjectTypesMustBeAssertable_AcceptsAnObjectTypeWithAnIsTypeOfFunction(t *testing.T) { _, err := schemaWithFieldType(graphql.NewObject(graphql.ObjectConfig{ Name: "AnotherObject", IsTypeOf: func(p graphql.IsTypeOfParams) bool { return true }, Fields: graphql.Fields{ "f": &graphql.Field{ Type: graphql.String, }, }, })) if err != nil { t.Fatalf("unexpected error: %v", err) } }
explode_data.jsonl/79159
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 929, 2320, 27839, 4173, 31776, 3430, 8534, 480, 1566, 66, 57771, 2082, 49530, 2354, 2082, 3872, 929, 2124, 5152, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10802, 2354, 63733, 24312, 1470, 7121, 1190, 24312, 1470, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_timeDurationDecorator(t *testing.T) { f := new(Field) (*DefaultMappings["time.Duration"]).Decorators.Run(&Package{}, &Message{}, f) assert.Equal(t, NewLiteralValue("true"), f.Options["(gogoproto.stdduration)"]) }
explode_data.jsonl/53659
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 3009, 12945, 47951, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 57788, 340, 197, 4071, 3675, 83421, 1183, 1678, 33795, 45014, 35227, 2973, 16708, 2099, 13100, 22655, 609, 2052, 22655, 282, 692, 6948, 12808, 1155, 11, 1532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestHash(t *testing.T) { data := []byte("some other data") block := NewBlock(data) hash, err := mh.Sum(data, mh.SHA2_256, -1) if err != nil { t.Fatal(err) } if !bytes.Equal(block.Multihash(), hash) { t.Error("wrong multihash") } }
explode_data.jsonl/74315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 6370, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 445, 14689, 1008, 821, 1138, 47996, 1669, 1532, 4713, 2592, 692, 50333, 11, 1848, 1669, 72317, 41676, 2592, 11, 72317, 808, 17020, 17, 62, 17, 20, 21, 11, 481, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestClient_GetTransferInfo(t *testing.T) { // 初始化参数结构体 bm := make(gopay.BodyMap) bm.Set("nonce_str", util.GetRandomString(32)). Set("partner_trade_no", util.GetRandomString(32)) // 查询企业付款 // body:参数Body // certFilePath:cert证书路径 // keyFilePath:Key证书路径 // pkcs12FilePath:p12证书路径 wxRsp, err := client.GetTransferInfo(bm, nil, nil, nil) if err != nil { xlog.Errorf("client.GetTransferInfo(%+v),error:%+v", bm, err) return } xlog.Debug("wxRsp:", *wxRsp) }
explode_data.jsonl/56605
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 278 }
[ 2830, 3393, 2959, 13614, 21970, 1731, 1155, 353, 8840, 836, 8, 341, 197, 322, 76090, 32665, 100166, 31914, 198, 2233, 76, 1669, 1281, 3268, 453, 352, 20934, 2227, 340, 2233, 76, 4202, 445, 39593, 2895, 497, 4094, 2234, 13999, 703, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFindAllPairs(t *testing.T) { pairs := []struct { pairs []string }{ { []string{ "uatom", "ubtsg", "udvpn", "uxprt", "uakt", "uluna", "ungm", "uiris", "xrun", "uregen", "udsm", "ucom", "ugcyb", }, }, { []string{ "uatom", "ubtsg", "udvpn", "uxprt", "uakt", "uluna", "ungm", "uiris", }, }, } for _, p := range pairs { for i := 0; i < len(p.pairs)-1; i++ { for j := i + 1; j < len(p.pairs); j++ { fmt.Println(p.pairs[i], p.pairs[j]) } } } }
explode_data.jsonl/23722
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 367 }
[ 2830, 3393, 9885, 2403, 54228, 1155, 353, 8840, 836, 8, 341, 3223, 4720, 1669, 3056, 1235, 341, 197, 3223, 4720, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 197, 1294, 917, 515, 571, 197, 1, 84, 21855, 756, 571, 197, 1, 392, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSRVGetCluster(t *testing.T) { defer func() { lookupSRV = net.LookupSRV resolveTCPAddr = net.ResolveTCPAddr }() name := "dnsClusterTest" dns := map[string]string{ "1.example.com.:2480": "10.0.0.1:2480", "2.example.com.:2480": "10.0.0.2:2480", "3.example.com.:2480": "10.0.0.3:2480", "4.example.com.:2380": "10.0.0.3:2380", } srvAll := []*net.SRV{ {Target: "1.example.com.", Port: 2480}, {Target: "2.example.com.", Port: 2480}, {Target: "3.example.com.", Port: 2480}, } tests := []struct { scheme string records []*net.SRV urls []string expected string }{ { "https", []*net.SRV{}, nil, "", }, { "https", srvAll, nil, "0=https://1.example.com:2480,1=https://2.example.com:2480,2=https://3.example.com:2480", }, { "http", srvAll, nil, "0=http://1.example.com:2480,1=http://2.example.com:2480,2=http://3.example.com:2480", }, { "https", srvAll, []string{"https://10.0.0.1:2480"}, "dnsClusterTest=https://1.example.com:2480,0=https://2.example.com:2480,1=https://3.example.com:2480", }, // matching local member with resolved addr and return unresolved hostnames { "https", srvAll, []string{"https://10.0.0.1:2480"}, "dnsClusterTest=https://1.example.com:2480,0=https://2.example.com:2480,1=https://3.example.com:2480", }, // reject if apurls are TLS but SRV is only http { "http", srvAll, []string{"https://10.0.0.1:2480"}, "0=http://2.example.com:2480,1=http://3.example.com:2480", }, } resolveTCPAddr = func(network, addr string) (*net.TCPAddr, error) { if strings.Contains(addr, "10.0.0.") { // accept IP addresses when resolving apurls return net.ResolveTCPAddr(network, addr) } if dns[addr] == "" { return nil, errors.New("missing dns record") } return net.ResolveTCPAddr(network, dns[addr]) } for i, tt := range tests { lookupSRV = func(service string, proto string, domain string) (string, []*net.SRV, error) { return "", tt.records, nil } urls := testutil.MustNewURLs(t, tt.urls) str, err := GetCluster(tt.scheme, "etcd-server", name, "example.com", urls) if err != nil { t.Fatalf("%d: 
err: %#v", i, err) } if strings.Join(str, ",") != tt.expected { t.Errorf("#%d: cluster = %s, want %s", i, str, tt.expected) } } }
explode_data.jsonl/26136
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1116 }
[ 2830, 3393, 14557, 53, 1949, 28678, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 197, 21020, 14557, 53, 284, 4179, 79261, 14557, 53, 198, 197, 87778, 49896, 13986, 284, 4179, 57875, 49896, 13986, 198, 197, 66816, 11609, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEntryCounterRateLimiting(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() e, _, now := testEntry(ctrl, testEntryOptions{}) // Reset runtime options to disable rate limiting. noRateLimitRuntimeOpts := runtime.NewOptions().SetWriteValuesPerMetricLimitPerSecond(0) e.SetRuntimeOptions(noRateLimitRuntimeOpts) require.NoError(t, e.AddUntimed(testCounter, testDefaultStagedMetadatas)) // Reset runtime options to enable a rate limit of 10/s. limitPerSecond := 10 runtimeOpts := runtime.NewOptions().SetWriteValuesPerMetricLimitPerSecond(int64(limitPerSecond)) e.SetRuntimeOptions(runtimeOpts) for i := 0; i < limitPerSecond; i++ { require.NoError(t, e.AddUntimed(testCounter, testDefaultStagedMetadatas)) } require.Equal(t, errWriteValueRateLimitExceeded, e.AddUntimed(testCounter, testDefaultStagedMetadatas)) // Reset limit to enable a rate limit of 100/s. limitPerSecond = 100 runtimeOpts = runtime.NewOptions().SetWriteValuesPerMetricLimitPerSecond(int64(limitPerSecond)) e.SetRuntimeOptions(runtimeOpts) for i := 0; i < limitPerSecond; i++ { require.NoError(t, e.AddUntimed(testCounter, testDefaultStagedMetadatas)) } require.Equal(t, errWriteValueRateLimitExceeded, e.AddUntimed(testCounter, testDefaultStagedMetadatas)) // Advancing the time will reset the quota. *now = (*now).Add(time.Second) require.NoError(t, e.AddUntimed(testCounter, testDefaultStagedMetadatas)) }
explode_data.jsonl/24206
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 486 }
[ 2830, 3393, 5874, 14099, 11564, 16527, 287, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 7727, 11, 8358, 1431, 1669, 1273, 5874, 62100, 11, 1273, 5874, 3798, 6257, 692...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAlign(t *testing.T) { t.Parallel() const input = ` foo$0(a ptr[in, s0]) s0 { f0 int8 f1 int16 } foo$1(a ptr[in, s1]) s1 { f0 ptr[in, s2, opt] } s2 { f1 s1 f2 array[s1, 2] f3 array[array[s1, 2], 2] } ` eh := func(pos ast.Pos, msg string) { t.Errorf("%v: %v", pos, msg) } desc := ast.Parse([]byte(input), "input", eh) if desc == nil { t.Fatal("failed to parse") } p := Compile(desc, map[string]uint64{"SYS_foo": 1}, targets.List[targets.TestOS][targets.TestArch64], eh) if p == nil { t.Fatal("failed to compile") } }
explode_data.jsonl/53123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 10069, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 4777, 1946, 284, 22074, 7975, 3, 15, 2877, 10087, 15375, 11, 274, 15, 2546, 82, 15, 341, 1166, 15, 2084, 23, 198, 1166, 16, 2084, 16, 21, 198, 630, 7975, 3, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRateLimiterWithConfig(t *testing.T) { var inMemoryStore = NewRateLimiterMemoryStoreWithConfig(RateLimiterMemoryStoreConfig{Rate: 1, Burst: 3}) e := echo.New() handler := func(c echo.Context) error { return c.String(http.StatusOK, "test") } mw := RateLimiterWithConfig(RateLimiterConfig{ IdentifierExtractor: func(c echo.Context) (string, error) { id := c.Request().Header.Get(echo.HeaderXRealIP) if id == "" { return "", errors.New("invalid identifier") } return id, nil }, DenyHandler: func(ctx echo.Context, identifier string, err error) error { return ctx.JSON(http.StatusForbidden, nil) }, ErrorHandler: func(ctx echo.Context, err error) error { return ctx.JSON(http.StatusBadRequest, nil) }, Store: inMemoryStore, }) testCases := []struct { id string code int }{ {"127.0.0.1", http.StatusOK}, {"127.0.0.1", http.StatusOK}, {"127.0.0.1", http.StatusOK}, {"127.0.0.1", http.StatusForbidden}, {"", http.StatusBadRequest}, {"127.0.0.1", http.StatusForbidden}, {"127.0.0.1", http.StatusForbidden}, } for _, tc := range testCases { req := httptest.NewRequest(http.MethodGet, "/", nil) req.Header.Add(echo.HeaderXRealIP, tc.id) rec := httptest.NewRecorder() c := e.NewContext(req, rec) _ = mw(handler)(c) assert.Equal(t, tc.code, rec.Code) } }
explode_data.jsonl/33963
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 540 }
[ 2830, 3393, 11564, 43, 17700, 2354, 2648, 1155, 353, 8840, 836, 8, 341, 2405, 304, 10642, 6093, 284, 1532, 11564, 43, 17700, 10642, 6093, 2354, 2648, 2785, 349, 43, 17700, 10642, 6093, 2648, 90, 11564, 25, 220, 16, 11, 77467, 25, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateAccessApplications(t *testing.T) { setup() defer teardown() handler := func(w http.ResponseWriter, r *http.Request) { assert.Equal(t, r.Method, "POST", "Expected method 'POST', got %s", r.Method) w.Header().Set("content-type", "application/json") fmt.Fprintf(w, `{ "success": true, "errors": [], "messages": [], "result": { "id": "480f4f69-1a28-4fdd-9240-1ed29f0ac1db", "created_at": "2014-01-01T05:20:00.12345Z", "updated_at": "2014-01-01T05:20:00.12345Z", "aud": "737646a56ab1df6ec9bddc7e5ca84eaf3b0768850f3ffb5d74f1534911fe3893", "name": "Admin Site", "domain": "test.example.com/admin", "session_duration": "24h", "allowed_idps": ["f174e90a-fafe-4643-bbbc-4a0ed4fc8415"], "auto_redirect_to_identity": false, "enable_binding_cookie": false, "custom_deny_url": "https://www.cloudflare.com", "custom_deny_message": "denied!" } } `) } createdAt, _ := time.Parse(time.RFC3339, "2014-01-01T05:20:00.12345Z") updatedAt, _ := time.Parse(time.RFC3339, "2014-01-01T05:20:00.12345Z") fullAccessApplication := AccessApplication{ ID: "480f4f69-1a28-4fdd-9240-1ed29f0ac1db", Name: "Admin Site", Domain: "test.example.com/admin", SessionDuration: "24h", AUD: "737646a56ab1df6ec9bddc7e5ca84eaf3b0768850f3ffb5d74f1534911fe3893", AllowedIdps: []string{"f174e90a-fafe-4643-bbbc-4a0ed4fc8415"}, AutoRedirectToIdentity: false, EnableBindingCookie: false, CustomDenyMessage: "denied!", CustomDenyURL: "https://www.cloudflare.com", CreatedAt: &createdAt, UpdatedAt: &updatedAt, } mux.HandleFunc("/accounts/"+accountID+"/access/apps", handler) actual, err := client.CreateAccessApplication( accountID, AccessApplication{ Name: "Admin Site", Domain: "test.example.com/admin", SessionDuration: "24h", }, ) if assert.NoError(t, err) { assert.Equal(t, fullAccessApplication, actual) } mux.HandleFunc("/zones/"+zoneID+"/access/apps", handler) actual, err = client.CreateZoneLevelAccessApplication( zoneID, AccessApplication{ Name: "Admin Site", Domain: "test.example.com/admin", SessionDuration: "24h", }, ) 
if assert.NoError(t, err) { assert.Equal(t, fullAccessApplication, actual) } }
explode_data.jsonl/45470
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1180 }
[ 2830, 3393, 4021, 6054, 50359, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 53326, 1669, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6948, 12808, 1155, 11, 435, 20798, 11, 330, 2946, 497, 330, 1889...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValuesInNonInsertStmt(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec(`use test;`) tk.MustExec(`drop table if exists t;`) tk.MustExec(`create table t(a bigint, b double, c decimal, d varchar(20), e datetime, f time, g json);`) tk.MustExec(`insert into t values(1, 1.1, 2.2, "abc", "2018-10-24", NOW(), "12");`) res := tk.MustQuery(`select values(a), values(b), values(c), values(d), values(e), values(f), values(g) from t;`) res.Check(testkit.Rows(`<nil> <nil> <nil> <nil> <nil> <nil> <nil>`)) }
explode_data.jsonl/65471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 6227, 641, 8121, 13780, 31063, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCustomCertsTransform(t *testing.T) { tests := []struct { name string input servingv1alpha1.CustomCerts expectError bool expectSource *v1.VolumeSource }{{ name: "FromSecret", input: servingv1alpha1.CustomCerts{ Type: "Secret", Name: "my-secret", }, expectError: false, expectSource: &v1.VolumeSource{ Secret: &v1.SecretVolumeSource{ SecretName: "my-secret", }, }, }, { name: "FromConfigMap", input: servingv1alpha1.CustomCerts{ Type: "ConfigMap", Name: "my-map", }, expectError: false, expectSource: &v1.VolumeSource{ ConfigMap: &v1.ConfigMapVolumeSource{ LocalObjectReference: v1.LocalObjectReference{ Name: "my-map", }, }, }, }, { name: "NoCerts", input: servingv1alpha1.CustomCerts{}, expectError: false, }, { name: "InvalidType", input: servingv1alpha1.CustomCerts{ Type: "invalid", }, expectError: true, }, { name: "MissingName", input: servingv1alpha1.CustomCerts{ Type: "Secret", }, expectError: true, }} for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { unstructured := util.MakeUnstructured(t, util.MakeDeployment("controller", v1.PodSpec{ Containers: []v1.Container{{ Name: "controller", }}, })) instance := &servingv1alpha1.KnativeServing{ Spec: servingv1alpha1.KnativeServingSpec{ ControllerCustomCerts: tt.input, }, } customCertsTransform := CustomCertsTransform(instance, log) err := customCertsTransform(&unstructured) if tt.expectError && err == nil { t.Fatal("Transformer should've returned an error and did not") } deployment := &appsv1.Deployment{} err = scheme.Scheme.Convert(&unstructured, deployment, nil) util.AssertEqual(t, err, nil) spec := deployment.Spec.Template.Spec if tt.expectSource != nil { util.AssertEqual(t, spec.Volumes[0].Name, customCertsNamePrefix+tt.input.Name) util.AssertDeepEqual(t, &spec.Volumes[0].VolumeSource, tt.expectSource) util.AssertDeepEqual(t, spec.Containers[0].Env[0], v1.EnvVar{ Name: customCertsEnvName, Value: customCertsMountPath, }) util.AssertDeepEqual(t, spec.Containers[0].VolumeMounts[0], v1.VolumeMount{ Name: 
customCertsNamePrefix + tt.input.Name, MountPath: customCertsMountPath, }) } else { util.AssertEqual(t, len(spec.Volumes), 0) util.AssertEqual(t, len(spec.Containers[0].Env), 0) util.AssertEqual(t, len(spec.Containers[0].VolumeMounts), 0) } }) } }
explode_data.jsonl/68528
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1122 }
[ 2830, 3393, 10268, 34, 15546, 8963, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 22427, 286, 13480, 85, 16, 7141, 16, 27649, 34, 15546, 198, 197, 24952, 1454, 220, 1807, 198, 197, 24952, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCraftSweepAllTx(t *testing.T) { t.Parallel() // First, we'll make a mock signer along with a fee estimator, We'll // use zero fees to we can assert a precise output value. signer := &mockSigner{} feeEstimator := newMockFeeEstimator(0, 0) // For our UTXO source, we'll pass in all the UTXOs that we know of, // other than the final one which is of an unknown witness type. targetUTXOs := testUtxos[:2] utxoSource := newMockUtxoSource(targetUTXOs) coinSelectLocker := &mockCoinSelectionLocker{} utxoLocker := newMockOutpointLocker() sweepPkg, err := CraftSweepAllTx( 0, 100, deliveryAddr, coinSelectLocker, utxoSource, utxoLocker, feeEstimator, signer, ) if err != nil { t.Fatalf("unable to make sweep tx: %v", err) } // At this point, all of the UTXOs that we made above should be locked // and none of them unlocked. assertUtxosLocked(t, utxoLocker, testUtxos[:2]) assertNoUtxosUnlocked(t, utxoLocker, testUtxos[:2]) // Now that we have our sweep transaction, we should find that we have // a UTXO for each input, and also that our final output value is the // sum of all our inputs. sweepTx := sweepPkg.SweepTx if len(sweepTx.TxIn) != len(targetUTXOs) { t.Fatalf("expected %v utxo, got %v", len(targetUTXOs), len(sweepTx.TxIn)) } // We should have a single output that pays to our sweep script // generated above. expectedSweepValue := int64(3000) if len(sweepTx.TxOut) != 1 { t.Fatalf("should have %v outputs, instead have %v", 1, len(sweepTx.TxOut)) } output := sweepTx.TxOut[0] switch { case output.Value != expectedSweepValue: t.Fatalf("expected %v sweep value, instead got %v", expectedSweepValue, output.Value) case !bytes.Equal(sweepScript, output.PkScript): t.Fatalf("expected %x sweep script, instead got %x", sweepScript, output.PkScript) } // If we cancel the sweep attempt, then we should find that all the // UTXOs within the sweep transaction are now unlocked. sweepPkg.CancelSweepAttempt() assertUtxosUnlocked(t, utxoLocker, testUtxos[:2]) }
explode_data.jsonl/37471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 756 }
[ 2830, 3393, 38849, 50, 48542, 2403, 31584, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 5512, 11, 582, 3278, 1281, 264, 7860, 70039, 3156, 448, 264, 11060, 67298, 11, 1205, 3278, 198, 197, 322, 990, 7168, 12436, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetAppDetailsKustomize(t *testing.T) { service := newService("../..") res, err := service.GetAppDetails(context.Background(), &apiclient.RepoServerAppDetailsQuery{ Repo: &argoappv1.Repository{}, Source: &argoappv1.ApplicationSource{ Path: "./util/kustomize/testdata/kustomization_yaml", }, }) assert.NoError(t, err) assert.Equal(t, "Kustomize", res.Type) assert.NotNil(t, res.Kustomize) assert.EqualValues(t, []string{"nginx:1.15.4", "k8s.gcr.io/nginx-slim:0.8"}, res.Kustomize.Images) }
explode_data.jsonl/5691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 1949, 2164, 7799, 42, 1450, 551, 1155, 353, 8840, 836, 8, 341, 52934, 1669, 501, 1860, 17409, 496, 5130, 10202, 11, 1848, 1669, 2473, 2234, 2164, 7799, 5378, 19047, 1507, 609, 391, 292, 1451, 2817, 5368, 5475, 2164, 7799, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStrOpsTextJustify_String_03(t *testing.T) { testStr := "Right" txtJustify := TextJustify(0).Right() actualStr := txtJustify.String() if actualStr != testStr { t.Errorf("Error: Expected return of object string value= \"Right\".\n"+ "Instead, object string value = '%v'\n", testStr) } }
explode_data.jsonl/29338
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 2580, 38904, 1178, 9952, 1437, 31777, 62, 15, 18, 1155, 353, 8840, 836, 8, 1476, 18185, 2580, 1669, 330, 5979, 1837, 68272, 9952, 1437, 1669, 2918, 9952, 1437, 7, 15, 568, 5979, 2822, 88814, 2580, 1669, 7932, 9952, 1437, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFromIDAndToken(t *testing.T) { webhook1, err := FromIDAndToken(id, token) if err != nil { t.Error("Got unexpected error ", err) } if *webhook != *webhook1 { t.Fail() } }
explode_data.jsonl/3988
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 3830, 915, 3036, 3323, 1155, 353, 8840, 836, 8, 341, 97250, 20873, 16, 11, 1848, 1669, 5542, 915, 3036, 3323, 3724, 11, 3950, 340, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 445, 32462, 16500, 1465, 3670, 1848, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func Test_fsSource_ReadUp(t *testing.T) { s := getTestSource(t, "sample-migrations") up, identifier, err := s.ReadUp(1) if err != nil { t.Fatalf("unexpected error: %v", err) } if identifier != "some-text" { t.Fatalf("expected identifier to be some-text, got: %s", identifier) } defer up.Close() contents, _ := ioutil.ReadAll(up) if bytes.Compare(contents, []byte("{\"1\": \"up\"}")) != 0 { t.Fatalf("unexpected contents, got: %s", contents) } }
explode_data.jsonl/81892
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 185 }
[ 2830, 3393, 34470, 3608, 38381, 2324, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 633, 2271, 3608, 1155, 11, 330, 13611, 1448, 17824, 1138, 59810, 11, 12816, 11, 1848, 1669, 274, 6503, 2324, 7, 16, 340, 743, 1848, 961, 2092, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestConstrainedPrefAttachmentSelectEmptyGraph(t *testing.T) { const ( minChanSize = 0 maxChanSize = btcutil.Amount(btcutil.SatoshiPerBitcoin) chanLimit = 3 threshold = 0.5 ) // First, we'll generate a random key that represents "us", and create // a new instance of the heuristic with our set parameters. self, err := randKey() if err != nil { t.Fatalf("unable to generate self key: %v", err) } prefAttach := NewConstrainedPrefAttachment(minChanSize, maxChanSize, chanLimit, threshold) skipNodes := make(map[NodeID]struct{}) for _, graph := range chanGraphs { success := t.Run(graph.name, func(t1 *testing.T) { graph, cleanup, err := graph.genFunc() if err != nil { t1.Fatalf("unable to create graph: %v", err) } if cleanup != nil { defer cleanup() } // With the necessary state initialized, we'll not // attempt to select a set of candidates channel for // creation given the current state of the graph. const walletFunds = btcutil.SatoshiPerBitcoin directives, err := prefAttach.Select(self, graph, walletFunds, 5, skipNodes) if err != nil { t1.Fatalf("unable to select attachment "+ "directives: %v", err) } // We shouldn't have selected any new directives as we // started with an empty graph. if len(directives) != 0 { t1.Fatalf("zero attachment directives "+ "should have been returned instead %v were", len(directives)) } }) if !success { break } } }
explode_data.jsonl/28781
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 556 }
[ 2830, 3393, 1109, 57727, 29978, 33569, 3379, 3522, 11212, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 25320, 46019, 1695, 284, 220, 15, 198, 197, 22543, 46019, 1695, 284, 86037, 1314, 62192, 1883, 10413, 1314, 808, 14030, 6023, 3889, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFiles_Delete(t *testing.T) { setup() defer teardown() mux.HandleFunc("/v2/files/delete", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "POST") testHeader(t, r, "Content-Type", "application/x-www-form-urlencoded") fmt.Fprintln(w, `{"status": "OK"}`) }) ctx := context.Background() err := client.Files.Delete(ctx, 1, 2, 3) if err != nil { t.Error(err) } // empty params err = client.Files.Delete(ctx) if err == nil { t.Errorf("empty parameters accepted") } err = client.Files.Delete(ctx, 1, 2, -1) if err == nil { t.Errorf("negative id accepted") } }
explode_data.jsonl/48317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 10809, 57418, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 85, 17, 33220, 32275, 497, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 18185, 3523, 1155, 11, 435, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestComplex(t *testing.T) { for _, format := range formats { format := format t.Run(fmt.Sprintf("amd64-%s", format), func(t *testing.T) { accept(t, acceptParms{ Name: fmt.Sprintf("complex_%s", format), Conf: "complex.yaml", Format: format, Dockerfile: fmt.Sprintf("%s.complex.dockerfile", format), }) }) t.Run(fmt.Sprintf("i386-%s", format), func(t *testing.T) { accept(t, acceptParms{ Name: fmt.Sprintf("complex_%s_386", format), Conf: "complex.386.yaml", Format: format, Dockerfile: fmt.Sprintf("%s.386.complex.dockerfile", format), }) }) } }
explode_data.jsonl/15456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 31137, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 3561, 1669, 2088, 19856, 341, 197, 59416, 1669, 3561, 198, 197, 3244, 16708, 28197, 17305, 445, 67913, 21, 19, 11069, 82, 497, 3561, 701, 2915, 1155, 353, 8840, 836, 8, 341,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRedis_Keys(t *testing.T) { runOnRedis(t, func(client *Redis) { err := client.Set("key1", "value1") assert.Nil(t, err) err = client.Set("key2", "value2") assert.Nil(t, err) _, err = NewRedis(client.Addr, "").Keys("*") assert.NotNil(t, err) keys, err := client.Keys("*") assert.Nil(t, err) assert.ElementsMatch(t, []string{"key1", "key2"}, keys) }) }
explode_data.jsonl/39167
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 48137, 62, 8850, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 9859, 1669, 2943, 4202, 445, 792, 16, 497, 330, 957, 16, 1138, 197, 6948, 59678, 1155, 11, 1848, 340, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypeSystem_ObjectsMustHaveFields_AcceptsAnObjectTypeWithFieldsObject(t *testing.T) { _, err := schemaWithFieldType(graphql.NewObject(graphql.ObjectConfig{ Name: "SomeObject", Fields: graphql.Fields{ "f": &graphql.Field{ Type: graphql.String, }, }, })) if err != nil { t.Fatalf("unexpected error: %v", err) } }
explode_data.jsonl/79143
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 929, 2320, 62, 11543, 31776, 12116, 8941, 1566, 66, 57771, 2082, 49530, 2354, 8941, 1190, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10802, 2354, 63733, 24312, 1470, 7121, 1190, 24312, 1470, 8348, 2648, 515, 197, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMountStores(t *testing.T) { app := setupBaseApp(t) // check both stores store1 := app.CMS().GetCommitKVStore(capKey1) require.NotNil(t, store1) store2 := app.CMS().GetCommitKVStore(capKey2) require.NotNil(t, store2) }
explode_data.jsonl/30021
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 16284, 69026, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 6505, 3978, 2164, 1155, 692, 197, 322, 1779, 2176, 10533, 198, 57279, 16, 1669, 906, 727, 4826, 1005, 1949, 33441, 82707, 6093, 51386, 1592, 16, 340, 17957, 93882, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConverterRecursiveMixedAll(t *testing.T) { fieldMap := make(map[string]codegen.FieldMapperEntry) lines, err := convertTypes( "Foo", "Bar", ` struct NestedFooA { 1: optional map<string, NestedFooAA> one } struct NestedFooAA { 1: required list<NestedFooAAA> two } struct NestedFooAAA { 1: required string three 2: optional NestedFooAAA four 3: optional NestedFooAA five } struct NestedFooB { 1: optional map<string, NestedFooBB> one } struct NestedFooBB { 1: required list<NestedFooBBB> two } struct NestedFooBBB { 1: required string three 2: optional NestedFooBBB four 3: optional NestedFooBB five } struct Foo { 1: optional NestedFooA six } struct Bar { 2: optional NestedFooB six }`, nil, fieldMap, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` var convertNestedFooBHelper1 func(in *structs.NestedFooA) (out *structs.NestedFooB) convertNestedFooBHelper1 = func(in *structs.NestedFooA) (out *structs.NestedFooB) { if in != nil { out = &structs.NestedFooB{} out.One = make(map[string]*structs.NestedFooBB, len(in.One)) for key2, value3 := range in.One { if value3 != nil { out.One[key2] = &structs.NestedFooBB{} out.One[key2].Two = make([]*structs.NestedFooBBB, len(in.One[key2].Two)) for index4, value5 := range in.One[key2].Two { if value5 != nil { out.One[key2].Two[index4] = &structs.NestedFooBBB{} out.One[key2].Two[index4].Three = string(in.One[key2].Two[index4].Three) var convertNestedFooBBBHelper6 func(in *structs.NestedFooAAA) (out *structs.NestedFooBBB) convertNestedFooBBBHelper6 = func(in *structs.NestedFooAAA) (out *structs.NestedFooBBB) { if in != nil { out = &structs.NestedFooBBB{} out.Three = string(in.Three) out.Four = convertNestedFooBBBHelper6(in.Four) var convertNestedFooBBHelper7 func(in *structs.NestedFooAA) (out *structs.NestedFooBB) convertNestedFooBBHelper7 = func(in *structs.NestedFooAA) (out *structs.NestedFooBB) { if in != nil { out = &structs.NestedFooBB{} out.Two = make([]*structs.NestedFooBBB, len(in.Two)) for index8, value9 := range in.Two { 
if value9 != nil { out.Two[index8] = &structs.NestedFooBBB{} out.Two[index8].Three = string(in.Two[index8].Three) out.Two[index8].Four = convertNestedFooBBBHelper6(in.Two[index8].Four) out.Two[index8].Five = convertNestedFooBBHelper7(in.Two[index8].Five) } else { out.Two[index8] = nil } } } else { out = nil } return } out.Five = convertNestedFooBBHelper7(in.Five) } else { out = nil } return } out.One[key2].Two[index4].Four = convertNestedFooBBBHelper6(in.One[key2].Two[index4].Four) var convertNestedFooBBHelper10 func(in *structs.NestedFooAA) (out *structs.NestedFooBB) convertNestedFooBBHelper10 = func(in *structs.NestedFooAA) (out *structs.NestedFooBB) { if in != nil { out = &structs.NestedFooBB{} out.Two = make([]*structs.NestedFooBBB, len(in.Two)) for index11, value12 := range in.Two { if value12 != nil { out.Two[index11] = &structs.NestedFooBBB{} out.Two[index11].Three = string(in.Two[index11].Three) var convertNestedFooBBBHelper13 func(in *structs.NestedFooAAA) (out *structs.NestedFooBBB) convertNestedFooBBBHelper13 = func(in *structs.NestedFooAAA) (out *structs.NestedFooBBB) { if in != nil { out = &structs.NestedFooBBB{} out.Three = string(in.Three) out.Four = convertNestedFooBBBHelper13(in.Four) out.Five = convertNestedFooBBHelper10(in.Five) } else { out = nil } return } out.Two[index11].Four = convertNestedFooBBBHelper13(in.Two[index11].Four) out.Two[index11].Five = convertNestedFooBBHelper10(in.Two[index11].Five) } else { out.Two[index11] = nil } } } else { out = nil } return } out.One[key2].Two[index4].Five = convertNestedFooBBHelper10(in.One[key2].Two[index4].Five) } else { out.One[key2].Two[index4] = nil } } } else { out.One[key2] = nil } } } else { out = nil } return } out.Six = convertNestedFooBHelper1(in.Six) `), lines) }
explode_data.jsonl/62101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2464 }
[ 2830, 3393, 14920, 78542, 86433, 2403, 1155, 353, 8840, 836, 8, 341, 39250, 2227, 1669, 1281, 9147, 14032, 60, 95859, 17087, 10989, 5874, 692, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAnchorIndex(t *testing.T) { type args struct { b []byte anchor *regexp.Regexp } tests := []struct { name string args args want int }{ { "nil", args{ nil, regexp.MustCompile(`exports=\{`), }, -1, }, { "empty", args{ []byte(""), regexp.MustCompile(`exports=\{`), }, -1, }, { "no object", args{ []byte("something:llll"), regexp.MustCompile(`exports=\{`), }, -1, }, { "tooshort", args{ []byte("1234"), regexp.MustCompile(`exports=\{`), }, -1, }, { "easy", args{ []byte("exports={obj:12}"), regexp.MustCompile(`exports=\{`), }, 8, }, { "long", args{ []byte(`void 0===(a="function"==typeof(r=function(e){"use strict";e.exports={obj:12};}`), regexp.MustCompile(`exports=\{`), }, 68, }, { "repeated", args{ []byte(`void 0===(a="function"==typeof(r=function(e){"use strict";e.exports={obj:12};}void 0===(a="function"==typeof(r=function(e){"use strict";e.exports={obj:12};}`), regexp.MustCompile(`exports=\{`), }, 68, }, { "anchor", args{ []byte(`void 0===(a="function"==typeof(r=function(e){"use strict";e.exports={obj:12};};void 0===(a="function"==typeof(r=function(e){"use strict";e.exports={fr:{Club:{home:"Accéder mon ARTE",profile:"Modifier mon profil",logout:"Me déconnecter",pseudo:"Mon ARTE"},LogoNavigation:{label:"Accueil",href:"https://www.arte.tv/fr/"},DesktopNavigation:{ariaLabel:"Menu secondaire",links:[{label:"Guide +7"`), regexp.MustCompile(`exports=\{[a-z]{2}:\{Club`), }, 147, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := AnchorIndex(tt.args.b, tt.args.anchor); got != tt.want { t.Errorf("AnchorIndex() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/19265
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 950 }
[ 2830, 3393, 14677, 1552, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 2233, 414, 3056, 3782, 198, 197, 197, 17109, 353, 55796, 8989, 4580, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestServerDoS_MaxHeaderListSize(t *testing.T) { st := newServerTester(t, func(w http.ResponseWriter, r *http.Request) {}) defer st.Close() // shake hands frameSize := defaultMaxReadFrameSize var advHeaderListSize *uint32 st.greetAndCheckSettings(func(s Setting) error { switch s.ID { case SettingMaxFrameSize: if s.Val < minMaxFrameSize { frameSize = minMaxFrameSize } else if s.Val > maxFrameSize { frameSize = maxFrameSize } else { frameSize = int(s.Val) } case SettingMaxHeaderListSize: advHeaderListSize = &s.Val } return nil }) if advHeaderListSize == nil { t.Errorf("server didn't advertise a max header list size") } else if *advHeaderListSize == 0 { t.Errorf("server advertised a max header list size of 0") } st.encodeHeaderField(":method", "GET") st.encodeHeaderField(":path", "/") st.encodeHeaderField(":scheme", "https") cookie := strings.Repeat("*", 4058) st.encodeHeaderField("cookie", cookie) st.writeHeaders(HeadersFrameParam{ StreamID: 1, BlockFragment: st.headerBuf.Bytes(), EndStream: true, EndHeaders: false, }) // Capture the short encoding of a duplicate ~4K cookie, now // that we've already sent it once. st.headerBuf.Reset() st.encodeHeaderField("cookie", cookie) // Now send 1MB of it. const size = 1 << 20 b := bytes.Repeat(st.headerBuf.Bytes(), size/st.headerBuf.Len()) for len(b) > 0 { chunk := b if len(chunk) > frameSize { chunk = chunk[:frameSize] } b = b[len(chunk):] st.fr.WriteContinuation(1, len(b) == 0, chunk) } h := st.wantHeaders() if !h.HeadersEnded() { t.Fatalf("Got HEADERS without END_HEADERS set: %v", h) } headers := st.decodeHeader(h.HeaderBlockFragment()) want := [][2]string{ {":status", "431"}, {"content-type", "text/html; charset=utf-8"}, {"content-length", "63"}, } if !reflect.DeepEqual(headers, want) { t.Errorf("Headers mismatch.\n got: %q\nwant: %q\n", headers, want) } }
explode_data.jsonl/71685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 775 }
[ 2830, 3393, 5475, 5404, 50, 58843, 4047, 852, 1695, 1155, 353, 8840, 836, 8, 341, 18388, 1669, 501, 5475, 58699, 1155, 11, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 35248, 16867, 357, 10421, 2822, 197, 322, 26025, 6078, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOptionsValidate(t *testing.T) { validateData := struct { flagName string err error }{ flagName: "token-auth-file", err: errors.New("token-auth-file must be non-empty"), } testdata := []struct { opts Options expectedErr []error }{ {Options{empty}, []error{validateData.err}, }, { Options{nonempty}, nil, }, } for _, test := range testdata { var testName string if test.opts.AuthFile == empty { testName = validateData.flagName + "empty" } else { testName = validateData.flagName + "non-empty" } t.Run(testName, func(t *testing.T) { errs := test.opts.Validate() if test.expectedErr == nil { assert.Nil(t, errs, "expected error nil") } else { if assert.NotNil(t, errs, "expected errors") { assert.EqualError(t, aggregator.NewAggregate(errs), aggregator.NewAggregate(test.expectedErr).Error(), "token auth options validation") } } }) } }
explode_data.jsonl/35576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 401 }
[ 2830, 3393, 3798, 17926, 1155, 353, 8840, 836, 8, 341, 197, 7067, 1043, 1669, 2036, 341, 197, 30589, 675, 914, 198, 197, 9859, 414, 1465, 198, 197, 59403, 197, 30589, 675, 25, 330, 5839, 33503, 14203, 756, 197, 9859, 25, 414, 5975, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetNextTrigger(t *testing.T) { tests := []struct { timezone string now time.Time sched *Schedule trigger time.Time err bool }{ { timezone: "UTC", now: time.Date(2019, 3, 4, 8, 0, 0, 0, time.UTC), // monday sched: &Schedule{ hour: 18, min: 0, dayOfWeek: map[time.Weekday]bool{ 0: true, // sunday 1: true, }}, trigger: time.Date(2019, 3, 4, 18, 0, 0, 0, time.UTC), // monday err: false, }, { timezone: "UTC", now: time.Date(2019, 3, 4, 19, 0, 0, 0, time.UTC), // monday sched: &Schedule{ hour: 18, min: 0, dayOfWeek: map[time.Weekday]bool{ 0: true, // sunday 1: true, }}, trigger: time.Date(2019, 3, 10, 18, 0, 0, 0, time.UTC), // monday err: false, }, { timezone: "UTC", now: time.Date(2019, 3, 4, 19, 0, 0, 0, time.UTC), // monday sched: &Schedule{ hour: 18, min: 0, dayOfWeek: map[time.Weekday]bool{}}, err: true, }, } for i, tst := range tests { SetTimeZone(tst.timezone) trig, err := tst.sched.GetNextTrigger(tst.now) if err != nil && !tst.err { t.Errorf("failed test %d - unexpected err: %s", i, err) } if err == nil && tst.err { t.Errorf("failed test %d - expected err, but got none", i) } if !tst.err && !trig.Equal(tst.trigger) { t.Errorf("failed test %d - expected time equal to %s, but got %s", i, tst.trigger, trig) } } }
explode_data.jsonl/5946
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 741 }
[ 2830, 3393, 1949, 5847, 17939, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 21957, 8684, 914, 198, 197, 80922, 414, 882, 16299, 198, 197, 1903, 2397, 262, 353, 32210, 198, 197, 83228, 220, 882, 16299, 198, 197, 985...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRecreateWithObjectName(t *testing.T) { // Arrange iCmd := RecreateCommand( "ingress", "test-ingress-0", "-n", "test-0", ) ingressesBefore := mustLsIngress(t, "test-0") // Act mustExecute(t, iCmd) // Assert ingressesAfter := mustLsIngress(t, "test-0") assert.NotEqual(t, ingressesBefore[0].ResourceVersion, ingressesAfter[0].ResourceVersion) for i := 1; i < 10; i++ { assert.Equal(t, ingressesBefore[i].ResourceVersion, ingressesAfter[i].ResourceVersion) } }
explode_data.jsonl/68718
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 693, 3182, 2354, 37748, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 8230, 15613, 1669, 4067, 964, 4062, 1006, 197, 197, 1, 287, 673, 756, 197, 197, 1, 1944, 83905, 673, 12, 15, 756, 197, 197, 34294, 77, 756, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReversiAnz34(t *testing.T) { r := NewReversiAnz() if r.GetOwnEdgeSideThreeCnt() != 0 { t.Errorf("NG") } }
explode_data.jsonl/23057
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 58 }
[ 2830, 3393, 693, 3004, 72, 2082, 89, 18, 19, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 693, 3004, 72, 2082, 89, 741, 743, 435, 2234, 14182, 11656, 16384, 19641, 33747, 368, 961, 220, 15, 341, 197, 3244, 13080, 445, 6140, 1138,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestKeyManagementKeyRing(t *testing.T) { kb, err := New("keybasename", "test", t.TempDir(), nil) require.NoError(t, err) algo := hd.Secp256k1 n1, n2, n3 := "personal", "business", "other" // Check empty state l, err := kb.List() require.Nil(t, err) require.Empty(t, l) _, _, err = kb.NewMnemonic(n1, English, sdk.FullFundraiserPath, notSupportedAlgo{}) require.Error(t, err, "ed25519 keys are currently not supported by keybase") // create some keys _, err = kb.Key(n1) require.Error(t, err) i, _, err := kb.NewMnemonic(n1, English, sdk.FullFundraiserPath, algo) require.NoError(t, err) require.Equal(t, n1, i.GetName()) _, _, err = kb.NewMnemonic(n2, English, sdk.FullFundraiserPath, algo) require.NoError(t, err) // we can get these keys i2, err := kb.Key(n2) require.NoError(t, err) _, err = kb.Key(n3) require.NotNil(t, err) _, err = kb.KeyByAddress(accAddr(i2)) require.NoError(t, err) addr, err := sdk.AccAddressFromBech32("cosmos1yq8lgssgxlx9smjhes6ryjasmqmd3ts2559g0t") require.NoError(t, err) _, err = kb.KeyByAddress(addr) require.NotNil(t, err) // list shows them in order keyS, err := kb.List() require.NoError(t, err) require.Equal(t, 2, len(keyS)) // note these are in alphabetical order require.Equal(t, n2, keyS[0].GetName()) require.Equal(t, n1, keyS[1].GetName()) require.Equal(t, i2.GetPubKey(), keyS[0].GetPubKey()) // deleting a key removes it err = kb.Delete("bad name") require.NotNil(t, err) err = kb.Delete(n1) require.NoError(t, err) keyS, err = kb.List() require.NoError(t, err) require.Equal(t, 1, len(keyS)) _, err = kb.Key(n1) require.Error(t, err) // create an offline key o1 := "offline" priv1 := ed25519.GenPrivKey() pub1 := priv1.PubKey() i, err = kb.SavePubKey(o1, pub1, hd.Ed25519Type) require.Nil(t, err) require.Equal(t, pub1, i.GetPubKey()) require.Equal(t, o1, i.GetName()) keyS, err = kb.List() require.NoError(t, err) require.Equal(t, 2, len(keyS)) // delete the offline key err = kb.Delete(o1) require.NoError(t, err) keyS, err = kb.List() 
require.NoError(t, err) require.Equal(t, 1, len(keyS)) // addr cache gets nuked - and test skip flag require.NoError(t, kb.Delete(n2)) }
explode_data.jsonl/73434
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 962 }
[ 2830, 3393, 1592, 22237, 1592, 43466, 1155, 353, 8840, 836, 8, 341, 16463, 65, 11, 1848, 1669, 1532, 445, 792, 42953, 497, 330, 1944, 497, 259, 65009, 6184, 1507, 2092, 340, 17957, 35699, 1155, 11, 1848, 692, 69571, 3346, 1669, 17907, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWatchRestoreSyncedWatcher(t *testing.T) { b1, b1Path := backend.NewDefaultTmpBackend() s1 := newWatchableStore(zap.NewExample(), b1, &lease.FakeLessor{}, cindex.NewConsistentIndex(b1.BatchTx()), StoreConfig{}) defer cleanup(s1, b1, b1Path) b2, b2Path := backend.NewDefaultTmpBackend() s2 := newWatchableStore(zap.NewExample(), b2, &lease.FakeLessor{}, cindex.NewConsistentIndex(b2.BatchTx()), StoreConfig{}) defer cleanup(s2, b2, b2Path) testKey, testValue := []byte("foo"), []byte("bar") rev := s1.Put(testKey, testValue, lease.NoLease) startRev := rev + 2 // create a watcher with a future revision // add to "synced" watcher group (startRev > s.store.currentRev) w1 := s1.NewWatchStream() w1.Watch(0, testKey, nil, startRev) // make "s2" ends up with a higher last revision s2.Put(testKey, testValue, lease.NoLease) s2.Put(testKey, testValue, lease.NoLease) // overwrite storage with higher revisions if err := s1.Restore(b2); err != nil { t.Fatal(err) } // wait for next "syncWatchersLoop" iteration // and the unsynced watcher should be chosen time.Sleep(2 * time.Second) // trigger events for "startRev" s1.Put(testKey, testValue, lease.NoLease) select { case resp := <-w1.Chan(): if resp.Revision != startRev { t.Fatalf("resp.Revision expect %d, got %d", startRev, resp.Revision) } if len(resp.Events) != 1 { t.Fatalf("len(resp.Events) expect 1, got %d", len(resp.Events)) } if resp.Events[0].Kv.ModRevision != startRev { t.Fatalf("resp.Events[0].Kv.ModRevision expect %d, got %d", startRev, resp.Events[0].Kv.ModRevision) } case <-time.After(time.Second): t.Fatal("failed to receive event in 1 second") } }
explode_data.jsonl/67208
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 656 }
[ 2830, 3393, 14247, 56284, 12154, 291, 47248, 1155, 353, 8840, 836, 8, 341, 2233, 16, 11, 293, 16, 1820, 1669, 19163, 7121, 3675, 35986, 29699, 741, 1903, 16, 1669, 501, 14247, 480, 6093, 13174, 391, 7121, 13314, 1507, 293, 16, 11, 609...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestTopDownWalkBuiltin(t *testing.T) { tests := []struct { note string rules []string expected interface{} }{ { note: "scalar", rules: []string{ `p[x] { walk(data.a[0], x) }`, }, expected: `[ [[], 1] ]`, }, { note: "arrays", rules: []string{ `p[x] { walk(data.a, x) }`, }, expected: `[ [[], [1,2,3,4]], [[0], 1], [[1], 2], [[2], 3], [[3], 4] ]`, }, { note: "objects", rules: []string{ "p[x] { walk(data.b, x) }", }, expected: `[ [[], {"v1": "hello", "v2": "goodbye"}], [["v1"], "hello"], [["v2"], "goodbye"] ]`, }, { note: "sets", rules: []string{ "p[x] { walk(q, x) }", `q = {{1,2,3}} { true }`, }, expected: `[ [[], [[1,2,3]]], [[[1,2,3]], [1,2,3]], [[[1,2,3], 1], 1], [[[1,2,3], 2], 2], [[[1,2,3], 3], 3] ]`, }, { note: "match and filter", rules: []string{ `p[[k,x]] { walk(q, [k, x]); contains(k[1], "oo") }`, `q = [ { "foo": 1, "bar": 2, "bazoo": 3, } ] { true }`, }, expected: `[[[0, "foo"], 1], [[0, "bazoo"], 3]]`, }, } data := loadSmallTestData() for _, tc := range tests { runTopDownTestCase(t, data, tc.note, tc.rules, tc.expected) } }
explode_data.jsonl/25218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 757 }
[ 2830, 3393, 5366, 4454, 48849, 33, 25628, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 9038, 1272, 257, 914, 198, 197, 7000, 2425, 262, 3056, 917, 198, 197, 42400, 3749, 16094, 197, 59403, 197, 197, 515, 298, 9038...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSharedDoNotAutoprovisionVolume(t *testing.T) { taskEngine, done, _ := setupWithDefaultConfig(t) defer done() stateChangeEvents := taskEngine.StateChangeEvents() client := taskEngine.(*DockerTaskEngine).client // Set the task clean up duration to speed up the test taskEngine.(*DockerTaskEngine).cfg.TaskCleanupWaitDuration = 1 * time.Second testTask, tmpDirectory, err := createVolumeTask("shared", "TestSharedDoNotAutoprovisionVolume", "TestSharedDoNotAutoprovisionVolume", false) defer os.Remove(tmpDirectory) require.NoError(t, err, "creating test task failed") // creating volume to simulate previously provisioned volume volumeConfig := testTask.Volumes[0].Volume.(*taskresourcevolume.DockerVolumeConfig) volumeMetadata := client.CreateVolume(context.TODO(), "TestSharedDoNotAutoprovisionVolume", volumeConfig.Driver, volumeConfig.DriverOpts, volumeConfig.Labels, 1*time.Minute) require.NoError(t, volumeMetadata.Error) go taskEngine.AddTask(testTask) verifyTaskIsRunning(stateChangeEvents, testTask) verifyTaskIsStopped(stateChangeEvents, testTask) assert.Equal(t, *testTask.Containers[0].GetKnownExitCode(), 0) assert.Len(t, testTask.ResourcesMapUnsafe["dockerVolume"], 0, "volume that has been provisioned does not require the agent to create it again") // Wait for task to be cleaned up testTask.SetSentStatus(apitaskstatus.TaskStopped) waitForTaskCleanup(t, taskEngine, testTask.Arn, 5) response := client.InspectVolume(context.TODO(), "TestSharedDoNotAutoprovisionVolume", 1*time.Second) assert.NoError(t, response.Error, "expect shared volume not removed") cleanVolumes(testTask, taskEngine) }
explode_data.jsonl/39474
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 509 }
[ 2830, 3393, 16997, 5404, 2623, 19602, 45926, 13013, 18902, 1155, 353, 8840, 836, 8, 341, 49115, 4571, 11, 2814, 11, 716, 1669, 6505, 2354, 3675, 2648, 1155, 340, 16867, 2814, 741, 24291, 4072, 7900, 1669, 3383, 4571, 18942, 4072, 7900, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPublishSubscribe(t *testing.T) { blockSent := blocks.NewBlock([]byte("Greetings from The Interval")) n := New() defer n.Shutdown() ch := n.Subscribe(context.Background(), blockSent.Cid()) n.Publish(blockSent) blockRecvd, ok := <-ch if !ok { t.Fail() } assertBlocksEqual(t, blockRecvd, blockSent) }
explode_data.jsonl/8955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 50145, 28573, 1155, 353, 8840, 836, 8, 341, 47996, 31358, 1669, 10010, 7121, 4713, 10556, 3782, 445, 91786, 504, 576, 40584, 28075, 9038, 1669, 1532, 741, 16867, 308, 10849, 18452, 741, 23049, 1669, 308, 82628, 5378, 19047, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestClosedBySelf(t *testing.T) { tcb := connected(t, 1234, 789, 30000, 50000) // Send FIN. tcp := make(header.TCP, header.TCPMinimumSize) tcp.Encode(&header.TCPFields{ SeqNum: 1235, AckNum: 790, DataOffset: header.TCPMinimumSize, Flags: header.TCPFlagAck | header.TCPFlagFin, WindowSize: 30000, }) if r := tcb.UpdateStateOutbound(tcp); r != tcpconntrack.ResultAlive { t.Fatalf("Bad result: got %v, want %v", r, tcpconntrack.ResultAlive) } // Receive FIN/ACK. tcp.Encode(&header.TCPFields{ SeqNum: 790, AckNum: 1236, DataOffset: header.TCPMinimumSize, Flags: header.TCPFlagAck | header.TCPFlagFin, WindowSize: 50000, }) if r := tcb.UpdateStateInbound(tcp); r != tcpconntrack.ResultAlive { t.Fatalf("Bad result: got %v, want %v", r, tcpconntrack.ResultAlive) } // Send ACK. tcp.Encode(&header.TCPFields{ SeqNum: 1236, AckNum: 791, DataOffset: header.TCPMinimumSize, Flags: header.TCPFlagAck, WindowSize: 30000, }) if r := tcb.UpdateStateOutbound(tcp); r != tcpconntrack.ResultClosedBySelf { t.Fatalf("Bad result: got %v, want %v", r, tcpconntrack.ResultClosedBySelf) } }
explode_data.jsonl/17725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 538 }
[ 2830, 3393, 26884, 1359, 12092, 1155, 353, 8840, 836, 8, 341, 3244, 7221, 1669, 8433, 1155, 11, 220, 16, 17, 18, 19, 11, 220, 22, 23, 24, 11, 220, 18, 15, 15, 15, 15, 11, 220, 20, 15, 15, 15, 15, 692, 197, 322, 11000, 24344, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUpStatuses(t *testing.T) { for _, status := range upStatuses { status := status t.Run(fmt.Sprintf("Test OK HTTP status %d", status), func(t *testing.T) { server, event := checkServer(t, hbtest.HelloWorldHandler(status)) port, err := hbtest.ServerPort(server) require.NoError(t, err) mapvaltest.Test( t, mapval.Strict(mapval.Compose( hbtest.MonitorChecks("http@"+server.URL, server.URL, "127.0.0.1", "http", "up"), hbtest.RespondingTCPChecks(port), respondingHTTPChecks(server.URL, status), )), event.Fields, ) }) } }
explode_data.jsonl/39389
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 2324, 2522, 288, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2639, 1669, 2088, 705, 2522, 288, 341, 197, 23847, 1669, 2639, 198, 197, 3244, 16708, 28197, 17305, 445, 2271, 10402, 10130, 2639, 1018, 67, 497, 2639, 701, 2915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddEntry(t *testing.T) { var d = NewDirectory("z", GetTemplate(t)) d.AddEntry("", true) d.AddEntry("dir", true) d.AddEntry("a/b/c/d.txt", false) d.AddEntry("a/b/c/colon:colon.txt", false) d.AddEntry("\"quotes\".txt", false) assert.Equal(t, []DirEntry{ {remote: "", URL: "/", Leaf: "/"}, {remote: "dir", URL: "dir/", Leaf: "dir/"}, {remote: "a/b/c/d.txt", URL: "d.txt", Leaf: "d.txt"}, {remote: "a/b/c/colon:colon.txt", URL: "./colon:colon.txt", Leaf: "colon:colon.txt"}, {remote: "\"quotes\".txt", URL: "%22quotes%22.txt", Leaf: "\"quotes\".txt"}, }, d.Entries) // Now test with a query parameter d = NewDirectory("z", GetTemplate(t)).SetQuery(url.Values{"potato": []string{"42"}}) d.AddEntry("file", false) d.AddEntry("dir", true) assert.Equal(t, []DirEntry{ {remote: "file", URL: "file?potato=42", Leaf: "file"}, {remote: "dir", URL: "dir/?potato=42", Leaf: "dir/"}, }, d.Entries) }
explode_data.jsonl/965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 408 }
[ 2830, 3393, 2212, 5874, 1155, 353, 8840, 836, 8, 341, 2405, 294, 284, 1532, 9310, 445, 89, 497, 2126, 7275, 1155, 1171, 2698, 1904, 5874, 19814, 830, 340, 2698, 1904, 5874, 445, 3741, 497, 830, 340, 2698, 1904, 5874, 445, 64, 3470, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIdentifyLite(t *testing.T) { tt := newTeamTester(t) defer tt.cleanup() tt.addUser("abc") teamName := tt.users[0].createTeam() g := tt.users[0].tc.G t.Logf("make a team") team, err := GetTeamForTestByStringName(context.Background(), g, teamName) require.NoError(t, err) getTeamName := func(teamID keybase1.TeamID) keybase1.TeamName { team, err := teams.Load(context.Background(), g, keybase1.LoadTeamArg{ ID: teamID, }) require.NoError(t, err) return team.Name() } t.Logf("make an implicit team") iTeamCreateName := strings.Join([]string{tt.users[0].username, "bob@github"}, ",") iTeam, _, _, err := teams.LookupOrCreateImplicitTeam(context.TODO(), g, iTeamCreateName, false /*isPublic*/) require.NoError(t, err) iTeamImpName := getTeamName(iTeam.ID) require.True(t, iTeamImpName.IsImplicit()) require.NoError(t, err) cli, err := client.GetIdentifyClient(g) require.NoError(t, err, "failed to get new identifyclient") // test ok assertions var units = []struct { assertion string resID keybase1.TeamID resName string }{ { assertion: "t_alice", resName: "t_alice", }, { assertion: "team:" + teamName, resID: team.ID, resName: teamName, }, { assertion: "tid:" + team.ID.String(), resID: team.ID, resName: teamName, }, } for _, unit := range units { res, err := cli.IdentifyLite(context.Background(), idLiteArg("", unit.assertion)) require.NoError(t, err, "IdentifyLite (%s) failed", unit.assertion) if len(unit.resID) > 0 { require.Equal(t, unit.resID.String(), res.Ul.Id.String()) } if len(unit.resName) > 0 { require.Equal(t, unit.resName, res.Ul.Name) } } // test identify by assertion and id assertions := []string{"team:" + teamName, "tid:" + team.ID.String()} for _, assertion := range assertions { _, err := cli.IdentifyLite(context.Background(), idLiteArg(team.ID.AsUserOrTeam(), assertion)) require.NoError(t, err, "IdentifyLite by assertion and id (%s)", assertion) } // test identify by id only _, err = cli.IdentifyLite(context.Background(), idLiteArg(team.ID.AsUserOrTeam(), "")) 
require.NoError(t, err, "IdentifyLite id only") // test invalid user format _, err = cli.IdentifyLite(context.Background(), idLiteArg("", "__t_alice")) require.Error(t, err) require.Contains(t, err.Error(), "bad keybase username") // test team read error assertions = []string{"team:jwkj22111z"} for _, assertion := range assertions { _, err := cli.IdentifyLite(context.Background(), idLiteArg("", assertion)) aerr, ok := err.(libkb.AppStatusError) if ok { if aerr.Code != libkb.SCTeamNotFound { t.Fatalf("app status code: %d, expected %d", aerr.Code, libkb.SCTeamNotFound) } } else { require.True(t, regexp.MustCompile("Team .* does not exist").MatchString(err.Error()), "Expected an AppStatusError or team-does-not-exist for %s, but got: %v (%T)", assertion, err, err) } } // test not found assertions assertions = []string{"t_weriojweroi"} for _, assertion := range assertions { _, err := cli.IdentifyLite(context.Background(), idLiteArg("", assertion)) if _, ok := err.(libkb.NotFoundError); !ok { t.Fatalf("assertion %s, error: %s (%T), expected libkb.NotFoundError", assertion, err, err) } } }
explode_data.jsonl/42658
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1302 }
[ 2830, 3393, 28301, 1437, 43844, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1669, 501, 14597, 58699, 1155, 340, 16867, 17853, 87689, 2822, 3244, 83, 1364, 1474, 445, 13683, 1138, 197, 9196, 675, 1669, 17853, 20653, 58, 15, 936, 3182, 14597,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Run_Balance_Fail(t *testing.T) { mockScheduler := mocks.Scheduler{} mockExchange := mocks.ExchangeClient{} errFailAddFunc := errors.New("fail_add_func") mockConfig := &shresiesbot.AutoInvest{ Balance: &shresiesbot.BalanceConfiguration{ Scheduler: "MY_SCHEDULER", Holds: []shresiesbot.Hold{}, }, } entryId := new(cron.EntryID) mockScheduler.On("AddFunc", "MY_SCHEDULER", mock.Anything).Return(*entryId, errFailAddFunc) bot := shresiesbot.New(&mockScheduler, &mockExchange, mockConfig) err := bot.Run() assert.NotNil(t, bot) assert.ErrorIs(t, err, errFailAddFunc) mockScheduler.AssertExpectations(t) }
explode_data.jsonl/5835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 84158, 1668, 4978, 1400, 604, 1155, 353, 8840, 836, 8, 341, 77333, 38878, 1669, 68909, 808, 15222, 16094, 77333, 31564, 1669, 68909, 86997, 2959, 31483, 9859, 19524, 2212, 9626, 1669, 5975, 7121, 445, 18403, 2891, 9596, 5130, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthOptions_ToAuthMethod(t *testing.T) { // given for testName, testData := range map[string]struct { authType git.RepositoryAuthType credentials map[string]string expectedCallback gomega.OmegaMatcher expectedCertCheck gomega.OmegaMatcher expectedErr gomega.OmegaMatcher }{ "should be ok when basic": { authType: git.RepositoryAuthBasic, credentials: map[string]string{ git.UsernameKey: "user", git.PasswordKey: "password", }, expectedCallback: gomega.Not(gomega.BeNil()), expectedCertCheck: gomega.BeNil(), expectedErr: gomega.BeNil(), }, "should be ok when ssh without passphrase": { authType: git.RepositoryAuthSSHKey, credentials: map[string]string{ git.KeyKey: testSSHPrivateKey, }, expectedCallback: gomega.Not(gomega.BeNil()), expectedCertCheck: gomega.Not(gomega.BeNil()), expectedErr: gomega.BeNil(), }, "should be ok when ssh with passphrase": { authType: git.RepositoryAuthSSHKey, credentials: map[string]string{ git.PasswordKey: "test", git.KeyKey: testSSHPrivateKeyPassphrase, }, expectedCallback: gomega.Not(gomega.BeNil()), expectedCertCheck: gomega.Not(gomega.BeNil()), expectedErr: gomega.BeNil(), }, "error when invalid auth type": { authType: "invalid", credentials: map[string]string{ git.UsernameKey: "user", git.PasswordKey: "password", }, expectedCallback: gomega.BeNil(), expectedCertCheck: gomega.BeNil(), expectedErr: gomega.HaveOccurred(), }, "error when invalid key format": { authType: git.RepositoryAuthSSHKey, credentials: map[string]string{ git.KeyKey: "invalid format", }, expectedCallback: gomega.BeNil(), expectedCertCheck: gomega.BeNil(), expectedErr: gomega.HaveOccurred(), }, "error when missing field username in basic auth": { authType: git.RepositoryAuthBasic, credentials: map[string]string{}, expectedCallback: gomega.BeNil(), expectedCertCheck: gomega.BeNil(), expectedErr: gomega.HaveOccurred(), }, "error when missing field password in basic auth": { authType: git.RepositoryAuthBasic, credentials: map[string]string{ git.UsernameKey: "test", 
}, expectedCallback: gomega.BeNil(), expectedCertCheck: gomega.BeNil(), expectedErr: gomega.HaveOccurred(), }, "error when missing fields in key auth": { authType: git.RepositoryAuthSSHKey, credentials: map[string]string{}, expectedCallback: gomega.BeNil(), expectedCertCheck: gomega.BeNil(), expectedErr: gomega.HaveOccurred(), }, } { t.Run(testName, func(t *testing.T) { g := gomega.NewWithT(t) options := git.AuthOptions{ Type: testData.authType, Credentials: testData.credentials, } // when result, err := git.GetAuth(&options) //then g.Expect(err).To(testData.expectedErr) g.Expect(result.CredentialsCallback).To(testData.expectedCallback) }) } t.Run("should return nil when AuthOptions is nil", func(t *testing.T) { // given g := gomega.NewWithT(t) var authOptions *git.AuthOptions // when result, err := git.GetAuth(authOptions) // then g.Expect(err).To(gomega.BeNil()) g.Expect(result.CredentialsCallback).To(gomega.BeNil()) g.Expect(result.CertificateCheckCallback).To(gomega.BeNil()) }) }
explode_data.jsonl/71586
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1465 }
[ 2830, 3393, 5087, 3798, 38346, 5087, 3523, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 2023, 94396, 11, 67348, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 78011, 929, 262, 16345, 25170, 5087, 929, 198, 197, 197, 32353, 2415, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParse(t *testing.T) { type args struct { req *http.Request provider providers.Provider } tests := []struct { name string args args want *providers.Hook wantErr bool }{ { name: "TestParseWithCorrectRequestValues", args: args{ req: createGitlabRequest(http.MethodPost, "/dummy", parserGitlabTestSecret, parserGitlabTestEvent, parserGitlabTestBody), provider: createGitlabProvider(parserGitlabTestSecret), }, want: createGitlabHook(parserGitlabTestSecret, parserGitlabTestEvent, parserGitlabTestBody, http.MethodPost), }, { name: "TestParseWithEmptyTokenHeaderValue", args: args{ req: createGitlabRequest(http.MethodPost, "/dummy", "", parserGitlabTestEvent, parserGitlabTestBody), provider: createGitlabProvider(parserGitlabTestSecret), }, wantErr: true, }, { name: "TestParseWithNoEventHeaderValue", args: args{ req: createGitlabRequest(http.MethodPost, "/dummy", parserGitlabTestSecret, "", parserGitlabTestBody), provider: createGitlabProvider(parserGitlabTestSecret), }, wantErr: true, }, { name: "TestParseWithNoBody", args: args{ req: createGitlabRequest(http.MethodPost, "/dummy", parserGitlabTestSecret, parserGitlabTestEvent, ""), provider: createGitlabProvider(parserGitlabTestSecret), }, want: createGitlabHook(parserGitlabTestSecret, parserGitlabTestEvent, "", http.MethodPost), }, { name: "TestParseWithNoHeaders", args: args{ req: httptest.NewRequest(http.MethodPost, "/dummy", bytes.NewReader([]byte(parserGitlabTestBody))), provider: createGitlabProvider(parserGitlabTestSecret), }, wantErr: true, }, { name: "TestParseWithWrongHeaderKeys", args: args{ req: createRequestWithWrongHeaders(http.MethodPost, "/dummy", parserGitlabTestSecret, parserGitlabTestEvent, parserGitlabTestBody), provider: createGitlabProvider(parserGitlabTestSecret), }, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := Parse(tt.args.req, tt.args.provider) if (err != nil) != tt.wantErr { t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr) return } 
if !reflect.DeepEqual(got, tt.want) { t.Errorf("Parse() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/7633
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1001 }
[ 2830, 3393, 14463, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 24395, 414, 353, 1254, 9659, 198, 197, 197, 19979, 12565, 36208, 198, 197, 630, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRefreshTokenRenewExpired(t *testing.T) { s := newTestService(t) ctx := context.Background() clientID := datastore.NameKey(kindClient, newRandomID(), nil) rt := &hubauth.RefreshToken{ ClientID: clientID.Encode(), CodeID: datastore.NameKey(kindCode, newRandomID(), clientID).Encode(), UserID: "123", UserEmail: "foo@example.com", IssueTime: time.Now().Add(-5 * time.Minute), ExpiryTime: time.Now().Add(-time.Minute), } id, err := s.CreateRefreshToken(ctx, rt) require.NoError(t, err) _, err = s.RenewRefreshToken(ctx, rt.ClientID, id, rt.IssueTime, time.Now()) require.Truef(t, errors.Is(err, hubauth.ErrExpired), "wrong err %v", err) err = s.DeleteRefreshToken(ctx, id) require.NoError(t, err) }
explode_data.jsonl/56426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 14567, 3323, 34625, 365, 54349, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 2271, 1860, 1155, 340, 20985, 1669, 2266, 19047, 2822, 25291, 915, 1669, 64986, 2967, 1592, 62697, 2959, 11, 501, 13999, 915, 1507, 2092, 340, 55...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAWSCluster_DefaultCNIIngressRules(t *testing.T) { AZUsageLimit := 3 defaultVPCSpec := VPCSpec{ AvailabilityZoneUsageLimit: &AZUsageLimit, AvailabilityZoneSelection: &AZSelectionSchemeOrdered, } g := NewWithT(t) tests := []struct { name string beforeCluster *AWSCluster afterCluster *AWSCluster }{ { name: "CNI ingressRules are updated cni spec undefined", beforeCluster: &AWSCluster{ Spec: AWSClusterSpec{}, }, afterCluster: &AWSCluster{ Spec: AWSClusterSpec{ NetworkSpec: NetworkSpec{ VPC: defaultVPCSpec, CNI: &CNISpec{ CNIIngressRules: CNIIngressRules{ { Description: "bgp (calico)", Protocol: SecurityGroupProtocolTCP, FromPort: 179, ToPort: 179, }, { Description: "IP-in-IP (calico)", Protocol: SecurityGroupProtocolIPinIP, FromPort: -1, ToPort: 65535, }, }, }, }, }, }, }, { name: "CNIIngressRules are not added for empty CNISpec", beforeCluster: &AWSCluster{ Spec: AWSClusterSpec{ NetworkSpec: NetworkSpec{ VPC: defaultVPCSpec, CNI: &CNISpec{}, }, }, }, afterCluster: &AWSCluster{ Spec: AWSClusterSpec{ NetworkSpec: NetworkSpec{ VPC: defaultVPCSpec, CNI: &CNISpec{}, }, }, }, }, { name: "CNI ingressRules are unmodified when they exist", beforeCluster: &AWSCluster{ Spec: AWSClusterSpec{ NetworkSpec: NetworkSpec{ VPC: defaultVPCSpec, CNI: &CNISpec{ CNIIngressRules: CNIIngressRules{ { Description: "Antrea 1", Protocol: SecurityGroupProtocolTCP, FromPort: 10349, ToPort: 10349, }, }, }, }, }, }, afterCluster: &AWSCluster{ Spec: AWSClusterSpec{ NetworkSpec: NetworkSpec{ VPC: defaultVPCSpec, CNI: &CNISpec{ CNIIngressRules: CNIIngressRules{ { Description: "Antrea 1", Protocol: SecurityGroupProtocolTCP, FromPort: 10349, ToPort: 10349, }, }, }, }, }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ctx := context.TODO() cluster := tt.beforeCluster.DeepCopy() cluster.ObjectMeta = metav1.ObjectMeta{ GenerateName: "cluster-", Namespace: "default", } g.Expect(testEnv.Create(ctx, cluster)).To(Succeed()) 
g.Expect(cluster.Spec.NetworkSpec).To(Equal(tt.afterCluster.Spec.NetworkSpec)) }) } }
explode_data.jsonl/70925
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1374 }
[ 2830, 3393, 14419, 3540, 75, 4993, 60336, 34, 14912, 641, 2483, 26008, 1155, 353, 8840, 836, 8, 341, 22985, 57, 14783, 16527, 1669, 220, 18, 198, 11940, 53, 4872, 8327, 1669, 647, 4872, 8327, 515, 197, 197, 51703, 15363, 14783, 16527, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckResourceIntegration(t *testing.T) { rand.Seed(time.Now().UnixNano()) for _, test := range []struct { testName string actual []runtime.Object expected runtime.Object shouldError bool }{ { testName: "match object by labels, first in list matches", actual: []runtime.Object{ testutils.WithSpec(t, testutils.WithLabels(t, testutils.NewPod("labels-match-pod", ""), map[string]string{ "app": "nginx", }), map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }), testutils.WithSpec(t, testutils.WithLabels(t, testutils.NewPod("bb", ""), map[string]string{ "app": "not-match", }), map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }), }, expected: &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "v1", "kind": "Pod", "metadata": map[string]interface{}{ "labels": map[string]interface{}{ "app": "nginx", }, }, "spec": map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }, }, }, }, { testName: "match object by labels, last in list matches", actual: []runtime.Object{ testutils.WithSpec(t, testutils.WithLabels(t, testutils.NewPod("last-in-list", ""), map[string]string{ "app": "not-match", }), map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }), testutils.WithSpec(t, testutils.WithLabels(t, testutils.NewPod("bb", ""), map[string]string{ "app": "nginx", }), map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }), }, expected: &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "v1", "kind": "Pod", "metadata": map[string]interface{}{ "labels": map[string]interface{}{ "app": "nginx", }, }, "spec": map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ 
"image": "nginx:1.7.9", "name": "nginx", }, }, }, }, }, }, { testName: "match object by labels, does not exist", actual: []runtime.Object{ testutils.WithSpec(t, testutils.WithLabels(t, testutils.NewPod("hello", ""), map[string]string{ "app": "NOT-A-MATCH", }), map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }), }, expected: &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "v1", "kind": "Pod", "metadata": map[string]interface{}{ "labels": map[string]interface{}{ "app": "nginx", }, }, "spec": map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }, }, }, shouldError: true, }, { testName: "match object by labels, field mismatch", actual: []runtime.Object{ testutils.WithSpec(t, testutils.WithLabels(t, testutils.NewPod("hello", ""), map[string]string{ "app": "nginx", }), map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "otherimage:latest", "name": "nginx", }, }, }), }, expected: &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "v1", "kind": "Pod", "metadata": map[string]interface{}{ "labels": map[string]interface{}{ "app": "nginx", }, }, "spec": map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }, }, }, shouldError: true, }, { testName: "step should fail if there are no objects of the same type in the namespace", actual: []runtime.Object{}, expected: &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "v1", "kind": "Pod", "metadata": map[string]interface{}{ "labels": map[string]interface{}{ "app": "nginx", }, }, "spec": map[string]interface{}{ "containers": []interface{}{ map[string]interface{}{ "image": "nginx:1.7.9", "name": "nginx", }, }, }, }, }, shouldError: true, }, } { t.Run(test.testName, func(t *testing.T) { namespace := 
fmt.Sprintf("kudo-test-%s", petname.Generate(2, "-")) err := testenv.Client.Create(context.TODO(), testutils.NewResource("v1", "Namespace", namespace, "")) if !k8serrors.IsAlreadyExists(err) { // we are ignoring already exists here because in tests we by default use retry client so this can happen assert.Nil(t, err) } for _, actual := range test.actual { _, _, err := testutils.Namespaced(testenv.DiscoveryClient, actual, namespace) assert.Nil(t, err) assert.Nil(t, testenv.Client.Create(context.TODO(), actual)) } step := Step{ Logger: testutils.NewTestLogger(t, ""), Client: func(bool) (client.Client, error) { return testenv.Client, nil }, DiscoveryClient: func() (discovery.CachedDiscoveryInterface, error) { return testenv.DiscoveryClient, nil }, } errors := step.CheckResource(test.expected, namespace) if test.shouldError { assert.NotEqual(t, []error{}, errors) } else { assert.Equal(t, []error{}, errors) } }) } }
explode_data.jsonl/43658
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2908 }
[ 2830, 3393, 3973, 4783, 52464, 1155, 353, 8840, 836, 8, 341, 7000, 437, 5732, 291, 9730, 13244, 1005, 55832, 83819, 12367, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 18185, 675, 262, 914, 198, 197, 88814, 414, 3056, 22255, 8348...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIstioOutboundIPRangesInjection(t *testing.T) { var annotations map[string]string // A valid IP range in := " 10.10.10.0/24\r,,\t,\n,," want := "10.10.10.0/24" annotations = getPodAnnotationsForConfig(t, in, "") if got := annotations[resources.IstioOutboundIPRangeAnnotation]; want != got { t.Fatalf("%v annotation expected to be %v, but is %v.", resources.IstioOutboundIPRangeAnnotation, want, got) } // Multiple valid ranges with whitespaces in = " \t\t10.10.10.0/24, ,,\t\n\r\n,10.240.10.0/14\n, 192.192.10.0/16" want = "10.10.10.0/24,10.240.10.0/14,192.192.10.0/16" annotations = getPodAnnotationsForConfig(t, in, "") if got := annotations[resources.IstioOutboundIPRangeAnnotation]; want != got { t.Fatalf("%v annotation expected to be %v, but is %v.", resources.IstioOutboundIPRangeAnnotation, want, got) } // An invalid IP range in = "10.10.10.10/33" annotations = getPodAnnotationsForConfig(t, in, "") if got, ok := annotations[resources.IstioOutboundIPRangeAnnotation]; ok { t.Fatalf("Expected to have no %v annotation for invalid option %v. But found value %v", resources.IstioOutboundIPRangeAnnotation, want, got) } // Configuration has an annotation override - its value must be preserved want = "10.240.10.0/14" annotations = getPodAnnotationsForConfig(t, "", want) if got := annotations[resources.IstioOutboundIPRangeAnnotation]; got != want { t.Fatalf("%v annotation is expected to have %v but got %v", resources.IstioOutboundIPRangeAnnotation, want, got) } annotations = getPodAnnotationsForConfig(t, "10.10.10.0/24", want) if got := annotations[resources.IstioOutboundIPRangeAnnotation]; got != want { t.Fatalf("%v annotation is expected to have %v but got %v", resources.IstioOutboundIPRangeAnnotation, want, got) } }
explode_data.jsonl/27423
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 653 }
[ 2830, 3393, 40, 267, 815, 2662, 10891, 3298, 74902, 36653, 1155, 353, 8840, 836, 8, 341, 2405, 32207, 2415, 14032, 30953, 271, 197, 322, 362, 2697, 6790, 2088, 198, 17430, 1669, 330, 220, 220, 16, 15, 13, 16, 15, 13, 16, 15, 13, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestBuildPlatformInvalid(t *testing.T) { skip.If(t, versions.LessThan(testEnv.DaemonAPIVersion(), "1.40"), "experimental in older versions") ctx := context.Background() defer setupTest(t)() dockerfile := `FROM busybox ` buf := bytes.NewBuffer(nil) w := tar.NewWriter(buf) writeTarRecord(t, w, "Dockerfile", dockerfile) err := w.Close() assert.NilError(t, err) apiclient := testEnv.APIClient() _, err = apiclient.ImageBuild(ctx, buf, types.ImageBuildOptions{ Remove: true, ForceRemove: true, Platform: "foobar", }) assert.Assert(t, err != nil) assert.ErrorContains(t, err, "unknown operating system or architecture") assert.Assert(t, errdefs.IsInvalidParameter(err)) }
explode_data.jsonl/82590
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 277 }
[ 2830, 3393, 11066, 17296, 7928, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 32901, 1155, 11, 10795, 1214, 433, 26067, 8623, 14359, 909, 64, 7291, 7082, 5637, 1507, 330, 16, 13, 19, 15, 3975, 330, 86703, 304, 9014, 10795, 5130, 20985, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntegration_EthLog(t *testing.T) { t.Parallel() rpcClient, gethClient, sub, assertMockCalls := cltest.NewEthMocks(t) defer assertMockCalls() app, cleanup := cltest.NewApplication(t, eth.NewClientWith(rpcClient, gethClient), ) defer cleanup() sub.On("Err").Return(nil).Maybe() sub.On("Unsubscribe").Return(nil).Maybe() gethClient.On("ChainID", mock.Anything).Return(app.Store.Config.ChainID(), nil) gethClient.On("FilterLogs", mock.Anything, mock.Anything).Maybe().Return([]models.Log{}, nil) rpcClient.On("EthSubscribe", mock.Anything, mock.Anything, "newHeads").Return(sub, nil) logsCh := cltest.MockSubscribeToLogsCh(gethClient, sub) gethClient.On("TransactionReceipt", mock.Anything, mock.Anything). Return(&types.Receipt{}, nil) require.NoError(t, app.StartAndConnect()) j := cltest.FixtureCreateJobViaWeb(t, app, "fixtures/web/eth_log_job.json") address := common.HexToAddress("0x3cCad4715152693fE3BC4460591e3D3Fbd071b42") initr := j.Initiators[0] assert.Equal(t, models.InitiatorEthLog, initr.Type) assert.Equal(t, address, initr.Address) logs := <-logsCh logs <- cltest.LogFromFixture(t, "testdata/requestLog0original.json") jrs := cltest.WaitForRuns(t, j, app.Store, 1) cltest.WaitForJobRunToComplete(t, app.Store, jrs[0]) }
explode_data.jsonl/75891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 500 }
[ 2830, 3393, 52464, 2089, 339, 2201, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 7000, 3992, 2959, 11, 633, 71, 2959, 11, 1186, 11, 2060, 11571, 55292, 1669, 1185, 1944, 7121, 65390, 72577, 1155, 340, 16867, 2060, 11571, 55292...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStringReplace_Replace_InputEmpty(t *testing.T) { stringReplace := StringReplace{Old: "foo", New: "bar"} assert.Equal(t, "", stringReplace.Replace("", NewVariables())) }
explode_data.jsonl/66404
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 703, 23107, 62, 23107, 48653, 3522, 1155, 353, 8840, 836, 8, 341, 262, 914, 23107, 1669, 923, 23107, 90, 18284, 25, 330, 7975, 497, 1532, 25, 330, 2257, 16707, 262, 2060, 12808, 1155, 11, 7342, 914, 23107, 20858, 19814, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFetchAutoMigsZonal(t *testing.T) { server := NewHttpServerMock() defer server.Close() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroups").Return(buildListInstanceGroupsResponse(zoneB, gceMigA, gceMigB)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigA).Return(buildInstanceGroupManagerResponse(zoneB, gceMigA)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigB).Return(buildInstanceGroupManagerResponse(zoneB, gceMigB)).Once() server.On("handle", "/project1/global/instanceTemplates/"+gceMigA).Return(instanceTemplate).Once() server.On("handle", "/project1/global/instanceTemplates/"+gceMigB).Return(instanceTemplate).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigA).Return(buildInstanceGroupManagerResponse(zoneB, gceMigA)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigA+"/listManagedInstances").Return(buildFourRunningInstancesManagedInstancesResponse(zoneB, gceMigA)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigB).Return(buildInstanceGroupManagerResponse(zoneB, gceMigB)).Once() server.On("handle", "/project1/zones/"+zoneB+"/instanceGroupManagers/"+gceMigB+"/listManagedInstances").Return(buildOneRunningInstanceManagedInstancesResponse(zoneB, gceMigB)).Once() regional := false g := newTestGceManager(t, server.URL, regional) min, max := 0, 100 g.migAutoDiscoverySpecs = []cloudprovider.MIGAutoDiscoveryConfig{ {Re: regexp.MustCompile("UNUSED"), MinSize: min, MaxSize: max}, } assert.NoError(t, g.fetchAutoMigs()) migs := g.GetMigs() assert.Equal(t, 2, len(migs)) validateMig(t, migs[0].Config, zoneB, gceMigA, min, max) validateMig(t, migs[1].Config, zoneB, gceMigB, min, max) mock.AssertExpectationsForObjects(t, server) }
explode_data.jsonl/27614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 696 }
[ 2830, 3393, 20714, 13253, 44, 14462, 57, 24202, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 1532, 2905, 5475, 11571, 741, 16867, 3538, 10421, 2822, 41057, 8071, 445, 8192, 497, 3521, 4987, 16, 31082, 3154, 33778, 8684, 33, 27569, 4851, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSplitManifest(t *testing.T) { manifests := SplitManifests(manifestFile) if len(manifests) != 1 { t.Errorf("Expected 1 manifest, got %v", len(manifests)) } expected := map[string]string{"manifest-0": expectedManifest} if !reflect.DeepEqual(manifests, expected) { t.Errorf("Expected %v, got %v", expected, manifests) } }
explode_data.jsonl/37744
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 20193, 38495, 1155, 353, 8840, 836, 8, 341, 197, 42315, 82, 1669, 27810, 38495, 82, 60671, 6962, 1703, 340, 743, 2422, 60671, 6962, 82, 8, 961, 220, 16, 341, 197, 3244, 13080, 445, 18896, 220, 16, 14455, 11, 2684, 1018, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateAzureManagerValidConfig(t *testing.T) { manager, err := CreateAzureManager(strings.NewReader(validAzureCfg), cloudprovider.NodeGroupDiscoveryOptions{}) expectedConfig := &Config{ Cloud: "AzurePublicCloud", Location: "southeastasia", TenantID: "fakeId", SubscriptionID: "fakeId", ResourceGroup: "fakeId", VMType: "vmss", AADClientID: "fakeId", AADClientSecret: "fakeId", VmssCacheTTL: 60, MaxDeploymentsCount: 8, CloudProviderRateLimitConfig: CloudProviderRateLimitConfig{ RateLimitConfig: azclients.RateLimitConfig{ CloudProviderRateLimit: false, CloudProviderRateLimitBucket: 5, CloudProviderRateLimitBucketWrite: 5, CloudProviderRateLimitQPS: 1, CloudProviderRateLimitQPSWrite: 1, }, InterfaceRateLimit: &azclients.RateLimitConfig{ CloudProviderRateLimit: false, CloudProviderRateLimitBucket: 5, CloudProviderRateLimitBucketWrite: 5, CloudProviderRateLimitQPS: 1, CloudProviderRateLimitQPSWrite: 1, }, VirtualMachineRateLimit: &azclients.RateLimitConfig{ CloudProviderRateLimit: false, CloudProviderRateLimitBucket: 5, CloudProviderRateLimitBucketWrite: 5, CloudProviderRateLimitQPS: 1, CloudProviderRateLimitQPSWrite: 1, }, StorageAccountRateLimit: &azclients.RateLimitConfig{ CloudProviderRateLimit: false, CloudProviderRateLimitBucket: 5, CloudProviderRateLimitBucketWrite: 5, CloudProviderRateLimitQPS: 1, CloudProviderRateLimitQPSWrite: 1, }, DiskRateLimit: &azclients.RateLimitConfig{ CloudProviderRateLimit: false, CloudProviderRateLimitBucket: 5, CloudProviderRateLimitBucketWrite: 5, CloudProviderRateLimitQPS: 1, CloudProviderRateLimitQPSWrite: 1, }, VirtualMachineScaleSetRateLimit: &azclients.RateLimitConfig{ CloudProviderRateLimit: false, CloudProviderRateLimitBucket: 5, CloudProviderRateLimitBucketWrite: 5, CloudProviderRateLimitQPS: 1, CloudProviderRateLimitQPSWrite: 1, }, }, } assert.NoError(t, err) assert.Equal(t, true, reflect.DeepEqual(*expectedConfig, *manager.config), "unexpected azure manager configuration") }
explode_data.jsonl/12787
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1031 }
[ 2830, 3393, 4021, 78107, 2043, 4088, 2648, 1155, 353, 8840, 836, 8, 341, 92272, 11, 1848, 1669, 4230, 78107, 2043, 51442, 68587, 41529, 78107, 42467, 701, 9437, 19979, 21714, 2808, 67400, 3798, 6257, 692, 42400, 2648, 1669, 609, 2648, 515...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDisconnectGitAlternates verifies that DisconnectGitAlternates removes a
// repository's objects/info/alternates link to its object pool while keeping
// every previously reachable object accessible afterwards.
func TestDisconnectGitAlternates(t *testing.T) {
	locator := config.NewLocator(config.Config)
	server, serverSocketPath := runObjectPoolServer(t, config.Config, locator)
	defer server.Stop()

	client, conn := newObjectPoolClient(t, serverSocketPath)
	defer conn.Close()

	ctx, cancel := testhelper.Context()
	defer cancel()

	testRepo, testRepoPath, cleanupFn := testhelper.NewTestRepo(t)
	defer cleanupFn()

	// Create a pool, link the test repository to it, then gc so that objects
	// become reachable only via the alternates link.
	pool, err := objectpool.NewObjectPool(config.Config, locator, testRepo.GetStorageName(), testhelper.NewTestObjectPoolName(t))
	require.NoError(t, err)
	defer pool.Remove(ctx)
	require.NoError(t, pool.Create(ctx, testRepo))
	require.NoError(t, pool.Link(ctx, testRepo))

	testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "gc")

	existingObjectID := "55bc176024cfa3baaceb71db584c7e5df900ea65"

	// Corrupt the repository to check that existingObjectID can no longer be found
	altPath, err := locator.InfoAlternatesPath(testRepo)
	require.NoError(t, err, "find info/alternates")
	require.NoError(t, os.RemoveAll(altPath))
	// `git cat-file -e` exits non-zero when the object is missing, which is
	// what cmd.Wait() reports below.
	cmd, err := git.SafeCmd(ctx, testRepo, nil, git.SubCmd{Name: "cat-file", Flags: []git.Option{git.Flag{Name: "-e"}}, Args: []string{existingObjectID}})
	require.NoError(t, err)
	require.Error(t, cmd.Wait(), "expect cat-file to fail because object cannot be found")

	// Restore the alternates link so the repository is healthy again before
	// exercising the RPC under test.
	require.NoError(t, pool.Link(ctx, testRepo))
	require.FileExists(t, altPath, "objects/info/alternates should be back")

	// At this point we know that the repository has access to
	// existingObjectID, but only if objects/info/alternates is in place.

	_, err = client.DisconnectGitAlternates(ctx, &gitalypb.DisconnectGitAlternatesRequest{Repository: testRepo})
	require.NoError(t, err, "call DisconnectGitAlternates")

	// Check that the object can still be found, even though
	// objects/info/alternates is gone. This is the purpose of
	// DisconnectGitAlternates.
	testhelper.AssertPathNotExists(t, altPath)
	testhelper.MustRunCommand(t, nil, "git", "-C", testRepoPath, "cat-file", "-e", existingObjectID)
}
explode_data.jsonl/8
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 699 }
[ 2830, 3393, 60651, 46562, 34543, 973, 1155, 353, 8840, 836, 8, 341, 197, 68033, 1669, 2193, 7121, 33831, 8754, 10753, 340, 41057, 11, 3538, 10286, 1820, 1669, 1598, 1190, 10551, 5475, 1155, 11, 2193, 10753, 11, 47117, 340, 16867, 3538, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMVCCStatsTxnSysPutAbort verifies MVCC stats bookkeeping when a
// transactional put to a system key (accounted under SysBytes/SysCount) is
// subsequently aborted: resolving the aborted intent must return the stats
// to empty, apart from LastUpdateNanos.
func TestMVCCStatsTxnSysPutAbort(t *testing.T) {
	defer leaktest.AfterTest(t)()
	defer log.Scope(t).Close(t)
	for _, engineImpl := range mvccEngineImpls {
		t.Run(engineImpl.name, func(t *testing.T) {
			engine := engineImpl.create()
			defer engine.Close()

			ctx := context.Background()
			aggMS := &enginepb.MVCCStats{}

			assertEq(t, engine, "initially", aggMS, &enginepb.MVCCStats{})

			// A range-descriptor key is a system key, so its bytes go to
			// SysBytes rather than Key/ValBytes.
			key := keys.RangeDescriptorKey(roachpb.RKey("a"))

			ts1 := hlc.Timestamp{WallTime: 1e9}
			txn := &roachpb.Transaction{
				TxnMeta:       enginepb.TxnMeta{ID: uuid.MakeV4(), WriteTimestamp: ts1},
				ReadTimestamp: ts1,
			}

			// Write a system intent at ts1.
			val1 := roachpb.MakeValueFromString("value")
			if err := MVCCPut(ctx, engine, aggMS, key, txn.ReadTimestamp, val1, txn); err != nil {
				t.Fatal(err)
			}

			// Pin the expected component sizes so the SysBytes sum below is
			// fully explained.
			mKeySize := int64(mvccKey(key).EncodedSize())
			require.EqualValues(t, mKeySize, 11)

			mValSize := int64((&enginepb.MVCCMetadata{
				Timestamp: hlc.LegacyTimestamp(ts1),
				Deleted:   false,
				Txn:       &txn.TxnMeta,
			}).Size())
			require.EqualValues(t, mValSize, 46)

			vKeySize := MVCCVersionTimestampSize
			require.EqualValues(t, vKeySize, 12)

			vVal1Size := int64(len(val1.RawBytes))
			require.EqualValues(t, vVal1Size, 10)

			val2 := roachpb.MakeValueFromString("longvalue")
			vVal2Size := int64(len(val2.RawBytes))
			require.EqualValues(t, vVal2Size, 14)

			expMS := enginepb.MVCCStats{
				LastUpdateNanos: 1e9,
				SysBytes:        mKeySize + mValSize + vKeySize + vVal1Size, // 11+46+12+10 = 79
				SysCount:        1,
			}
			assertEq(t, engine, "after first put", aggMS, &expMS)

			// Now abort the intent.
			txn.Status = roachpb.ABORTED
			if _, err := MVCCResolveWriteIntent(ctx, engine, aggMS, roachpb.MakeLockUpdate(txn, roachpb.Span{Key: key}),
			); err != nil {
				t.Fatal(err)
			}

			// Resolving the aborted intent deletes both metadata and value,
			// leaving only the update timestamp behind.
			expMS = enginepb.MVCCStats{
				LastUpdateNanos: 1e9,
			}
			assertEq(t, engine, "after aborting", aggMS, &expMS)
		})
	}
}
explode_data.jsonl/70085
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 912 }
[ 2830, 3393, 66626, 3706, 16635, 31584, 77, 32792, 19103, 85891, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 2023, 8358, 4712, 9673, 1669, 2088, 23164, 638, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_func_pointer_type(t *testing.T) { type TestObject2 struct { F func() } type TestObject1 struct { Obj *TestObject2 } t.Run("encode null is valid", func(t *testing.T) { should := require.New(t) output, err := json.Marshal(TestObject1{}) should.Nil(err) should.Equal(`{"Obj":null}`, string(output)) output, err = jsoniter.Marshal(TestObject1{}) should.Nil(err) should.Equal(`{"Obj":null}`, string(output)) }) t.Run("encode not null is invalid", func(t *testing.T) { should := require.New(t) _, err := json.Marshal(TestObject1{Obj: &TestObject2{}}) should.NotNil(err) _, err = jsoniter.Marshal(TestObject1{Obj: &TestObject2{}}) should.NotNil(err) }) t.Run("decode null is valid", func(t *testing.T) { should := require.New(t) var obj TestObject1 should.Nil(json.Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj)) should.Nil(jsoniter.Unmarshal([]byte(`{"Obj":{"F": null}}`), &obj)) }) t.Run("decode not null is invalid", func(t *testing.T) { should := require.New(t) var obj TestObject1 should.NotNil(json.Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj)) should.NotNil(jsoniter.Unmarshal([]byte(`{"Obj":{"F": "hello"}}`), &obj)) }) }
explode_data.jsonl/73520
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 510 }
[ 2830, 3393, 9596, 21425, 1819, 1155, 353, 8840, 836, 8, 341, 13158, 3393, 1190, 17, 2036, 341, 197, 12727, 2915, 741, 197, 532, 13158, 3393, 1190, 16, 2036, 341, 197, 197, 5261, 353, 2271, 1190, 17, 198, 197, 532, 3244, 16708, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDDL(t *testing.T) { testcases := []struct { query string output *DDL affected []string }{{ query: "create table a", output: &DDL{ Action: CreateStr, Table: TableName{Name: NewTableIdent("a")}, }, affected: []string{"a"}, }, { query: "rename table a to b", output: &DDL{ Action: RenameStr, FromTables: TableNames{ TableName{Name: NewTableIdent("a")}, }, ToTables: TableNames{ TableName{Name: NewTableIdent("b")}, }, }, affected: []string{"a", "b"}, }, { query: "rename table a to b, c to d", output: &DDL{ Action: RenameStr, FromTables: TableNames{ TableName{Name: NewTableIdent("a")}, TableName{Name: NewTableIdent("c")}, }, ToTables: TableNames{ TableName{Name: NewTableIdent("b")}, TableName{Name: NewTableIdent("d")}, }, }, affected: []string{"a", "c", "b", "d"}, }, { query: "drop table a", output: &DDL{ Action: DropStr, FromTables: TableNames{ TableName{Name: NewTableIdent("a")}, }, }, affected: []string{"a"}, }, { query: "drop table a, b", output: &DDL{ Action: DropStr, FromTables: TableNames{ TableName{Name: NewTableIdent("a")}, TableName{Name: NewTableIdent("b")}, }, }, affected: []string{"a", "b"}, }} for _, tcase := range testcases { got, err := Parse(tcase.query) if err != nil { t.Fatal(err) } if !reflect.DeepEqual(got, tcase.output) { t.Errorf("%s: %v, want %v", tcase.query, got, tcase.output) } want := make(TableNames, 0, len(tcase.affected)) for _, t := range tcase.affected { want = append(want, TableName{Name: NewTableIdent(t)}) } if affected := got.(*DDL).AffectedTables(); !reflect.DeepEqual(affected, want) { t.Errorf("Affected(%s): %v, want %v", tcase.query, affected, want) } } }
explode_data.jsonl/3373
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 807 }
[ 2830, 3393, 58781, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 27274, 262, 914, 198, 197, 21170, 256, 353, 58781, 198, 197, 197, 31057, 3056, 917, 198, 197, 15170, 515, 197, 27274, 25, 330, 3182, 1965, 264,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestList_Pop(t *testing.T) { gtest.C(t, func(t *gtest.T) { l := NewFrom([]interface{}{1, 2, 3, 4, 5, 6, 7, 8, 9}) t.Assert(l.PopBack(), 9) t.Assert(l.PopBacks(2), []interface{}{8, 7}) t.Assert(l.PopFront(), 1) t.Assert(l.PopFronts(2), []interface{}{2, 3}) }) }
explode_data.jsonl/30910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 852, 1088, 453, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 8810, 1669, 1532, 3830, 10556, 4970, 6257, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1