text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestInterface(t *testing.T) { gopClTest(t, ` type Shape interface { Area() float64 } func foo(shape Shape) { shape.Area() } `, `package main type Shape interface { Area() float64 } func foo(shape Shape) { shape.Area() } `) }
explode_data.jsonl/73604
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 5051, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 1565, 271, 1313, 22526, 3749, 341, 197, 8726, 368, 2224, 21, 19, 198, 630, 2830, 15229, 25933, 22526, 8, 341, 197, 12231, 81435, 741, 532, 7808, 1565, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_CardinalitySet(t *testing.T) { a := NewSet() if a.Cardinality() != 0 { t.Error("set should be an empty set") } a.Add(1) if a.Cardinality() != 1 { t.Error("set should have a size of 1") } a.Remove(1) if a.Cardinality() != 0 { t.Error("set should be an empty set") } a.Add(9) if a.Cardinality() != 1 { t.Error("set should have a size of 1") } a.Clear() if a.Cardinality() != 0 { t.Error("set should have a size of 1") } }
explode_data.jsonl/180
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 920, 567, 80777, 1649, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 1532, 1649, 2822, 743, 264, 48613, 80777, 368, 961, 220, 15, 341, 197, 3244, 6141, 445, 746, 1265, 387, 458, 4287, 738, 1138, 197, 630, 11323, 1904, 7, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestArrayStack(t *testing.T) { as := NewArrayStack() assert.Equal(t, 0, as.n) assert.Equal(t, 0, as.Size()) assert.Equal(t, 1, len(as.a)) err := as.Add(0, "a") // [a] assert.NoError(t, err) assert.Equal(t, ArrayStack{ a: []interface{}{"a"}, n: 1, }, as) assert.Equal(t, 1, len(as.a)) err = as.Add(0, "b") // [b a] assert.NoError(t, err) assert.Equal(t, ArrayStack{ a: []interface{}{"b", "a"}, n: 2, }, as) assert.Equal(t, 2, len(as.a)) err = as.Add(1, "c") // [b c a nil] assert.NoError(t, err) assert.Equal(t, ArrayStack{ a: []interface{}{"b", "c", "a", nil}, n: 3, }, as) assert.Equal(t, 4, len(as.a)) _, err = as.Remove(0) // [c a nil nil] assert.NoError(t, err) assert.Equal(t, ArrayStack{ a: []interface{}{"c", "a", nil, nil}, n: 2, }, as) assert.Equal(t, 4, len(as.a)) err = as.Add(2, "d") // [c a d nil] assert.NoError(t, err) assert.Equal(t, ArrayStack{ a: []interface{}{"c", "a", "d", nil}, n: 3, }, as) assert.Equal(t, 4, len(as.a)) value, err := as.Get(0) // [c a d nil] -> c assert.NoError(t, err) assert.Equal(t, "c", value) assert.Equal(t, ArrayStack{ a: []interface{}{"c", "a", "d", nil}, n: 3, }, as) assert.Equal(t, 4, len(as.a)) value, err = as.Set(0, "foo") // [c a d nil] -> [foo a d nil] assert.NoError(t, err) assert.Equal(t, "c", value) assert.Equal(t, ArrayStack{ a: []interface{}{"foo", "a", "d", nil}, n: 3, }, as) assert.Equal(t, 4, len(as.a)) value, err = as.Get(0) // [foo a d nil] -> foo assert.NoError(t, err) assert.Equal(t, "foo", value) assert.Equal(t, ArrayStack{ a: []interface{}{"foo", "a", "d", nil}, n: 3, }, as) assert.Equal(t, 4, len(as.a)) }
explode_data.jsonl/34360
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 838 }
[ 2830, 3393, 1857, 4336, 1155, 353, 8840, 836, 8, 341, 60451, 1669, 1532, 1857, 4336, 741, 6948, 12808, 1155, 11, 220, 15, 11, 438, 1253, 340, 6948, 12808, 1155, 11, 220, 15, 11, 438, 2465, 2398, 6948, 12808, 1155, 11, 220, 16, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWrapOnewayHandlerInvalid(t *testing.T) { tests := []struct { Name string Func interface{} }{ {"empty", func() {}}, {"not-a-function", 0}, { "wrong-args-in", func(context.Context) error { return nil }, }, { "wrong-ctx", func(string, *struct{}) error { return nil }, }, { "wrong-req-body", func(context.Context, string, int) error { return nil }, }, { "wrong-response", func(context.Context, map[string]interface{}) (*struct{}, error) { return nil, nil }, }, { "wrong-response-val", func(context.Context, map[string]interface{}) int { return 0 }, }, { "non-pointer-req", func(context.Context, struct{}) error { return nil }, }, { "non-string-key", func(context.Context, map[int32]interface{}) error { return nil }, }, } for _, tt := range tests { assert.Panics(t, assert.PanicTestFunc(func() { wrapOnewayHandler(tt.Name, tt.Func) })) } }
explode_data.jsonl/5
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 26787, 46, 931, 352, 3050, 7928, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 21297, 914, 198, 197, 197, 9626, 3749, 16094, 197, 59403, 197, 197, 4913, 3194, 497, 2915, 368, 4687, 1583, 197, 197, 4913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWebhookCABundleRetrieval(t *testing.T) { logrus.SetLevel(logrus.DebugLevel) namespace := "ns" missingCAError := fmt.Errorf("Unable to find ca") caBundle := []byte("Foo") type initial struct { csvs []runtime.Object crds []runtime.Object objs []runtime.Object desc v1alpha1.WebhookDescription } type expected struct { caBundle []byte err error } tests := []struct { name string initial initial expected expected }{ { name: "MissingCAResource", initial: initial{ csvs: []runtime.Object{ csv("csv1", namespace, "0.0.0", "", installStrategy("csv1-dep1", nil, []v1alpha1.StrategyDeploymentPermissions{}, ), []*apiextensionsv1.CustomResourceDefinition{crd("c1", "v1", "g1")}, []*apiextensionsv1.CustomResourceDefinition{}, v1alpha1.CSVPhaseInstalling, ), }, desc: v1alpha1.WebhookDescription{ GenerateName: "webhook", Type: v1alpha1.ValidatingAdmissionWebhook, }, }, expected: expected{ caBundle: nil, err: missingCAError, }, }, { name: "RetrieveCAFromConversionWebhook", initial: initial{ csvs: []runtime.Object{ csvWithConversionWebhook(csv("csv1", namespace, "0.0.0", "", installStrategy("csv1-dep1", nil, []v1alpha1.StrategyDeploymentPermissions{}, ), []*apiextensionsv1.CustomResourceDefinition{crd("c1", "v1", "g1")}, []*apiextensionsv1.CustomResourceDefinition{}, v1alpha1.CSVPhaseInstalling, ), "csv1-dep1", []string{"c1.g1"}), }, crds: []runtime.Object{ crdWithConversionWebhook(crd("c1", "v1", "g1"), caBundle), }, desc: v1alpha1.WebhookDescription{ GenerateName: "webhook", Type: v1alpha1.ConversionWebhook, ConversionCRDs: []string{"c1.g1"}, }, }, expected: expected{ caBundle: caBundle, err: nil, }, }, { name: "FailToRetrieveCAFromConversionWebhook", initial: initial{ csvs: []runtime.Object{ csvWithConversionWebhook(csv("csv1", namespace, "0.0.0", "", installStrategy("csv1-dep1", nil, []v1alpha1.StrategyDeploymentPermissions{}, ), []*apiextensionsv1.CustomResourceDefinition{crd("c1", "v1", "g1")}, []*apiextensionsv1.CustomResourceDefinition{}, v1alpha1.CSVPhaseInstalling, ), 
"csv1-dep1", []string{"c1.g1"}), }, crds: []runtime.Object{ crd("c1", "v1", "g1"), }, desc: v1alpha1.WebhookDescription{ GenerateName: "webhook", Type: v1alpha1.ConversionWebhook, ConversionCRDs: []string{"c1.g1"}, }, }, expected: expected{ caBundle: nil, err: missingCAError, }, }, { name: "RetrieveFromValidatingAdmissionWebhook", initial: initial{ csvs: []runtime.Object{ csvWithValidatingAdmissionWebhook(csv("csv1", namespace, "0.0.0", "", installStrategy("csv1-dep1", nil, []v1alpha1.StrategyDeploymentPermissions{}, ), []*apiextensionsv1.CustomResourceDefinition{crd("c1", "v1", "g1")}, []*apiextensionsv1.CustomResourceDefinition{}, v1alpha1.CSVPhaseInstalling, ), "csv1-dep1", []string{"c1.g1"}), }, objs: []runtime.Object{ &admissionregistrationv1.ValidatingWebhookConfiguration{ ObjectMeta: metav1.ObjectMeta{ Name: "webhook", Namespace: namespace, Labels: map[string]string{ "olm.owner": "csv1", "olm.owner.namespace": namespace, "olm.owner.kind": v1alpha1.ClusterServiceVersionKind, "olm.webhook-description-generate-name": "webhook", }, }, Webhooks: []admissionregistrationv1.ValidatingWebhook{ { Name: "Webhook", ClientConfig: admissionregistrationv1.WebhookClientConfig{ CABundle: caBundle, }, }, }, }, }, desc: v1alpha1.WebhookDescription{ GenerateName: "webhook", Type: v1alpha1.ValidatingAdmissionWebhook, }, }, expected: expected{ caBundle: caBundle, err: nil, }, }, { name: "RetrieveFromMutatingAdmissionWebhook", initial: initial{ csvs: []runtime.Object{ csvWithMutatingAdmissionWebhook(csv("csv1", namespace, "0.0.0", "", installStrategy("csv1-dep1", nil, []v1alpha1.StrategyDeploymentPermissions{}, ), []*apiextensionsv1.CustomResourceDefinition{crd("c1", "v1", "g1")}, []*apiextensionsv1.CustomResourceDefinition{}, v1alpha1.CSVPhaseInstalling, ), "csv1-dep1", []string{"c1.g1"}), }, objs: []runtime.Object{ &admissionregistrationv1.MutatingWebhookConfiguration{ ObjectMeta: metav1.ObjectMeta{ Name: "webhook", Namespace: namespace, Labels: map[string]string{ "olm.owner": 
"csv1", "olm.owner.namespace": namespace, "olm.owner.kind": v1alpha1.ClusterServiceVersionKind, "olm.webhook-description-generate-name": "webhook", }, }, Webhooks: []admissionregistrationv1.MutatingWebhook{ { Name: "Webhook", ClientConfig: admissionregistrationv1.WebhookClientConfig{ CABundle: caBundle, }, }, }, }, }, desc: v1alpha1.WebhookDescription{ GenerateName: "webhook", Type: v1alpha1.MutatingAdmissionWebhook, }, }, expected: expected{ caBundle: caBundle, err: nil, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { // Create test operator ctx, cancel := context.WithCancel(context.TODO()) defer cancel() op, err := NewFakeOperator( ctx, withNamespaces(namespace, "kube-system"), withClientObjs(tt.initial.csvs...), withK8sObjs(tt.initial.objs...), withExtObjs(tt.initial.crds...), withOperatorNamespace(namespace), ) require.NoError(t, err) // run csv sync for each CSV for _, csv := range tt.initial.csvs { caBundle, err := op.getWebhookCABundle(csv.(*v1alpha1.ClusterServiceVersion), &tt.initial.desc) require.Equal(t, tt.expected.err, err) require.Equal(t, tt.expected.caBundle, caBundle) } }) } }
explode_data.jsonl/31209
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3265 }
[ 2830, 3393, 5981, 20873, 34, 1867, 4206, 12020, 7231, 831, 1155, 353, 8840, 836, 8, 341, 90822, 4202, 4449, 12531, 20341, 20345, 4449, 340, 56623, 1669, 330, 4412, 698, 197, 30616, 5049, 1454, 1669, 8879, 13080, 445, 17075, 311, 1477, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAccSnapshot_update(t *testing.T) { data := acceptance.BuildTestData(t, "azurerm_snapshot", "test") r := SnapshotResource{} data.ResourceTest(t, r, []acceptance.TestStep{ { Config: r.fromManagedDisk(data), Check: acceptance.ComposeTestCheckFunc( check.That(data.ResourceName).ExistsInAzure(r), ), }, { Config: r.fromManagedDiskUpdated(data), Check: acceptance.ComposeTestCheckFunc( check.That(data.ResourceName).ExistsInAzure(r), ), }, }) }
explode_data.jsonl/78018
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 201 }
[ 2830, 3393, 14603, 15009, 8882, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 25505, 25212, 83920, 1155, 11, 330, 1370, 324, 4195, 53265, 497, 330, 1944, 1138, 7000, 1669, 68697, 4783, 31483, 8924, 20766, 2271, 1155, 11, 435, 11, 3056, 1033...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGocloak_GetUsers(t *testing.T) { t.Parallel() cfg := GetConfig(t) client := NewClientWithDebug(t) token := GetAdminToken(t, client) users, err := client.GetUsers( token.AccessToken, cfg.GoCloak.Realm, GetUsersParams{ Username: cfg.GoCloak.UserName, }) FailIfErr(t, err, "GetUsers failed") t.Log(users) }
explode_data.jsonl/79556
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 38, 509, 385, 585, 13614, 7137, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 50286, 1669, 2126, 2648, 1155, 340, 25291, 1669, 1532, 2959, 2354, 7939, 1155, 340, 43947, 1669, 2126, 7210, 3323, 1155, 11, 2943, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_scsi_GetDMDeviceByChildren(t *testing.T) { type args struct { ctx context.Context devices []string } ctx := context.Background() defaultArgs := args{ctx: ctx, devices: []string{mh.ValidDeviceName, mh.ValidDeviceName2}} ctrl := gomock.NewController(t) defer ctrl.Finish() sysPath1 := fmt.Sprintf("/sys/block/%s/holders/dm-", mh.ValidDeviceName) sysPath2 := fmt.Sprintf("/sys/block/%s/holders/dm-", mh.ValidDeviceName2) sysPath1Glob := sysPath1 + "*" sysPath2Glob := sysPath2 + "*" dm1Path := sysPath1 + "1" dm2Path := sysPath1 + "2" mock := mh.MockHelper{ Ctrl: ctrl, } tests := []struct { name string fields scsiFields args args stateSetter func(fields scsiFields) want string wantErr bool }{ { name: "error while resolve glob", fields: getDefaultSCSIFields(ctrl), stateSetter: func(fields scsiFields) { mock.FilePathGlobCallPattern = sysPath1Glob mock.FilePathGlobErr(fields.filePath) }, args: defaultArgs, want: "", wantErr: true, }, { name: "not found", fields: getDefaultSCSIFields(ctrl), stateSetter: func(fields scsiFields) { mock.FilePathGlobOKReturn = []string{} mock.FilePathGlobCallPattern = sysPath1Glob mock.FilePathGlobOK(fields.filePath) mock.FilePathGlobCallPattern = sysPath2Glob mock.FilePathGlobOK(fields.filePath) }, args: defaultArgs, want: "", wantErr: true, }, { name: "check another devices if one failed", fields: getDefaultSCSIFields(ctrl), stateSetter: func(fields scsiFields) { mock.FilePathGlobOKReturn = []string{dm1Path} mock.FilePathGlobCallPattern = sysPath1Glob mock.FilePathGlobOK(fields.filePath) mock.FilePathGlobOKReturn = []string{dm2Path} mock.FilePathGlobCallPattern = sysPath2Glob mock.FilePathGlobOK(fields.filePath) mock.IOUTILReadFileCallPath = dm1Path + "/dm/uuid" mock.IOUTILReadFileErr(fields.fileReader) mock.IOUTILReadFileOKReturn = "mpath" mock.IOUTILReadFileCallPath = dm2Path + "/dm/uuid" mock.IOUTILReadFileOK(fields.fileReader) }, args: defaultArgs, want: "dm-2", wantErr: false, }, { name: "devices have different parents", fields: 
getDefaultSCSIFields(ctrl), stateSetter: func(fields scsiFields) { mock.FilePathGlobOKReturn = []string{dm1Path} mock.FilePathGlobCallPattern = sysPath1Glob mock.FilePathGlobOK(fields.filePath) mock.FilePathGlobOKReturn = []string{dm2Path} mock.FilePathGlobCallPattern = sysPath2Glob mock.FilePathGlobOK(fields.filePath) mock.IOUTILReadFileOKReturn = "mpath" mock.IOUTILReadFileCallPath = dm1Path + "/dm/uuid" mock.IOUTILReadFileOK(fields.fileReader) mock.IOUTILReadFileCallPath = dm2Path + "/dm/uuid" mock.IOUTILReadFileOK(fields.fileReader) }, args: defaultArgs, want: "", wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { s := &Scsi{ fileReader: tt.fields.fileReader, filePath: tt.fields.filePath, os: tt.fields.os, osexec: tt.fields.osexec, singleCall: tt.fields.singleCall, } tt.stateSetter(tt.fields) got, err := s.GetDMDeviceByChildren(tt.args.ctx, tt.args.devices) if (err != nil) != tt.wantErr { t.Errorf("GetDMDeviceByChildren() error = %v, wantErr %v", err, tt.wantErr) return } if got != tt.want { t.Errorf("GetDMDeviceByChildren() got = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/65986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1635 }
[ 2830, 3393, 643, 63229, 13614, 8395, 6985, 1359, 11539, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 20985, 257, 2266, 9328, 198, 197, 27302, 1216, 3056, 917, 198, 197, 630, 20985, 1669, 2266, 19047, 2822, 11940, 4117, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetCellFloat(t *testing.T) { sheet := "Sheet1" t.Run("with no decimal", func(t *testing.T) { f := NewFile() assert.NoError(t, f.SetCellFloat(sheet, "A1", 123.0, -1, 64)) assert.NoError(t, f.SetCellFloat(sheet, "A2", 123.0, 1, 64)) val, err := f.GetCellValue(sheet, "A1") assert.NoError(t, err) assert.Equal(t, "123", val, "A1 should be 123") val, err = f.GetCellValue(sheet, "A2") assert.NoError(t, err) assert.Equal(t, "123.0", val, "A2 should be 123.0") }) t.Run("with a decimal and precision limit", func(t *testing.T) { f := NewFile() assert.NoError(t, f.SetCellFloat(sheet, "A1", 123.42, 1, 64)) val, err := f.GetCellValue(sheet, "A1") assert.NoError(t, err) assert.Equal(t, "123.4", val, "A1 should be 123.4") }) t.Run("with a decimal and no limit", func(t *testing.T) { f := NewFile() assert.NoError(t, f.SetCellFloat(sheet, "A1", 123.42, -1, 64)) val, err := f.GetCellValue(sheet, "A1") assert.NoError(t, err) assert.Equal(t, "123.42", val, "A1 should be 123.42") }) f := NewFile() assert.EqualError(t, f.SetCellFloat(sheet, "A", 123.42, -1, 64), `cannot convert cell "A" to coordinates: invalid cell name "A"`) }
explode_data.jsonl/947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 522 }
[ 2830, 3393, 1649, 3599, 5442, 1155, 353, 8840, 836, 8, 341, 1903, 3674, 1669, 330, 10541, 16, 698, 3244, 16708, 445, 4197, 902, 12122, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1166, 1669, 1532, 1703, 741, 197, 6948, 35699, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCPUShare_ParseCPUShare(t *testing.T) { tests := []struct { in string out CPUShare err error }{ {"1024", 1024, nil}, {"1", 0, ErrInvalidCPUShare}, {"1025", 1025, nil}, } for _, tt := range tests { c, err := ParseCPUShare(tt.in) if err != tt.err { t.Fatalf("err => %v; want %v", err, tt.err) } if tt.err != nil { continue } if got, want := c, tt.out; got != want { t.Fatalf("CPUShare => %d; want %d", got, want) } } }
explode_data.jsonl/64516
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 31615, 12115, 77337, 31615, 12115, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 220, 914, 198, 197, 13967, 13940, 12115, 198, 197, 9859, 1465, 198, 197, 59403, 197, 197, 4913, 16, 15, 17, 19, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLexPosition(t *testing.T) { l := lex("x", nil) c1 := l.next() if c1 != 'x' { t.Errorf("next: unexpected rune %d", c1) } if l.pos != 1 { t.Errorf("next: unexpected position %d", l.pos) } if l.start != 0 { t.Errorf("next: unexpected start %d", l.start) } l.backup() if l.pos != 0 { t.Errorf("backup: unexpected position %d", l.pos) } l.peek() if l.pos != 0 { t.Errorf("peek: unexpected position %d", l.pos) } l.next() c2 := l.next() if c2 != eof { t.Errorf("next: expected eof %d", c2) } }
explode_data.jsonl/81026
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 47778, 3812, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 22429, 445, 87, 497, 2092, 340, 1444, 16, 1669, 326, 4529, 741, 743, 272, 16, 961, 364, 87, 6, 341, 197, 3244, 13080, 445, 3600, 25, 16500, 63499, 1018, 67, 497, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestBindWriterOnUint8Slice(t *testing.T) { cases := []struct { p []byte v []uint8 }{ { []byte{0x88}, []uint8{0x88}, }, { []byte{ 0x88, 0x83, 0x00, 0x00, }, []uint8{0x88, 0x83, 0x00, 0x00}, }, } for _, c := range cases { var u []uint8 w := BindWriterOnUint8Slice(&u) if w == nil { t.Errorf("BindWriterOnUint8Slice on %+v failed (got nil io.Writer)", &u) } l, err := w.Write(c.p) if err != nil { t.Error(err) } if len(c.p)-l != 0 { t.Errorf("len(p) %d but write len %d", len(c.p), l) } if !reflect.DeepEqual(u, c.v) { t.Errorf("parse %v expect %v but got %v", c.p, c.v, u) } } }
explode_data.jsonl/58525
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 360 }
[ 2830, 3393, 9950, 6492, 1925, 21570, 23, 33236, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 3223, 3056, 3782, 198, 197, 5195, 3056, 2496, 23, 198, 197, 59403, 197, 197, 515, 298, 197, 1294, 3782, 90, 15, 87...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestHasMonotonicClock(t *testing.T) { yes := func(expr string, tt Time) { if GetMono(&tt) == 0 { t.Errorf("%s: missing monotonic clock reading", expr) } } no := func(expr string, tt Time) { if GetMono(&tt) != 0 { t.Errorf("%s: unexpected monotonic clock reading", expr) } } yes("<-After(1)", <-After(1)) ticker := NewTicker(1) yes("<-Tick(1)", <-ticker.C) ticker.Stop() no("Date(2009, 11, 23, 0, 0, 0, 0, UTC)", Date(2009, 11, 23, 0, 0, 0, 0, UTC)) tp, _ := Parse(UnixDate, "Sat Mar 7 11:06:39 PST 2015") no(`Parse(UnixDate, "Sat Mar 7 11:06:39 PST 2015")`, tp) no("Unix(1486057371, 0)", Unix(1486057371, 0)) yes("Now()", Now()) tu := Unix(1486057371, 0) tm := tu SetMono(&tm, 123456) no("tu", tu) yes("tm", tm) no("tu.Add(1)", tu.Add(1)) no("tu.In(UTC)", tu.In(UTC)) no("tu.AddDate(1, 1, 1)", tu.AddDate(1, 1, 1)) no("tu.AddDate(0, 0, 0)", tu.AddDate(0, 0, 0)) no("tu.Local()", tu.Local()) no("tu.UTC()", tu.UTC()) no("tu.Round(2)", tu.Round(2)) no("tu.Truncate(2)", tu.Truncate(2)) yes("tm.Add(1)", tm.Add(1)) no("tm.AddDate(1, 1, 1)", tm.AddDate(1, 1, 1)) no("tm.AddDate(0, 0, 0)", tm.AddDate(0, 0, 0)) yes("tm.In(UTC)", tm.In(UTC)) yes("tm.Local()", tm.Local()) yes("tm.UTC()", tm.UTC()) no("tm.Round(2)", tm.Round(2)) no("tm.Truncate(2)", tm.Truncate(2)) }
explode_data.jsonl/48363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 647 }
[ 2830, 3393, 10281, 11095, 354, 14011, 26104, 1155, 353, 8840, 836, 8, 341, 197, 9693, 1669, 2915, 34322, 914, 11, 17853, 4120, 8, 341, 197, 743, 2126, 58946, 2099, 5566, 8, 621, 220, 15, 341, 298, 3244, 13080, 4430, 82, 25, 7402, 77...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDateTimeAddReal(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) cases := []struct { sql string result string }{ {`SELECT "1900-01-01 00:00:00" + INTERVAL 1.123456789e3 SECOND;`, "1900-01-01 00:18:43.456789"}, {`SELECT 19000101000000 + INTERVAL 1.123456789e3 SECOND;`, "1900-01-01 00:18:43.456789"}, {`select date("1900-01-01") + interval 1.123456789e3 second;`, "1900-01-01 00:18:43.456789"}, {`SELECT "1900-01-01 00:18:43.456789" - INTERVAL 1.123456789e3 SECOND;`, "1900-01-01 00:00:00"}, {`SELECT 19000101001843.456789 - INTERVAL 1.123456789e3 SECOND;`, "1900-01-01 00:00:00"}, {`select date("1900-01-01") - interval 1.123456789e3 second;`, "1899-12-31 23:41:16.543211"}, {`select 19000101000000 - interval 1.123456789e3 second;`, "1899-12-31 23:41:16.543211"}, } for _, c := range cases { tk.MustQuery(c.sql).Check(testkit.Rows(c.result)) } }
explode_data.jsonl/65486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 7689, 2212, 12768, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 692, 1444, 2264, 1669, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_Join_string(t *testing.T) { type args struct { outer Enumerator[string] inner Enumerator[string] outerKeySelector func(string) string innerKeySelector func(string) string resultSelector func(string, string) string } tests := []struct { name string args args want Enumerator[string] }{ {name: "CustomComparer", args: args{ outer: NewOnSlice("ABCxxx", "abcyyy", "defzzz", "ghizzz"), inner: NewOnSlice("000abc", "111gHi", "222333"), outerKeySelector: func(oel string) string { return strings.ToLower(oel[:3]) }, innerKeySelector: func(iel string) string { return strings.ToLower(iel[3:]) }, resultSelector: func(oel, iel string) string { return oel + ":" + iel }, }, want: NewOnSlice("ABCxxx:000abc", "abcyyy:000abc", "ghizzz:111gHi"), }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, _ := Join(tt.args.outer, tt.args.inner, tt.args.outerKeySelector, tt.args.innerKeySelector, tt.args.resultSelector) if !SequenceEqualMust(got, tt.want) { got.Reset() tt.want.Reset() t.Errorf("Join() = '%v', want '%v'", String(got), String(tt.want)) } }) } }
explode_data.jsonl/67374
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 538 }
[ 2830, 3393, 10598, 1961, 3904, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 197, 2676, 310, 76511, 14032, 921, 197, 197, 4382, 310, 76511, 14032, 921, 197, 197, 2676, 1592, 5877, 2915, 3609, 8, 914, 198, 197, 197, 4382, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListLemMeasure(t *testing.T) { net, objs := networkWithLemMeasureObjects(t, 5) ctx := net.Validators[0].ClientCtx request := func(next []byte, offset, limit uint64, total bool) []string { args := []string{ fmt.Sprintf("--%s=json", tmcli.OutputFlag), } if next == nil { args = append(args, fmt.Sprintf("--%s=%d", flags.FlagOffset, offset)) } else { args = append(args, fmt.Sprintf("--%s=%s", flags.FlagPageKey, next)) } args = append(args, fmt.Sprintf("--%s=%d", flags.FlagLimit, limit)) if total { args = append(args, fmt.Sprintf("--%s", flags.FlagCountTotal)) } return args } t.Run("ByOffset", func(t *testing.T) { step := 2 for i := 0; i < len(objs); i += step { args := request(nil, uint64(i), uint64(step), false) out, err := clitestutil.ExecTestCLICmd(ctx, cli.CmdListLemMeasure(), args) require.NoError(t, err) var resp types.QueryAllLemMeasureResponse require.NoError(t, net.Config.Codec.UnmarshalJSON(out.Bytes(), &resp)) require.LessOrEqual(t, len(resp.LemMeasure), step) require.Subset(t, objs, resp.LemMeasure) } }) t.Run("ByKey", func(t *testing.T) { step := 2 var next []byte for i := 0; i < len(objs); i += step { args := request(next, 0, uint64(step), false) out, err := clitestutil.ExecTestCLICmd(ctx, cli.CmdListLemMeasure(), args) require.NoError(t, err) var resp types.QueryAllLemMeasureResponse require.NoError(t, net.Config.Codec.UnmarshalJSON(out.Bytes(), &resp)) require.LessOrEqual(t, len(resp.LemMeasure), step) require.Subset(t, objs, resp.LemMeasure) next = resp.Pagination.NextKey } }) t.Run("Total", func(t *testing.T) { args := request(nil, 0, uint64(len(objs)), true) out, err := clitestutil.ExecTestCLICmd(ctx, cli.CmdListLemMeasure(), args) require.NoError(t, err) var resp types.QueryAllLemMeasureResponse require.NoError(t, net.Config.Codec.UnmarshalJSON(out.Bytes(), &resp)) require.NoError(t, err) require.Equal(t, len(objs), int(resp.Pagination.Total)) require.Equal(t, objs, resp.LemMeasure) }) }
explode_data.jsonl/44105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 865 }
[ 2830, 3393, 852, 43, 336, 32236, 1155, 353, 8840, 836, 8, 341, 59486, 11, 52937, 1669, 3922, 2354, 43, 336, 32236, 11543, 1155, 11, 220, 20, 692, 20985, 1669, 4179, 13, 31748, 58, 15, 936, 2959, 23684, 198, 23555, 1669, 2915, 16913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCompactionAllowZeroSeqNum(t *testing.T) { var d *DB defer func() { if d != nil { require.NoError(t, d.Close()) } }() metaRE := regexp.MustCompile(`^L([0-9]+):([^-]+)-(.+)$`) var fileNum base.FileNum parseMeta := func(s string) (level int, meta *fileMetadata) { match := metaRE.FindStringSubmatch(s) if match == nil { t.Fatalf("malformed table spec: %s", s) } level, err := strconv.Atoi(match[1]) if err != nil { t.Fatalf("malformed table spec: %s: %s", s, err) } fileNum++ meta = &fileMetadata{ FileNum: fileNum, Smallest: InternalKey{UserKey: []byte(match[2])}, Largest: InternalKey{UserKey: []byte(match[3])}, } return level, meta } datadriven.RunTest(t, "testdata/compaction_allow_zero_seqnum", func(td *datadriven.TestData) string { switch td.Cmd { case "define": if d != nil { if err := d.Close(); err != nil { return err.Error() } } var err error if d, err = runDBDefineCmd(td, nil /* options */); err != nil { return err.Error() } d.mu.Lock() s := d.mu.versions.currentVersion().String() d.mu.Unlock() return s case "allow-zero-seqnum": d.mu.Lock() c := &compaction{ cmp: d.cmp, version: d.mu.versions.currentVersion(), inputs: []compactionLevel{{}, {}}, } c.startLevel, c.outputLevel = &c.inputs[0], &c.inputs[1] d.mu.Unlock() var buf bytes.Buffer for _, line := range strings.Split(td.Input, "\n") { parts := strings.Fields(line) if len(parts) == 0 { continue } c.flushing = nil c.startLevel.level = -1 var startFiles, outputFiles []*fileMetadata switch { case len(parts) == 1 && parts[0] == "flush": c.outputLevel.level = 0 d.mu.Lock() c.flushing = d.mu.mem.queue d.mu.Unlock() default: for _, p := range parts { level, meta := parseMeta(p) if c.startLevel.level == -1 { c.startLevel.level = level } switch level { case c.startLevel.level: startFiles = append(startFiles, meta) case c.startLevel.level + 1: outputFiles = append(outputFiles, meta) default: return fmt.Sprintf("invalid level %d: expected %d or %d", level, c.startLevel.level, c.startLevel.level+1) } } 
c.outputLevel.level = c.startLevel.level + 1 c.startLevel.files = manifest.NewLevelSliceSpecificOrder(startFiles) c.outputLevel.files = manifest.NewLevelSliceKeySorted(c.cmp, outputFiles) } c.smallest, c.largest = manifest.KeyRange(c.cmp, c.startLevel.files.Iter(), c.outputLevel.files.Iter()) c.inuseKeyRanges = nil c.setupInuseKeyRanges() fmt.Fprintf(&buf, "%t\n", c.allowZeroSeqNum()) } return buf.String() default: return fmt.Sprintf("unknown command: %s", td.Cmd) } }) }
explode_data.jsonl/51412
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1371 }
[ 2830, 3393, 13552, 1311, 18605, 17999, 20183, 4651, 1155, 353, 8840, 836, 8, 341, 2405, 294, 353, 3506, 198, 16867, 2915, 368, 341, 197, 743, 294, 961, 2092, 341, 298, 17957, 35699, 1155, 11, 294, 10421, 2398, 197, 197, 532, 197, 6681...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInvalidRequest(t *testing.T) { reqUrl := createUrl("/wd/hub/session") resp, err := http.Post( reqUrl, "text/plain", strings.NewReader("payload"), ) AssertThat(t, err, Is{nil}) AssertThat(t, resp, Code{http.StatusBadRequest}) }
explode_data.jsonl/65292
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 7928, 1900, 1155, 353, 8840, 836, 8, 341, 24395, 2864, 1669, 1855, 2864, 4283, 6377, 7530, 392, 54760, 1138, 34653, 11, 1848, 1669, 1758, 23442, 1006, 197, 24395, 2864, 345, 197, 197, 66251, 36971, 756, 197, 11355, 819, 6858...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCompare(t *testing.T) { got := CompareOrPanic(integer.FromInt64(10).(types.Numeric), integer.FromInt64(20).(types.Numeric)) if got != Less { t.Errorf("CompareOrPanic(10, 20) = %v want %v", got, Less) } }
explode_data.jsonl/65204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 27374, 1155, 353, 8840, 836, 8, 341, 3174, 354, 1669, 23768, 2195, 47, 31270, 87414, 11439, 1072, 21, 19, 7, 16, 15, 68615, 9242, 2067, 12572, 701, 7546, 11439, 1072, 21, 19, 7, 17, 15, 68615, 9242, 2067, 12572, 1171, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_deployHelper(t *testing.T) { tempdir, helperpath, err := deployHelper() if err != nil { t.Fatal(err) } t.Cleanup(func() { os.RemoveAll(tempdir) }) info, err := os.Stat(tempdir) if err != nil { t.Fatal(err) } if !info.IsDir() { t.Fatalf("expected %s to be a directory", tempdir) } // Verify helper deployment data, err := os.ReadFile(helperpath) if err != nil { t.Fatal(err) } deployed := sha256.Sum256(data) embedded := sha256.Sum256(helper) if !bytes.Equal(deployed[:], embedded[:]) { t.Fatalf("expected sha256 to be the same") } // verify constraint deployment data, err = os.ReadFile(filepath.Join(tempdir, constraintsConfig)) if err != nil { t.Fatal(err) } deployed = sha256.Sum256(data) embedded = sha256.Sum256(constraints) if !bytes.Equal(deployed[:], embedded[:]) { t.Fatalf("expected sha256 to be the same") } }
explode_data.jsonl/67861
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 368 }
[ 2830, 3393, 91890, 5511, 1155, 353, 8840, 836, 8, 341, 16280, 3741, 11, 13137, 2343, 11, 1848, 1669, 10517, 5511, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 3244, 727, 60639, 18552, 368, 341, 197, 25078, 84...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDimensionToMetadataUpdate(t *testing.T) { dimension := types.Dimension{ Name: "my_dimension", Value: "my_dimension_value", Properties: map[string]string{ "this_property_should_be_updated": "with_this_property_value", "this_property_should_be_removed": "", }, Tags: map[string]bool{ "this_tag_should_be_added": true, "this_tag_should_be_removed": false, }, } metadataUpdate := dimensionToMetadataUpdate(dimension) assert.Equal(t, "my_dimension", metadataUpdate.ResourceIDKey) assert.Equal(t, metadata.ResourceID("my_dimension_value"), metadataUpdate.ResourceID) expectedMetadataToUpdate := map[string]string{ "this_property_should_be_updated": "with_this_property_value", } assert.Equal(t, expectedMetadataToUpdate, metadataUpdate.MetadataToUpdate) expectedMetadataToAdd := map[string]string{ "this_tag_should_be_added": "", } assert.Equal(t, expectedMetadataToAdd, metadataUpdate.MetadataToAdd) expectedMetadataToRemove := map[string]string{ "this_property_should_be_removed": "sf_delete_this_property", "this_tag_should_be_removed": "", } assert.Equal(t, expectedMetadataToRemove, metadataUpdate.MetadataToRemove) }
explode_data.jsonl/6185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 428 }
[ 2830, 3393, 26121, 1249, 14610, 4289, 1155, 353, 8840, 836, 8, 341, 2698, 18161, 1669, 4494, 53234, 515, 197, 21297, 25, 220, 330, 2408, 49619, 756, 197, 47399, 25, 330, 2408, 49619, 3142, 756, 197, 197, 7903, 25, 2415, 14032, 30953, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetNodeSize(t *testing.T) { helper := func(data resourceDataInterface, bundles []Bundle, expectedErrMsg, expectedSize string) { size, err := getNodeSize(data, bundles) if len(expectedErrMsg) > 0 { if err == nil || err.Error() != expectedErrMsg { t.Fatalf("Expect error %s but got %s", expectedErrMsg, err) } } else { if err != nil { t.Fatalf("Expect error to be nil but got %s", err) } if size != expectedSize { t.Fatalf("Expect size %s but got %s", expectedSize, size) } } } data := MockResourceData{ map[string]MockChange{}, } bundles := []Bundle{ { Bundle: "ELASTICSEARCH", Options: &BundleOptions{ DedicatedMasterNodes: nil, MasterNodeSize: "", KibanaNodeSize: "", DataNodeSize: "", }, }, } helper(data, bundles, "[ERROR] 'master_node_size' is required in the bundle option.", "") bundles = []Bundle{ { Bundle: "CASSANDRA", Options: &BundleOptions{ DedicatedMasterNodes: nil, MasterNodeSize: "", KibanaNodeSize: "", DataNodeSize: "", }, }, } data.changes["node_size"] = MockChange{ before: "", after: "t3.small", } helper(&data, bundles, "", "t3.small") bundles = []Bundle{ { Bundle: "Kafka", Options: &BundleOptions{ DedicatedMasterNodes: nil, MasterNodeSize: "", KibanaNodeSize: "", DataNodeSize: "", }, }, } data.changes["node_size"] = MockChange{ before: "", after: "t3.small", } helper(&data, bundles, "", "t3.small") dedicatedMaster := true bundles = []Bundle{ { Bundle: "ELASTICSEARCH", Options: &BundleOptions{ DedicatedMasterNodes: &dedicatedMaster, MasterNodeSize: "t3.small", KibanaNodeSize: "", DataNodeSize: "", }, }, } helper(&data, bundles, "[ERROR] Elasticsearch dedicated master is enabled, 'data_node_size' is required in the bundle option.", "") bundles = []Bundle{ { Bundle: "ELASTICSEARCH", Options: &BundleOptions{ DedicatedMasterNodes: &dedicatedMaster, MasterNodeSize: "t3.small", KibanaNodeSize: "", DataNodeSize: "t3.small-v2", }, }, } helper(&data, bundles, "", "t3.small-v2") dedicatedMaster = false bundles = []Bundle{ { Bundle: "ELASTICSEARCH", Options: 
&BundleOptions{ DedicatedMasterNodes: &dedicatedMaster, MasterNodeSize: "t3.small", KibanaNodeSize: "", DataNodeSize: "t3.small-v2", }, }, } helper(&data, bundles, "[ERROR] When 'dedicated_master_nodes' is not true , data_node_size can be either null or equal to master_node_size.", "") bundles = []Bundle{ { Bundle: "ELASTICSEARCH", Options: &BundleOptions{ DedicatedMasterNodes: &dedicatedMaster, MasterNodeSize: "t3.small", KibanaNodeSize: "", DataNodeSize: "t3.small", }, }, } helper(&data, bundles, "", "t3.small") }
explode_data.jsonl/3828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1384 }
[ 2830, 3393, 1949, 1955, 1695, 1155, 353, 8840, 836, 8, 341, 9598, 2947, 1669, 2915, 2592, 5101, 1043, 5051, 11, 48607, 3056, 8409, 11, 3601, 75449, 11, 3601, 1695, 914, 8, 341, 197, 13832, 11, 1848, 1669, 81798, 1695, 2592, 11, 48607,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_SalesReports_Filter_ToQueryParamsMap(t *testing.T) { filter := &SalesReportsFilter{} date, _ := time.Parse("2006-01-02", "2020-05-05") filter.Daily().TypeSales().SubTypeSummary().Version10().SetReportDate(date) qs := make(map[string]interface{}) qs["filter[reportDate]"] = "2020-05-05" qs["filter[reportSubType]"] = string(SalesReportSubTypeSummary) qs["filter[reportType]"] = string(SalesReportTypeSales) qs["filter[frequency]"] = string(SalesReportFrequencyDaily) qs["filter[version]"] = string(SalesReportVersion10) qs["filter[vendorNumber]"] = "" assert.Equal(t, qs, filter.ToQueryParamsMap()) }
explode_data.jsonl/60784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 1098, 3831, 23748, 68935, 38346, 2859, 4870, 2227, 1155, 353, 8840, 836, 8, 341, 50108, 1669, 609, 35418, 23748, 5632, 16094, 44086, 11, 716, 1669, 882, 8937, 445, 17, 15, 15, 21, 12, 15, 16, 12, 15, 17, 497, 330, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteControllerAndExpectations(t *testing.T) { c := clientset.NewForConfigOrDie(&restclient.Config{Host: "", ContentConfig: restclient.ContentConfig{GroupVersion: testapi.Default.GroupVersion()}}) manager := NewReplicationManager(c, controller.NoResyncPeriodFunc, 10, 0) manager.podStoreSynced = alwaysReady rc := newReplicationController(1) manager.rcStore.Store.Add(rc) fakePodControl := controller.FakePodControl{} manager.podControl = &fakePodControl // This should set expectations for the rc manager.syncReplicationController(getKey(rc, t)) validateSyncReplication(t, &fakePodControl, 1, 0) fakePodControl.Clear() // Get the RC key rcKey, err := controller.KeyFunc(rc) if err != nil { t.Errorf("Couldn't get key for object %+v: %v", rc, err) } // This is to simulate a concurrent addPod, that has a handle on the expectations // as the controller deletes it. podExp, exists, err := manager.expectations.GetExpectations(rcKey) if !exists || err != nil { t.Errorf("No expectations found for rc") } manager.rcStore.Delete(rc) manager.syncReplicationController(getKey(rc, t)) if _, exists, err = manager.expectations.GetExpectations(rcKey); exists { t.Errorf("Found expectaions, expected none since the rc has been deleted.") } // This should have no effect, since we've deleted the rc. podExp.Add(-1, 0) manager.podStore.Store.Replace(make([]interface{}, 0), "0") manager.syncReplicationController(getKey(rc, t)) validateSyncReplication(t, &fakePodControl, 0, 0) }
explode_data.jsonl/37495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 499 }
[ 2830, 3393, 6435, 2051, 3036, 17536, 804, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2943, 746, 7121, 2461, 2648, 2195, 18175, 2099, 3927, 2972, 10753, 90, 9296, 25, 7342, 8883, 2648, 25, 2732, 2972, 12614, 2648, 90, 2808, 5637, 25, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTopicPartitions(t *testing.T) { client, err := NewClient(ClientOptions{ URL: "pulsar://localhost:6650", }) assert.Nil(t, err) defer client.Close() // Create topic with 5 partitions err = httpPut("admin/v2/persistent/public/default/TestGetTopicPartitions/partitions", 5) assert.Nil(t, err) partitionedTopic := "persistent://public/default/TestGetTopicPartitions" partitions, err := client.TopicPartitions(partitionedTopic) assert.Nil(t, err) assert.Equal(t, len(partitions), 5) for i := 0; i < 5; i++ { assert.Equal(t, partitions[i], fmt.Sprintf("%s-partition-%d", partitionedTopic, i)) } // Non-Partitioned topic topic := "persistent://public/default/TestGetTopicPartitions-nopartitions" partitions, err = client.TopicPartitions(topic) assert.Nil(t, err) assert.Equal(t, len(partitions), 1) assert.Equal(t, partitions[0], topic) }
explode_data.jsonl/69313
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 324 }
[ 2830, 3393, 26406, 5800, 5930, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 1532, 2959, 46851, 3798, 515, 197, 79055, 25, 330, 79, 14295, 277, 1110, 8301, 25, 21, 21, 20, 15, 756, 197, 8824, 6948, 59678, 1155, 11, 1848, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBitMaskString(t *testing.T) { tests := []struct { in CryptographicUsageMask out string }{ { in: CryptographicUsageMaskSign, out: "Sign", }, { in: CryptographicUsageMask(0x00100000), out: "0x00100000", }, { in: CryptographicUsageMaskSign | CryptographicUsageMaskExport, out: "Sign|Export", }, { in: CryptographicUsageMaskSign | CryptographicUsageMaskExport | CryptographicUsageMask(0x00100000), out: "Sign|Export|0x00100000", }, { in: CryptographicUsageMaskSign | CryptographicUsageMaskExport | CryptographicUsageMask(0x00100000) | CryptographicUsageMask(0x00200000), out: "Sign|Export|0x00300000", }, } for _, testcase := range tests { t.Run(testcase.out, func(t *testing.T) { assert.Equal(t, testcase.out, testcase.in.String()) }) } }
explode_data.jsonl/42190
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 8344, 12686, 703, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 220, 37447, 12679, 14783, 12686, 198, 197, 13967, 914, 198, 197, 59403, 197, 197, 515, 298, 17430, 25, 220, 37447, 12679, 14783, 12686...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInit(t *testing.T) { New([]config.Client{ { Serial: "12FF9503829A3A0DDE9CB87191A472D4", Scopes: []string{"READ", "WRITE"}, }, { Serial: "22FF9503829A3A0DDE9CB87191A472D4", Scopes: []string{"READ"}, }, }) for k, v := range clients { switch k { case 0: if v.Serial != "12FF9503829A3A0DDE9CB87191A472D4" { t.Fatal() } case 1: if v.Serial != "22FF9503829A3A0DDE9CB87191A472D4" { t.Fatal() } } } }
explode_data.jsonl/65811
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 3803, 1155, 353, 8840, 836, 8, 341, 197, 3564, 10556, 1676, 11716, 515, 197, 197, 515, 298, 93658, 25, 330, 16, 17, 1748, 24, 20, 15, 18, 23, 17, 24, 32, 18, 32, 15, 35, 1150, 24, 12979, 23, 22, 16, 24, 16, 32, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestToDNSLinkDNSLabel(t *testing.T) { for _, test := range []struct { in string out string err error }{ {"dnslink.long-name.example.com", "dnslink-long--name-example-com", nil}, {"dnslink.too-long.f1siqrebi3vir8sab33hu5vcy008djegvay6atmz91ojesyjs8lx350b7y7i1nvyw2haytfukfyu2f2x4tocdrfa0zgij6p4zpl4u5o.example.com", "", errors.New("DNSLink representation incompatible with DNS label length limit of 63: dnslink-too--long-f1siqrebi3vir8sab33hu5vcy008djegvay6atmz91ojesyjs8lx350b7y7i1nvyw2haytfukfyu2f2x4tocdrfa0zgij6p4zpl4u5o-example-com")}, } { out, err := toDNSLinkDNSLabel(test.in) if out != test.out || !equalError(err, test.err) { t.Errorf("(%s) returned (%s, %v), expected (%s, %v)", test.in, out, err, test.out, test.err) } } }
explode_data.jsonl/26659
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 1249, 61088, 3939, 61088, 2476, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 17430, 220, 914, 198, 197, 13967, 914, 198, 197, 9859, 1465, 198, 197, 59403, 197, 197, 4913, 45226, 2080, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDefaultSize(t *testing.T) { jaeger := v1.NewJaeger(types.NamespacedName{Name: "my-instance"}) collector := NewCollector(jaeger) dep := collector.Get() assert.Nil(t, dep.Spec.Replicas) // we let Kubernetes define the default }
explode_data.jsonl/59516
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 3675, 1695, 1155, 353, 8840, 836, 8, 341, 197, 5580, 1878, 1669, 348, 16, 7121, 52445, 1878, 52613, 98932, 68552, 675, 63121, 25, 330, 2408, 73655, 1, 8824, 1444, 24605, 269, 1669, 1532, 53694, 3325, 64, 1878, 340, 197, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSocketPVVoteResetDeadline(t *testing.T) { for _, tc := range socketTestCases(t) { func() { var ( chainID = cmn.RandStr(12) validatorEndpoint, serviceEndpoint = testSetupSocketPair( t, chainID, types.NewMockPV(), tc.addr, tc.dialer) ts = time.Now() vType = types.PrecommitType want = &types.Vote{Timestamp: ts, Type: vType} have = &types.Vote{Timestamp: ts, Type: vType} ) defer validatorEndpoint.Stop() defer serviceEndpoint.Stop() time.Sleep(testTimeoutReadWrite2o3) require.NoError(t, serviceEndpoint.privVal.SignVote(chainID, want)) require.NoError(t, validatorEndpoint.SignVote(chainID, have)) assert.Equal(t, want.Signature, have.Signature) // This would exceed the deadline if it was not extended by the previous message time.Sleep(testTimeoutReadWrite2o3) require.NoError(t, serviceEndpoint.privVal.SignVote(chainID, want)) require.NoError(t, validatorEndpoint.SignVote(chainID, have)) assert.Equal(t, want.Signature, have.Signature) }() } }
explode_data.jsonl/77982
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 10286, 48469, 41412, 14828, 83593, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 7575, 2271, 37302, 1155, 8, 341, 197, 29244, 368, 341, 298, 2405, 2399, 571, 197, 8819, 915, 999, 284, 9961, 77, 2013, 437, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckIfDockerIsRunning2(t *testing.T) { // Mock functions isDockerRunning = func() bool { return false } logErrorCallCount := 0 logError = func(message string, exit bool) { logErrorCallCount++ assert.Equal(t, "Docker engine is not running. Please start it and execute Compose Generator again.", message) assert.True(t, exit) } // Execute test EnsureDockerIsRunning() // Assert assert.Equal(t, 1, logErrorCallCount) }
explode_data.jsonl/67934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 3973, 2679, 35, 13659, 3872, 18990, 17, 1155, 353, 8840, 836, 8, 341, 197, 322, 14563, 5746, 198, 19907, 35, 13659, 18990, 284, 2915, 368, 1807, 341, 197, 853, 895, 198, 197, 532, 6725, 1454, 7220, 2507, 1669, 220, 15, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReceiversBuilder_StopAll(t *testing.T) { receivers := make(Receivers) rcvCfg := &configmodels.ReceiverSettings{} receiver := &componenttest.ExampleReceiverProducer{} receivers[rcvCfg] = &builtReceiver{ logger: zap.NewNop(), receiver: receiver, } assert.Equal(t, false, receiver.Stopped) assert.NoError(t, receivers.ShutdownAll(context.Background())) assert.Equal(t, true, receiver.Stopped) }
explode_data.jsonl/50906
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 693, 346, 1945, 3297, 80308, 2403, 1155, 353, 8840, 836, 8, 341, 17200, 346, 1945, 1669, 1281, 7, 693, 346, 1945, 340, 30295, 85, 42467, 1669, 609, 1676, 6507, 2817, 12862, 6086, 31483, 17200, 12862, 1669, 609, 8571, 1944, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecodeExtensions(t *testing.T) { ex := `version https://git-lfs.github.com/spec/v1 ext-0-foo sha256:ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff ext-1-bar sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb ext-2-baz sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa oid sha256:4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393 size 12345` p, err := DecodePointer(bytes.NewBufferString(ex)) assertEqualWithExample(t, ex, nil, err) assertEqualWithExample(t, ex, latest, p.Version) assertEqualWithExample(t, ex, "4d7a214614ab2935c943f9e0ff69d22eadbb8f32b1258daaa5e2ca24d17e2393", p.Oid) assertEqualWithExample(t, ex, int64(12345), p.Size) assertEqualWithExample(t, ex, "sha256", p.OidType) assertEqualWithExample(t, ex, "foo", p.Extensions[0].Name) assertEqualWithExample(t, ex, 0, p.Extensions[0].Priority) assertEqualWithExample(t, ex, "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", p.Extensions[0].Oid) assertEqualWithExample(t, ex, "sha256", p.Extensions[0].OidType) assertEqualWithExample(t, ex, "bar", p.Extensions[1].Name) assertEqualWithExample(t, ex, 1, p.Extensions[1].Priority) assertEqualWithExample(t, ex, "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb", p.Extensions[1].Oid) assertEqualWithExample(t, ex, "sha256", p.Extensions[1].OidType) assertEqualWithExample(t, ex, "baz", p.Extensions[2].Name) assertEqualWithExample(t, ex, 2, p.Extensions[2].Priority) assertEqualWithExample(t, ex, "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", p.Extensions[2].Oid) assertEqualWithExample(t, ex, "sha256", p.Extensions[2].OidType) }
explode_data.jsonl/50437
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 703 }
[ 2830, 3393, 32564, 31282, 1155, 353, 8840, 836, 8, 341, 8122, 1669, 1565, 4366, 3703, 1110, 12882, 70162, 11021, 905, 45389, 5457, 16, 198, 427, 12, 15, 12, 7975, 15870, 17, 20, 21, 25, 53697, 53697, 53697, 53697, 53697, 53697, 53697, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDockerConfigRawConfig(t *testing.T) { rawConfigInput := dockercontainer.Config{ Hostname: "hostname", Domainname: "domainname", NetworkDisabled: true, WorkingDir: "workdir", User: "user", } rawConfig, err := json.Marshal(&rawConfigInput) if err != nil { t.Fatal(err) } testTask := &Task{ Arn: "arn:aws:ecs:us-east-1:012345678910:task/c09f0188-7f87-4b0f-bfc3-16296622b6fe", Family: "myFamily", Version: "1", Containers: []*apicontainer.Container{ { Name: "c1", DockerConfig: apicontainer.DockerConfig{ Config: strptr(string(rawConfig)), }, }, }, } config, configErr := testTask.DockerConfig(testTask.Containers[0], defaultDockerClientAPIVersion) if configErr != nil { t.Fatal(configErr) } expectedOutput := rawConfigInput assertSetStructFieldsEqual(t, expectedOutput, *config) }
explode_data.jsonl/37188
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 394 }
[ 2830, 3393, 35, 13659, 2648, 20015, 2648, 1155, 353, 8840, 836, 8, 341, 76559, 2648, 2505, 1669, 26588, 3586, 10753, 515, 197, 197, 88839, 25, 286, 330, 27806, 756, 197, 10957, 3121, 606, 25, 414, 330, 12204, 606, 756, 197, 197, 12320...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSelect_NamesInSelect(t *testing.T) { s := MustParseSelectStatement("select count(asdf), count(bar) from cpu") a := s.NamesInSelect() if !reflect.DeepEqual(a, []string{"asdf", "bar"}) { t.Fatal("expected names asdf and bar") } }
explode_data.jsonl/24806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 3379, 1604, 971, 641, 3379, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 15465, 14463, 3379, 8636, 445, 1742, 1760, 36951, 2940, 701, 1760, 54630, 8, 504, 17319, 1138, 11323, 1669, 274, 98932, 641, 3379, 741, 743, 753, 34913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLoadDefaultKubeConfig(t *testing.T) { if _, defined := os.LookupEnv("TEST_K8S"); !defined { t.SkipNow() return } config, err := Load("") assert.NoError(t, err) assert.NotNil(t, config) }
explode_data.jsonl/66165
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 5879, 3675, 42, 3760, 2648, 1155, 353, 8840, 836, 8, 341, 743, 8358, 4512, 1669, 2643, 79261, 14359, 445, 10033, 10102, 23, 50, 5038, 753, 9711, 341, 197, 3244, 57776, 7039, 741, 197, 853, 198, 197, 630, 25873, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestGetReservedPorts(t *testing.T) { configMap := &v1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: "hbase-goosefs-values", Namespace: "fluid", }, Data: map[string]string{ "data": valuesConfigMapData, }, } dataSets := []*v1alpha1.Dataset{ { ObjectMeta: metav1.ObjectMeta{ Name: "hbase", Namespace: "fluid", }, Status: v1alpha1.DatasetStatus{ Runtimes: []v1alpha1.Runtime{ { Name: "hbase", Namespace: "fluid", Type: "goosefs", }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "no-runtime", Namespace: "fluid", }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "runtime-type", Namespace: "fluid", }, Status: v1alpha1.DatasetStatus{ Runtimes: []v1alpha1.Runtime{ { Type: "not-goosefs", }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: "no-map", Namespace: "fluid", }, Status: v1alpha1.DatasetStatus{ Runtimes: []v1alpha1.Runtime{ { Type: "goosefs", }, }, }, }, } runtimeObjs := []runtime.Object{} runtimeObjs = append(runtimeObjs, configMap) for _, dataSet := range dataSets { runtimeObjs = append(runtimeObjs, dataSet.DeepCopy()) } fakeClient := fake.NewFakeClientWithScheme(testScheme, runtimeObjs...) wantPorts := []int{20000, 20001, 20002, 20003, 20004, 20005, 20006, 20007, 20008} ports, err := GetReservedPorts(fakeClient) if err != nil { t.Errorf("GetReservedPorts failed.") } if !reflect.DeepEqual(ports, wantPorts) { t.Errorf("gotPorts = %v, want %v", ports, wantPorts) } }
explode_data.jsonl/66524
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 812 }
[ 2830, 3393, 1949, 53003, 68273, 1155, 353, 8840, 836, 8, 341, 25873, 2227, 1669, 609, 85, 16, 10753, 2227, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 414, 330, 71, 3152, 2371, 13752, 3848, 69050, 756, 298, 9082...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPrepare(t *testing.T) { nodeUUID := "33ce8659-7400-4c68-9535-d10766f07a58" cases := []struct { name string ironic *testserver.IronicMock unprepared bool expectedStarted bool expectedDirty bool expectedError bool expectedRequestAfter int }{ { name: "manageable state(haven't clean steps)", ironic: testserver.NewIronic(t).WithDefaultResponses().Node(nodes.Node{ ProvisionState: string(nodes.Manageable), UUID: nodeUUID, }), unprepared: true, expectedStarted: false, expectedRequestAfter: 0, expectedDirty: false, }, // TODO: ADD test case when clean steps aren't empty // { // name: "manageable state(have clean steps)", // ironic: testserver.NewIronic(t).WithDefaultResponses().Node(nodes.Node{ // ProvisionState: string(nodes.Manageable), // UUID: nodeUUID, // }), // unprepared: true, // expectedStarted: true, // expectedRequestAfter: 10, // expectedDirty: true, // }, { name: "cleanFail state(cleaned provision settings)", ironic: testserver.NewIronic(t).WithDefaultResponses().Node(nodes.Node{ ProvisionState: string(nodes.CleanFail), UUID: nodeUUID, }), expectedStarted: false, expectedRequestAfter: 0, expectedDirty: false, }, { name: "cleanFail state(set ironic host to manageable)", ironic: testserver.NewIronic(t).WithDefaultResponses().Node(nodes.Node{ ProvisionState: string(nodes.CleanFail), UUID: nodeUUID, }), unprepared: true, expectedStarted: false, expectedRequestAfter: 10, expectedDirty: true, }, { name: "cleaning state", ironic: testserver.NewIronic(t).WithDefaultResponses().Node(nodes.Node{ ProvisionState: string(nodes.Cleaning), UUID: nodeUUID, }), expectedStarted: false, expectedRequestAfter: 10, expectedDirty: true, }, { name: "cleanWait state", ironic: testserver.NewIronic(t).WithDefaultResponses().Node(nodes.Node{ ProvisionState: string(nodes.CleanWait), UUID: nodeUUID, }), expectedStarted: false, expectedRequestAfter: 10, expectedDirty: true, }, { name: "manageable state(manual clean finished)", ironic: 
testserver.NewIronic(t).WithDefaultResponses().Node(nodes.Node{ ProvisionState: string(nodes.Manageable), UUID: nodeUUID, }), expectedStarted: false, expectedRequestAfter: 0, expectedDirty: false, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { if tc.ironic != nil { tc.ironic.Start() defer tc.ironic.Stop() } inspector := testserver.NewInspector(t).Ready().WithIntrospection(nodeUUID, introspection.Introspection{ Finished: false, }) inspector.Start() defer inspector.Stop() host := makeHost() publisher := func(reason, message string) {} auth := clients.AuthConfig{Type: clients.NoAuth} prov, err := newProvisionerWithSettings(host, bmc.Credentials{}, publisher, tc.ironic.Endpoint(), auth, inspector.Endpoint(), auth, ) if err != nil { t.Fatalf("could not create provisioner: %s", err) } prov.status.ID = nodeUUID result, started, err := prov.Prepare(tc.unprepared) assert.Equal(t, tc.expectedStarted, started) assert.Equal(t, tc.expectedDirty, result.Dirty) assert.Equal(t, time.Second*time.Duration(tc.expectedRequestAfter), result.RequeueAfter) if !tc.expectedError { assert.NoError(t, err) } else { assert.Error(t, err) } }) } }
explode_data.jsonl/48857
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1678 }
[ 2830, 3393, 50590, 1155, 353, 8840, 836, 8, 341, 20831, 24754, 1669, 330, 18, 18, 346, 23, 21, 20, 24, 12, 22, 19, 15, 15, 12, 19, 66, 21, 23, 12, 24, 20, 18, 20, 1737, 16, 15, 22, 21, 21, 69, 15, 22, 64, 20, 23, 698, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPathCreatePath(t *testing.T) { peerP := PathCreatePeer() msg := updateMsgP1() updateMsgP := msg.Body.(*bgp.BGPUpdate) nlriList := updateMsgP.NLRI pathAttributes := updateMsgP.PathAttributes nlri_info := nlriList[0] path := NewPath(peerP[0], nlri_info, false, pathAttributes, time.Now(), false) assert.NotNil(t, path) }
explode_data.jsonl/57483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 1820, 4021, 1820, 1155, 353, 8840, 836, 8, 341, 197, 16537, 47, 1669, 7933, 4021, 30888, 741, 21169, 1669, 2647, 6611, 47, 16, 741, 27175, 6611, 47, 1669, 3750, 20934, 41399, 12220, 79, 1785, 24430, 4289, 340, 9038, 75, 46...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWhiterectangledetector_containsBlackPoint(t *testing.T) { img, _ := gozxing.NewBitMatrix(20, 20) img.Set(10, 10) detector, _ := NewWhiteRectangleDetectorFromImage(img) if !detector.containsBlackPoint(5, 15, 10, true) { t.Fatalf("containsBlackPoint(5, 15, 10, true) must be true") } if !detector.containsBlackPoint(5, 15, 10, false) { t.Fatalf("containsBlackPoint(5, 15, 10, false) must be true") } if detector.containsBlackPoint(5, 15, 5, true) { t.Fatalf("containsBlackPoint(5, 15, 5, true) must be false") } }
explode_data.jsonl/49100
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 1639, 2015, 439, 38940, 295, 1256, 63598, 14417, 2609, 1155, 353, 8840, 836, 8, 341, 39162, 11, 716, 1669, 728, 66700, 287, 7121, 8344, 6689, 7, 17, 15, 11, 220, 17, 15, 340, 39162, 4202, 7, 16, 15, 11, 220, 16, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIterateQueueJobs(t *testing.T) { withRepository(func(r *RedisJobRepository) { addedJobs := []*api.Job{} for i := 0; i < 10; i++ { addedJobs = append(addedJobs, addTestJob(t, r, "q1")) } iteratedJobs := []*api.Job{} err := r.IterateQueueJobs("q1", func(j *api.Job) { iteratedJobs = append(iteratedJobs, j) }) assert.Nil(t, err) for i, j := range addedJobs { assert.Equal(t, j.Id, iteratedJobs[i].Id) } }) }
explode_data.jsonl/32068
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 8537, 349, 7554, 40667, 1155, 353, 8840, 836, 8, 341, 46948, 4624, 18552, 2601, 353, 48137, 12245, 4624, 8, 341, 197, 12718, 291, 40667, 1669, 29838, 2068, 45293, 16094, 197, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReverseByte(t *testing.T) { type args struct { s []byte } tests := []struct { name string args args want []byte }{ {name: "testcase1", args: args{s: []byte{1, 2, 3, 4, 5}}, want: []byte{5, 4, 3, 2, 1}}, {name: "testcase2", args: args{s: []byte{}}, want: []byte{}}, {name: "testcase3", args: args{s: []byte{1, 2, 3, 4, 5, 6}}, want: []byte{6, 5, 4, 3, 2, 1}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ReverseByte(tt.args.s) if reflect.DeepEqual(tt.args.s, tt.want) != true { t.Errorf("ReverseByte() got = %v, want %v", tt.args.s, tt.want) } }) } }
explode_data.jsonl/73908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 45695, 7153, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 3056, 3782, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 3056, 3782, 198, 197, 59403, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDatabaseWriteTaggedBatchNoNamespace(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() d, mapCh, _ := newTestDatabase(t, ctrl, BootstrapNotStarted) defer func() { close(mapCh) }() require.NoError(t, d.Open()) var ( notExistNamespace = ident.StringID("not-exist-namespace") batchSize = 100 ) _, err := d.BatchWriter(notExistNamespace, batchSize) require.Error(t, err) err = d.WriteTaggedBatch(nil, notExistNamespace, nil, nil) require.Error(t, err) require.NoError(t, d.Close()) }
explode_data.jsonl/46541
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 5988, 7985, 5668, 3556, 21074, 2753, 22699, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 2698, 11, 2415, 1143, 11, 716, 1669, 501, 2271, 5988, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestElasticBoolQuery(t *testing.T) { for _, test := range elasticCases { t.Run(test.name, func(t *testing.T) { runToElasticBoolQueryTestCase(t, test) }) } }
explode_data.jsonl/58005
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 36, 51179, 11233, 2859, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 35473, 37302, 341, 197, 3244, 16708, 8623, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 56742, 1249, 36, 51179, 11233, 2859, 16458, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNibbleN(t *testing.T) { for n, test := range nnTestEntries { vm := &MinFuckVM{Code: test.code} for i, r := range test.read { g, err := vm.nibbleN(uint32(r)) if err != nil { if !test.err { t.Errorf("Test #%d failed: VM should not return error while nibbleN(): %v", n+1, err) } return } if !bytes.Equal(test.gets[i], g) { t.Errorf("Test #%d failed: result mismatch\ngot %s\nexpected %s", n+1, hex.EncodeToString(g), hex.EncodeToString(test.gets[i])) return } } } }
explode_data.jsonl/5841
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 29125, 891, 45, 1155, 353, 8840, 836, 8, 341, 2023, 308, 11, 1273, 1669, 2088, 10883, 2271, 24533, 341, 197, 54879, 1669, 609, 6217, 76374, 11187, 90, 2078, 25, 1273, 10210, 532, 197, 2023, 600, 11, 435, 1669, 2088, 1273, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDecodingLayerParserFullTCPPacket(t *testing.T) { dlp := gopacket.NewDecodingLayerParser(LayerTypeEthernet, &Ethernet{}, &IPv4{}, &TCP{}, &gopacket.Payload{}) decoded := make([]gopacket.LayerType, 1) err := dlp.DecodeLayers(testSimpleTCPPacket, &decoded) if err != nil { t.Error("Error from dlp parser: ", err) } if len(decoded) != 4 { t.Error("Expected 4 layers parsed, instead got ", len(decoded)) } }
explode_data.jsonl/42259
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 4900, 3700, 9188, 6570, 9432, 7749, 4406, 5709, 1155, 353, 8840, 836, 8, 341, 2698, 13545, 1669, 342, 453, 5709, 7121, 4900, 3700, 9188, 6570, 4957, 1135, 929, 98006, 11, 609, 98006, 22655, 609, 58056, 19, 22655, 609, 49896,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGenerateRouter(t *testing.T) { router := NewRouter() router.AddChild([]string{"p1", "p2", "p3"}, &ApiGroup{ Post: &Api{ Description: "Hello World", }, }) assert.Equal(t, router.Children["p1"].Path, "p1") assert.Equal(t, router.Children["p1"].Children["p2"].Path, "p2") assert.Equal(t, router.Children["p1"].Children["p2"].Children["p3"].Path, "p3") assert.Equal(t, router.Children["p1"].Children["p2"].Children["p3"].Api.Post.Description, "Hello World") }
explode_data.jsonl/71587
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 31115, 9523, 1155, 353, 8840, 836, 8, 341, 67009, 1669, 1532, 9523, 741, 67009, 1904, 3652, 10556, 917, 4913, 79, 16, 497, 330, 79, 17, 497, 330, 79, 18, 14345, 609, 6563, 2808, 515, 197, 197, 4133, 25, 609, 6563, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIssue1223(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists testjson") tk.MustExec("CREATE TABLE testjson (j json DEFAULT NULL) ENGINE=InnoDB DEFAULT CHARSET=utf8;") tk.MustExec(`INSERT INTO testjson SET j='{"test":3}';`) tk.MustExec(`INSERT INTO testjson SET j='{"test":0}';`) tk.MustExec(`insert into testjson set j='{"test":"0"}';`) tk.MustExec(`insert into testjson set j='{"test":0.0}';`) tk.MustExec(`INSERT INTO testjson SET j='{"test":"aaabbb"}';`) tk.MustExec(`INSERT INTO testjson SET j='{"test":3.1415}';`) tk.MustExec(`INSERT INTO testjson SET j='{"test":[]}';`) tk.MustExec(`INSERT INTO testjson SET j='{"test":[1,2]}';`) tk.MustExec(`INSERT INTO testjson SET j='{"test":["b","c"]}';`) tk.MustExec(`INSERT INTO testjson SET j='{"test":{"ke":"val"}}';`) tk.MustExec(`insert into testjson set j='{"test":"2015-07-27 09:43:47"}';`) tk.MustExec(`insert into testjson set j='{"test":"0000-00-00 00:00:00"}';`) tk.MustExec(`insert into testjson set j='{"test":"0778"}';`) tk.MustExec(`insert into testjson set j='{"test":"0000"}';`) tk.MustExec(`insert into testjson set j='{"test":null}';`) tk.MustExec(`insert into testjson set j=null;`) tk.MustExec(`insert into testjson set j='{"test":[null]}';`) tk.MustExec(`insert into testjson set j='{"test":true}';`) tk.MustExec(`insert into testjson set j='{"test":false}';`) tk.MustExec(`insert into testjson set j='""';`) tk.MustExec(`insert into testjson set j='null';`) tk.MustExec(`insert into testjson set j='0';`) tk.MustExec(`insert into testjson set j='"0"';`) tk.MustQuery("SELECT * FROM testjson WHERE JSON_EXTRACT(j,'$.test');").Check(testkit.Rows(`{"test": 3}`, `{"test": "0"}`, `{"test": "aaabbb"}`, `{"test": 3.1415}`, `{"test": []}`, `{"test": [1, 2]}`, `{"test": ["b", "c"]}`, `{"test": {"ke": "val"}}`, `{"test": "2015-07-27 09:43:47"}`, `{"test": "0000-00-00 00:00:00"}`, `{"test": "0778"}`, `{"test": 
"0000"}`, `{"test": null}`, `{"test": [null]}`, `{"test": true}`, `{"test": false}`)) tk.MustQuery("select * from testjson where j;").Check(testkit.Rows(`{"test": 3}`, `{"test": 0}`, `{"test": "0"}`, `{"test": 0}`, `{"test": "aaabbb"}`, `{"test": 3.1415}`, `{"test": []}`, `{"test": [1, 2]}`, `{"test": ["b", "c"]}`, `{"test": {"ke": "val"}}`, `{"test": "2015-07-27 09:43:47"}`, `{"test": "0000-00-00 00:00:00"}`, `{"test": "0778"}`, `{"test": "0000"}`, `{"test": null}`, `{"test": [null]}`, `{"test": true}`, `{"test": false}`, `""`, "null", `"0"`)) tk.MustExec("insert into mysql.expr_pushdown_blacklist values('json_extract','tikv','');") tk.MustExec("admin reload expr_pushdown_blacklist;") tk.MustQuery("SELECT * FROM testjson WHERE JSON_EXTRACT(j,'$.test');").Check(testkit.Rows("{\"test\": 3}", "{\"test\": \"0\"}", "{\"test\": \"aaabbb\"}", "{\"test\": 3.1415}", "{\"test\": []}", "{\"test\": [1, 2]}", "{\"test\": [\"b\", \"c\"]}", "{\"test\": {\"ke\": \"val\"}}", "{\"test\": \"2015-07-27 09:43:47\"}", "{\"test\": \"0000-00-00 00:00:00\"}", "{\"test\": \"0778\"}", "{\"test\": \"0000\"}", "{\"test\": null}", "{\"test\": [null]}", "{\"test\": true}", "{\"test\": false}")) tk.MustQuery("select * from testjson where j;").Check(testkit.Rows(`{"test": 3}`, `{"test": 0}`, `{"test": "0"}`, `{"test": 0}`, `{"test": "aaabbb"}`, `{"test": 3.1415}`, `{"test": []}`, `{"test": [1, 2]}`, `{"test": ["b", "c"]}`, `{"test": {"ke": "val"}}`, `{"test": "2015-07-27 09:43:47"}`, `{"test": "0000-00-00 00:00:00"}`, `{"test": "0778"}`, `{"test": "0000"}`, `{"test": null}`, `{"test": [null]}`, `{"test": true}`, `{"test": false}`, `""`, "null", `"0"`)) }
explode_data.jsonl/65520
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1564 }
[ 2830, 3393, 42006, 16, 17, 17, 18, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50463, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCachedConn_QueryRowIndex_HasCache(t *testing.T) { resetStats() r, clean, err := redistest.CreateRedis() assert.Nil(t, err) defer clean() c := NewNodeConn(dummySqlConn{}, r, cache.WithExpiry(time.Second*10), cache.WithNotFoundExpiry(time.Second)) var str string r.Set("index", `"primary"`) err = c.QueryRowIndex(&str, "index", func(s interface{}) string { return fmt.Sprintf("%s/1234", s) }, func(conn sqlx.SqlConn, v interface{}) (interface{}, error) { assert.Fail(t, "should not go here") return "primary", nil }, func(conn sqlx.SqlConn, v, primary interface{}) error { *v.(*string) = "xin" assert.Equal(t, "primary", primary) return nil }) assert.Nil(t, err) assert.Equal(t, "xin", str) val, err := r.Get("index") assert.Nil(t, err) assert.Equal(t, `"primary"`, val) val, err = r.Get("primary/1234") assert.Nil(t, err) assert.Equal(t, `"xin"`, val) }
explode_data.jsonl/64123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 377 }
[ 2830, 3393, 70293, 9701, 48042, 68919, 2039, 300, 8233, 1155, 353, 8840, 836, 8, 341, 70343, 16635, 741, 7000, 11, 4240, 11, 1848, 1669, 2518, 380, 477, 7251, 48137, 741, 6948, 59678, 1155, 11, 1848, 340, 16867, 4240, 2822, 1444, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDay4(t *testing.T) { fDemo, err := os.Open("testdata/day4_demo.txt") if err != nil { t.Fatalf("got error: %v\n", err) } fReal, err := os.Open("testdata/day4_task1.txt") if err != nil { t.Fatalf("got error: %v\n", err) } tests := []struct { reader io.Reader fieldsExists int valid int }{ { reader: fDemo, fieldsExists: 2, valid: 2, }, { reader: fReal, fieldsExists: 247, valid: 145, }, } for i, tc := range tests { t.Run(fmt.Sprintf("[%d] %d", i, tc.fieldsExists), func(t *testing.T) { documents := ScanDocuments(tc.reader) fieldsExists := 0 valid := 0 for _, document := range documents { if document.MissingFields() == nil { fieldsExists++ } if document.Valid() { valid++ } } if tc.fieldsExists != fieldsExists { t.Errorf("expected %d documents with the correct fields, found %d", tc.fieldsExists, fieldsExists) } if tc.valid != valid { t.Errorf("expected %d valid documents, found %d", tc.valid, valid) } }) } }
explode_data.jsonl/75618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 509 }
[ 2830, 3393, 10159, 19, 1155, 353, 8840, 836, 8, 341, 1166, 37413, 11, 1848, 1669, 2643, 12953, 445, 92425, 44739, 19, 47830, 3909, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 22390, 1465, 25, 1018, 85, 1699, 497, 1848, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestOperatorSemver(t *testing.T) { testCases := []checkTestCase{ { ctx: &context.Context{Properties: map[string]string{"v": "1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_EQ", Value: "3.2.1"}}, expected: false, }, { ctx: &context.Context{Properties: map[string]string{"v": "1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_EQ", Value: "1.2.3"}}, expected: true, }, { ctx: &context.Context{Properties: map[string]string{"v": "1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_EQ", Value: "v1.2.3"}}, expected: true, }, { ctx: &context.Context{Properties: map[string]string{"v": "v1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_EQ", Value: "v3.2.1"}}, expected: false, }, { ctx: &context.Context{Properties: map[string]string{"v": "v1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_EQ", Value: "v1.2.3"}}, expected: true, }, { ctx: &context.Context{Properties: map[string]string{"v": "v1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_EQ", Value: "v1.2.3"}}, expected: true, }, { ctx: &context.Context{Properties: map[string]string{"v": "v1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_LT", Value: "v1.2.3"}}, expected: false, }, { ctx: &context.Context{Properties: map[string]string{"v": "v3.2.1"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_LT", Value: "v1.2.3"}}, expected: false, }, { ctx: &context.Context{Properties: map[string]string{"v": "v1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_LT", Value: "v3.2.1"}}, expected: true, }, { ctx: &context.Context{Properties: map[string]string{"v": "v3.2.1"}}, constraints: []api.Constraint{{ContextName: "v", Operator: "SEMVER_GT", Value: "v1.2.3"}}, expected: true, }, { ctx: &context.Context{Properties: map[string]string{"v": "v1.2.3"}}, constraints: []api.Constraint{{ContextName: "v", 
Operator: "SEMVER_GT", Value: "v3.2.1"}}, expected: false, }, } for _, tc := range testCases { if ok, err := Check(tc.ctx, tc.constraints); err != nil { t.Fatal(err) } else { assert.Equal(t, tc.expected, ok) } } }
explode_data.jsonl/67492
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1197 }
[ 2830, 3393, 18461, 29499, 423, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 2028, 16458, 515, 197, 197, 515, 298, 20985, 25, 260, 609, 2147, 9328, 90, 7903, 25, 2415, 14032, 30953, 4913, 85, 788, 330, 16, 13, 17, 13, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrimitivePutInt(t *testing.T) { client := newPrimitiveClient() a, b := int32(-1), int32(2) result, err := client.PutInt(context.Background(), IntWrapper{Field1: &a, Field2: &b}, nil) if err != nil { t.Fatalf("PutInt: %v", err) } if s := result.RawResponse.StatusCode; s != http.StatusOK { t.Fatalf("unexpected status code %d", s) } }
explode_data.jsonl/61668
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 33313, 19103, 1072, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 501, 33313, 2959, 741, 11323, 11, 293, 1669, 526, 18, 17, 4080, 16, 701, 526, 18, 17, 7, 17, 340, 9559, 11, 1848, 1669, 2943, 39825, 1072, 5378, 19047, 1507,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIDDriverScanError(t *testing.T) { id := ID{} err := id.Scan(0) assert.EqualError(t, err, "xid: scanning unsupported type: int") err = id.Scan("0") assert.EqualError(t, err, strInvalidID) }
explode_data.jsonl/58928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 915, 11349, 26570, 1454, 1155, 353, 8840, 836, 8, 341, 15710, 1669, 3034, 16094, 9859, 1669, 877, 54874, 7, 15, 340, 6948, 12808, 1454, 1155, 11, 1848, 11, 330, 87, 307, 25, 35101, 40409, 943, 25, 526, 1138, 9859, 284, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_UpdateBatch(t *testing.T) { helper := sysutil.NewFileTestUtil(t) helper.CreateCgroupFile("/", sysutil.CPUShares) helper.CreateFile(commonTestFile) absFile := path.Join(helper.TempDir, commonTestFile) tests := []struct { name string resources []ResourceUpdater }{ { name: "test_update_valid", resources: []ResourceUpdater{ NewCommonCgroupResourceUpdater(GroupOwnerRef("root"), "/", sysutil.CPUShares, "1024"), NewCommonResourceUpdater(absFile, "19"), }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Logf("Cur CgroupFile filepath %v", sysutil.Conf.CgroupRootDir) rm := NewResourceUpdateExecutor("test", 1) stop := make(chan struct{}) rm.Run(stop) defer func() { stop <- struct{}{} }() rm.UpdateBatch(tt.resources...) got := getActualResources(tt.resources) equalResourceMap(t, tt.resources, got, "checkCurrentResource") }) } }
explode_data.jsonl/56861
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 372 }
[ 2830, 3393, 47393, 21074, 1155, 353, 8840, 836, 8, 1476, 9598, 2947, 1669, 5708, 1314, 7121, 1703, 2271, 2742, 1155, 692, 9598, 2947, 7251, 34, 4074, 1703, 35460, 5708, 1314, 727, 6325, 73015, 340, 9598, 2947, 7251, 1703, 57802, 2271, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHasChild(t *testing.T) { node := NodeStatus{ Children: []string{"a", "b"}, } assert.True(t, node.HasChild("a")) assert.False(t, node.HasChild("c")) assert.False(t, node.HasChild("")) }
explode_data.jsonl/26061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 10281, 3652, 1155, 353, 8840, 836, 8, 341, 20831, 1669, 6018, 2522, 515, 197, 197, 11539, 25, 3056, 917, 4913, 64, 497, 330, 65, 7115, 197, 532, 6948, 32443, 1155, 11, 2436, 16152, 3652, 445, 64, 5455, 6948, 50757, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFloat32_Each(t *testing.T) { testcases := []struct { name string origin Float32 }{ { name: "test Float32 Each", origin: map[float32]struct{}{1: {}, 2: {}, 3: {}}, }, } for _, tc := range testcases { t.Logf("running scenario: %s", tc.name) var expect []float32 tc.origin.Each(func(i float32) { expect = append(expect, i) }) validateFloat32(t, tc.origin, expect) } }
explode_data.jsonl/60112
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 5442, 18, 17, 2089, 610, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 197, 8611, 13001, 18, 17, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 256, 330, 1944, 13001,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApplyTemplate(t *testing.T) { o := graphite.Options{ Separator: "_", Templates: []string{"current.* measurement.measurement"}, } p, err := graphite.NewParserWithOptions(o) if err != nil { t.Fatalf("unexpected error creating parser, got %v", err) } measurement, _, _, _ := p.ApplyTemplate("current.users") if measurement != "current_users" { t.Errorf("Parser.ApplyTemplate unexpected result. got %s, exp %s", measurement, "current_users") } }
explode_data.jsonl/32190
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 28497, 7275, 1155, 353, 8840, 836, 8, 341, 22229, 1669, 94173, 22179, 515, 197, 7568, 91640, 25, 9000, 756, 197, 10261, 76793, 25, 3056, 917, 4913, 3231, 4908, 18662, 17326, 24359, 7115, 197, 532, 3223, 11, 1848, 1669, 94173...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParseExecutableError(t *testing.T) { root := ggql.NewRoot(nil) for _, d := range []parseTestData{ {src: "query dup { title } query dup { origin }", line: 1, col: 28}, {src: ` { title } fragment dup on Artist { title } fragment dup on Artist { origin }`, line: 4, col: 11}, {src: "{ title } { artists }", line: 1, col: 12}, {src: "bogus { title }", line: 1, col: 2}, {src: "{ (a: Int) }", line: 1, col: 4}, {src: "{ ..bad } fragment bad on Query { title }", line: 1, col: 6}, {src: "{ ... on Bad { a }}", line: 1, col: 10}, {src: "query Qq($a: Int = 3", line: 1, col: 21}, {src: "query Qq(a: Int = 3) { title }", line: 1, col: 11}, {src: "query Qq($: Int = 3) { title }", line: 1, col: 12}, {src: "query Qq($a Int = 3) { title }", line: 1, col: 14}, {src: `query Qq($a: Int = "x") { artist(name: $a`, line: 1, col: 42}, {src: `{artists{songs:{name}}}`, line: 1, col: 11}, } { _, err := root.ParseExecutableString(d.src) checkNotNil(t, err, "ParseExecutableString(%s) should fail.", d.src) var ge *ggql.Error var ges ggql.Errors switch { case errors.As(err, &ge): checkEqual(t, d.line, ge.Line, "line number mismatch for %s. %s", d.src, ge) checkEqual(t, d.col, ge.Column, "column number mismatch for %s. %s", d.src, ge) case errors.As(err, &ges): checkEqual(t, 1, len(ges), "ParseExecutableString(%s) should return one error. %s", d.src, err) var e2 *ggql.Error errors.As(ges[0], &e2) checkNotNil(t, e2, "ParseExecutableString(%s) should return a ggql.Errors with one ggql.Error or not a %T. %s", d.src, ges[0], ges[0]) checkEqual(t, d.line, e2.Line, "line number mismatch for %s. %s", d.src, e2) checkEqual(t, d.col, e2.Column, "column number mismatch for %s. %s", d.src, e2) default: t.Fatalf("\nParseExecutableString(%s) should return a *ggql.Error or ggql.Errors not a %T. %s", d.src, err, err) } } }
explode_data.jsonl/48189
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 844 }
[ 2830, 3393, 14463, 94772, 1454, 1155, 353, 8840, 836, 8, 341, 33698, 1669, 52034, 1470, 7121, 8439, 27907, 340, 2023, 8358, 294, 1669, 2088, 3056, 6400, 83920, 515, 197, 197, 90, 3548, 25, 330, 1631, 22737, 314, 2265, 335, 3239, 22737, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBootstrapper(t *testing.T) { config := genesisconfig.Load(genesisconfig.SampleDevModeSoloProfile) t.Run("New bootstrapper", func(t *testing.T) { bootstrapper := New(config) assert.NotNil(t, bootstrapper.GenesisBlock(), "genesis block should not be nil") assert.NotNil(t, bootstrapper.GenesisBlockForChannel("channelID"), "genesis block for channel should not be nil") }) t.Run("New bootstrapper nil orderer", func(t *testing.T) { config.Orderer = nil newBootstrapperNilOrderer := func() { New(config) } assert.Panics(t, newBootstrapperNilOrderer) }) }
explode_data.jsonl/78133
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 17919, 495, 3106, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 59366, 1676, 13969, 36884, 13774, 1676, 76266, 14592, 3636, 89299, 8526, 340, 3244, 16708, 445, 3564, 10459, 495, 3106, 497, 2915, 1155, 353, 8840, 836, 8, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_GetMd5String_1(t *testing.T) { str := "123456789" md5str := GetMd5String(str) t.Log("GetMd5String:", md5str) test.Equal(t, "25f9e794323b453885f5181f1b624d0b", md5str) }
explode_data.jsonl/60541
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 13614, 72529, 20, 703, 62, 16, 1155, 353, 8840, 836, 8, 341, 11355, 1669, 330, 16, 17, 18, 19, 20, 21, 22, 23, 24, 698, 84374, 20, 495, 1669, 2126, 72529, 20, 703, 4199, 340, 3244, 5247, 445, 1949, 72529, 20, 703, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCommandFlags(t *testing.T) { cmd := cli.NewCommand("unit test") cases := []struct { giveName string wantShorthand string wantDefault string }{ {giveName: "config", wantShorthand: "c", wantDefault: ".rr.yaml"}, {giveName: "WorkDir", wantShorthand: "w", wantDefault: ""}, {giveName: "dotenv", wantShorthand: "", wantDefault: ""}, {giveName: "debug", wantShorthand: "d", wantDefault: "false"}, {giveName: "override", wantShorthand: "o", wantDefault: "[]"}, } for _, tt := range cases { tt := tt t.Run(tt.giveName, func(t *testing.T) { flag := cmd.Flag(tt.giveName) if flag == nil { assert.Failf(t, "flag not found", "flag [%s] was not found", tt.giveName) return } assert.Equal(t, tt.wantShorthand, flag.Shorthand) assert.Equal(t, tt.wantDefault, flag.DefValue) }) } }
explode_data.jsonl/17536
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 4062, 9195, 1155, 353, 8840, 836, 8, 341, 25920, 1669, 21348, 7121, 4062, 445, 3843, 1273, 5130, 1444, 2264, 1669, 3056, 1235, 341, 197, 3174, 533, 675, 414, 914, 198, 197, 50780, 2016, 61679, 914, 198, 197, 50780, 3675, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHTTPMethodToAction(t *testing.T) { tests := []struct { method string exceptedAction string }{ { method: "GET", exceptedAction: ReadAction, }, { method: "POST", exceptedAction: AllAction, }, { method: "UNKNOWN", exceptedAction: ReadAction, }, } for _, tt := range tests { action := HTTPMethodToAction(tt.method) if action != tt.exceptedAction { t.Errorf("HttpMethodToAction(%v) = %v, want %v", tt.method, action, tt.exceptedAction) } } }
explode_data.jsonl/52242
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 9230, 3523, 1249, 2512, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42257, 260, 914, 198, 197, 34599, 291, 2512, 914, 198, 197, 59403, 197, 197, 515, 298, 42257, 25, 260, 330, 3806, 756, 298, 34599, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWriteHumanReadableStructOptionalFields(t *testing.T) { typ := MakeStructType("S1", StructField{"a", BoolType, false}, StructField{"b", BoolType, true}) assertWriteHRSEqual(t, "Struct S1 {\n a: Bool,\n b?: Bool,\n}", typ) }
explode_data.jsonl/60910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 7985, 33975, 57938, 9422, 15309, 8941, 1155, 353, 8840, 836, 8, 341, 25314, 1669, 7405, 9422, 929, 445, 50, 16, 756, 197, 197, 9422, 1877, 4913, 64, 497, 12608, 929, 11, 895, 1583, 197, 197, 9422, 1877, 4913, 65, 497, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_StoreRepo_Update exercises the pg store repository's Update method
// against a sqlmock connection. It covers: the exec itself failing, the
// driver failing to report affected rows, an affected-row count other than
// one, and the successful update.
func Test_StoreRepo_Update(t *testing.T) {
	s := sample.NewStore()
	testCases := []struct {
		name        string
		arg         *domain.Store
		expectedErr bool
		// prepare programs the mock with the SQL exchange expected by the case.
		prepare func(mock sqlmock.Sqlmock)
	}{
		{
			name:        "failure_exec_query_returns_error",
			arg:         s,
			expectedErr: true,
			prepare: func(mock sqlmock.Sqlmock) {
				query := `UPDATE stores SET created_at=$1,updated_at=$2,name=$3,description=$4,status=$5,user_id=$6,account_id=$7,category_id=$8,tags=$9,lat=$10,lng=$11,image=$12 WHERE id = $13`
				mock.ExpectExec(regexp.QuoteMeta(query)).WithArgs(s.CreatedAt, s.UpdatedAt, s.Name, s.Description, s.Status, s.UserID, s.AccountID, s.CategoryID, s.Tags, s.Position.Lat, s.Position.Lng, s.Image, s.ID).WillReturnError(errors.New("unexpected error"))
			},
		},
		{
			name:        "failure_get_affected_row_returns_error",
			arg:         s,
			expectedErr: true,
			prepare: func(mock sqlmock.Sqlmock) {
				query := `UPDATE stores SET created_at=$1,updated_at=$2,name=$3,description=$4,status=$5,user_id=$6,account_id=$7,category_id=$8,tags=$9,lat=$10,lng=$11,image=$12 WHERE id = $13`
				mock.ExpectExec(regexp.QuoteMeta(query)).WithArgs(s.CreatedAt, s.UpdatedAt, s.Name, s.Description, s.Status, s.UserID, s.AccountID, s.CategoryID, s.Tags, s.Position.Lat, s.Position.Lng, s.Image, s.ID).WillReturnResult(sqlmock.NewErrorResult(errors.New("unexpected error")))
			},
		},
		{
			name:        "failure_returns_nvalid_number_of_affected_rows",
			arg:         s,
			expectedErr: true,
			prepare: func(mock sqlmock.Sqlmock) {
				query := `UPDATE stores SET created_at=$1,updated_at=$2,name=$3,description=$4,status=$5,user_id=$6,account_id=$7,category_id=$8,tags=$9,lat=$10,lng=$11,image=$12 WHERE id = $13`
				// Two affected rows where exactly one is expected.
				mock.ExpectExec(regexp.QuoteMeta(query)).WithArgs(s.CreatedAt, s.UpdatedAt, s.Name, s.Description, s.Status, s.UserID, s.AccountID, s.CategoryID, s.Tags, s.Position.Lat, s.Position.Lng, s.Image, s.ID).WillReturnResult(sqlmock.NewResult(1, 2))
			},
		},
		{
			name: "success",
			arg:  s,
			prepare: func(mock sqlmock.Sqlmock) {
				query := `UPDATE stores SET 
created_at=$1,updated_at=$2,name=$3,description=$4,status=$5,user_id=$6,account_id=$7,category_id=$8,tags=$9,lat=$10,lng=$11,image=$12 WHERE id = $13`
				mock.ExpectExec(regexp.QuoteMeta(query)).WithArgs(s.CreatedAt, s.UpdatedAt, s.Name, s.Description, s.Status, s.UserID, s.AccountID, s.CategoryID, s.Tags, s.Position.Lat, s.Position.Lng, s.Image, s.ID).WillReturnResult(sqlmock.NewResult(1, 1))
			},
		},
	}
	for i := range testCases {
		tc := testCases[i]
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()
			db, mock, err := sqlmock.New()
			assert.NoError(t, err)
			repo := pg.NewStoreRepository(db)
			tc.prepare(mock)
			err = repo.Update(context.TODO(), tc.arg)
			if tc.expectedErr {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
		})
	}
}
explode_data.jsonl/20620
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1307 }
[ 2830, 3393, 92684, 25243, 47393, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 6077, 7121, 6093, 741, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 47903, 260, 353, 12204, 38047, 198, 197, 42400, 7747, 1807, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInt2Hex(t *testing.T) { assert.EqualValues(t, "64", Int2Hex(100)) assert.EqualValues(t, "6400", Int2Hex(25600)) assert.EqualValues(t, "ffffffffffffffff", Int2Hex(math.MaxUint64)) assert.EqualValues(t, "ffffffff", Int2Hex(math.MaxUint32)) }
explode_data.jsonl/31333
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 1072, 17, 20335, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 6227, 1155, 11, 330, 21, 19, 497, 1333, 17, 20335, 7, 16, 15, 15, 1171, 6948, 12808, 6227, 1155, 11, 330, 21, 19, 15, 15, 497, 1333, 17, 20335, 7, 17, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPrewriteNotMatchError simulates a TiKV CDC stream that delivers a
// COMMIT event for a key whose prewrite was never seen. The kv client is
// expected to drop the gRPC stream, reconnect, and re-subscribe both regions.
func TestPrewriteNotMatchError(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	wg := &sync.WaitGroup{}
	var requestIds sync.Map
	var server1Stopped int32 = 0
	server1StoppedCh := make(chan struct{})
	ch1 := make(chan *cdcpb.ChangeDataEvent, 10)
	srv1 := newMockChangeDataService(t, ch1)
	server1, addr1 := newMockService(ctx, t, srv1, wg)
	// The first mock server records request IDs and shuts itself down after
	// the first receive error; the injected bad event below triggers that.
	srv1.recvLoop = func(server cdcpb.ChangeData_EventFeedServer) {
		if atomic.LoadInt32(&server1Stopped) == int32(1) {
			return
		}
		defer func() {
			atomic.StoreInt32(&server1Stopped, 1)
			close(ch1)
			server1.Stop()
			server1StoppedCh <- struct{}{}
		}()
		for {
			req, err := server.Recv()
			if err != nil {
				log.Error("mock server error", zap.Error(err))
				return
			}
			requestIds.Store(req.RegionId, req.RequestId)
		}
	}
	rpcClient, cluster, pdClient, err := testutils.NewMockTiKV("", mockcopr.NewCoprRPCHandler())
	require.Nil(t, err)
	pdClient = &mockPDClient{Client: pdClient, versionGen: defaultVersionGen}
	kvStorage, err := tikv.NewTestTiKVStore(rpcClient, pdClient, nil, nil, 0)
	require.Nil(t, err)
	defer kvStorage.Close() //nolint:errcheck
	// create two regions to avoid the stream is canceled by no region remained
	regionID3 := uint64(3)
	regionID4 := uint64(4)
	cluster.AddStore(1, addr1)
	cluster.Bootstrap(regionID3, []uint64{1}, []uint64{4}, 4)
	cluster.SplitRaw(regionID3, regionID4, []byte("b"), []uint64{5}, 5)
	isPullInit := &mockPullerInit{}
	lockResolver := txnutil.NewLockerResolver(kvStorage, model.DefaultChangeFeedID("changefeed-test"), util.RoleTester)
	grpcPool := NewGrpcPoolImpl(ctx, &security.Credential{})
	defer grpcPool.Close()
	regionCache := tikv.NewRegionCache(pdClient)
	defer regionCache.Close()
	cdcClient := NewCDCClient(
		ctx, pdClient, grpcPool, regionCache, pdutil.NewClock4Test(),
		model.DefaultChangeFeedID(""),
		config.GetDefaultServerConfig().KVClient)
	eventCh := make(chan model.RegionFeedEvent, 50)
	baseAllocatedID := currentRequestID()
	wg.Add(1)
	go func() {
		defer wg.Done()
		// EventFeed runs until the test's cancel(); context.Canceled is the
		// expected terminal cause.
		err = cdcClient.EventFeed(ctx, regionspan.ComparableSpan{Start: []byte("a"), End: []byte("c")}, 100, lockResolver, isPullInit, eventCh)
		require.Equal(t, context.Canceled, errors.Cause(err))
	}()
	// The expected request ids are agnostic because the kv client could retry
	// for more than one time, so we wait until the newly started server receives
	// requests for both two regions.
	err = retry.Do(context.Background(), func() error {
		_, ok1 := requestIds.Load(regionID3)
		_, ok2 := requestIds.Load(regionID4)
		if ok1 && ok2 {
			return nil
		}
		return errors.New("waiting for kv client requests received by server")
	}, retry.WithBackoffBaseDelay(200), retry.WithBackoffMaxDelay(60*1000), retry.WithMaxTries(10))
	require.Nil(t, err)
	reqID1, _ := requestIds.Load(regionID3)
	reqID2, _ := requestIds.Load(regionID4)
	initialized1 := mockInitializedEvent(regionID3, reqID1.(uint64))
	initialized2 := mockInitializedEvent(regionID4, reqID2.(uint64))
	ch1 <- initialized1
	ch1 <- initialized2
	// A COMMIT without a matching prewrite: this must make the client tear
	// down the stream rather than emit a bogus row.
	prewriteNotMatchEvent := &cdcpb.ChangeDataEvent{Events: []*cdcpb.Event{
		{
			RegionId:  regionID3,
			RequestId: reqID1.(uint64),
			Event: &cdcpb.Event_Entries_{
				Entries: &cdcpb.Event_Entries{
					Entries: []*cdcpb.Event_Row{{
						Type:     cdcpb.Event_COMMIT,
						OpType:   cdcpb.Event_Row_PUT,
						Key:      []byte("aaaa"),
						Value:    []byte("commit event before prewrite"),
						StartTs:  105,
						CommitTs: 115,
					}},
				},
			},
		},
	}}
	ch1 <- prewriteNotMatchEvent
	<-server1StoppedCh
	ch2 := make(chan *cdcpb.ChangeDataEvent, 10)
	srv2 := newMockChangeDataService(t, ch2)
	srv2.recvLoop = func(server cdcpb.ChangeData_EventFeedServer) {
		for {
			req, err := server.Recv()
			if err != nil {
				log.Error("mock server error", zap.Error(err))
				return
			}
			requestIds.Store(req.RegionId, req.RequestId)
		}
	}
	// Reuse the same listen address as server 1
	server2, _ := newMockServiceSpecificAddr(ctx, t, srv2, addr1, wg)
	defer func() {
		close(ch2)
		server2.Stop()
		wg.Wait()
	}()
	// After the gRPC stream is canceled, two more reqeusts will be sent, so the
	// allocated id is increased by 2 from baseAllocatedID+2.
	waitRequestID(t, baseAllocatedID+4)
	cancel()
}
explode_data.jsonl/32888
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1725 }
[ 2830, 3393, 47, 52473, 2623, 8331, 1454, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 72079, 1669, 609, 12996, 28384, 2808, 31483, 2405, 1681, 12701, 12811, 10104, 198, 2405, 3538, 16, 59803, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListPackages(t *testing.T) { ui := termui.New(&bytes.Buffer{}, ioutil.Discard, nil) assert := assert.New(t) workDir, err := os.Getwd() assert.NoError(err) roleManifestPath := filepath.Join(workDir, "../test-assets/role-manifests/app/no-instance-groups/no-instance-groups.yml") badReleasePath := filepath.Join(workDir, "../test-assets/bad-release") releasePath := filepath.Join(workDir, "../test-assets/ntp-release") f := NewFissileApplication(".", ui) err = f.LoadManifest( roleManifestPath, []string{badReleasePath}, []string{""}, []string{""}, filepath.Join(workDir, "../test-assets/bosh-cache")) assert.Error(err, "Expected ListPackages to not find the release") err = f.LoadManifest( roleManifestPath, []string{releasePath}, []string{""}, []string{""}, filepath.Join(workDir, "../test-assets/bosh-cache")) if assert.NoError(err) { err = f.ListPackages(false) assert.Nil(err, "Expected ListPackages to find the release") } }
explode_data.jsonl/47758
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 370 }
[ 2830, 3393, 852, 69513, 1155, 353, 8840, 836, 8, 341, 37278, 1669, 4647, 1963, 7121, 2099, 9651, 22622, 22655, 43144, 909, 47560, 11, 2092, 340, 6948, 1669, 2060, 7121, 1155, 692, 97038, 6184, 11, 1848, 1669, 2643, 2234, 6377, 741, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInitDBNotInitialized(t *testing.T) { cfg := &Config{ NonceExpiration: "15s", NonceSweepInterval: "15m", } var db *dmocks.FabricCADB issuer := NewIssuer("ca1", ".", cfg, util.GetDefaultBCCSP(), NewLib()) err := issuer.Init(false, db, &dbutil.Levels{Credential: 1, RAInfo: 1, Nonce: 1}) assert.NoError(t, err) db = new(dmocks.FabricCADB) db.On("IsInitialized").Return(false) issuer = NewIssuer("ca1", ".", cfg, util.GetDefaultBCCSP(), NewLib()) err = issuer.Init(false, db, &dbutil.Levels{Credential: 1, RAInfo: 1, Nonce: 1}) assert.NoError(t, err) }
explode_data.jsonl/23677
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 3803, 3506, 2623, 22495, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 609, 2648, 515, 197, 197, 90528, 66301, 25, 262, 330, 16, 20, 82, 756, 197, 197, 90528, 50, 48542, 10256, 25, 330, 16, 20, 76, 756, 197, 532, 2405, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParserLiteral(t *testing.T) { assertTextAndMarker(t, "far`123`boo", "far`123`boo", &text{}) assertTextAndMarker(t, "far\\`123\\`boo", "far\\`123\\`boo", &text{}) }
explode_data.jsonl/60276
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 6570, 17350, 1155, 353, 8840, 836, 8, 341, 6948, 1178, 3036, 20613, 1155, 11, 330, 23559, 63, 16, 17, 18, 63, 32993, 497, 330, 23559, 63, 16, 17, 18, 63, 32993, 497, 609, 1318, 37790, 6948, 1178, 3036, 20613, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUpdatePackageFlatcar verifies UpdatePackage behavior around the
// optional FlatcarAction: updating without one leaves it nil, adding one on
// update populates the default action fields, and updating an existing one
// changes its payload while preserving its ID.
func TestUpdatePackageFlatcar(t *testing.T) {
	a := newForTest(t)
	defer a.Close()
	pkg := &Package{
		Type:          PkgTypeFlatcar,
		URL:           "https://update.release.flatcar-linux.net/amd64-usr/XYZ/",
		Filename:      null.StringFrom("update.gz"),
		Version:       "2016.6.6",
		Size:          null.StringFrom("123456"),
		Hash:          null.StringFrom("sha1:blablablabla"),
		ApplicationID: flatcarAppID,
	}
	pkg, err := a.AddPackage(pkg)
	assert.NoError(t, err)
	assert.Nil(t, pkg.FlatcarAction)
	// A plain version bump must not create a FlatcarAction.
	pkg.Version = "2016.6.7"
	err = a.UpdatePackage(pkg)
	assert.NoError(t, err)
	assert.Nil(t, pkg.FlatcarAction)
	// Attaching a FlatcarAction on update fills in the default action fields.
	pkg.FlatcarAction = &FlatcarAction{
		Sha256: "sha256:blablablabla",
	}
	err = a.UpdatePackage(pkg)
	assert.NoError(t, err)
	assert.Equal(t, "postinstall", pkg.FlatcarAction.Event)
	assert.Equal(t, false, pkg.FlatcarAction.NeedsAdmin)
	assert.Equal(t, false, pkg.FlatcarAction.IsDelta)
	assert.Equal(t, true, pkg.FlatcarAction.DisablePayloadBackoff)
	assert.Equal(t, "sha256:blablablabla", pkg.FlatcarAction.Sha256)
	err = a.DeletePackage(pkg.ID)
	assert.NoError(t, err)
	// Re-create the package, this time with the action present from the start.
	pkg = &Package{
		Type:          PkgTypeFlatcar,
		URL:           "https://update.release.flatcar-linux.net/amd64-usr/XYZ/",
		Filename:      null.StringFrom("update.gz"),
		Version:       "2016.6.6",
		Size:          null.StringFrom("123456"),
		Hash:          null.StringFrom("sha1:blablablabla"),
		ApplicationID: flatcarAppID,
	}
	pkg.FlatcarAction = &FlatcarAction{
		Sha256: "sha256:blablablabla",
	}
	pkg, err = a.AddPackage(pkg)
	assert.NoError(t, err)
	assert.NotEqual(t, pkg.FlatcarAction.ID, "")
	flatcarActionID := pkg.FlatcarAction.ID
	// Updating the existing action changes its payload but keeps its ID.
	pkg.FlatcarAction.Sha256 = "sha256:bleblebleble"
	err = a.UpdatePackage(pkg)
	assert.NoError(t, err)
	assert.Equal(t, "sha256:bleblebleble", pkg.FlatcarAction.Sha256)
	assert.Equal(t, flatcarActionID, pkg.FlatcarAction.ID)
}
explode_data.jsonl/74613
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 862 }
[ 2830, 3393, 4289, 13100, 31019, 6918, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 501, 2461, 2271, 1155, 340, 16867, 264, 10421, 2822, 3223, 7351, 1669, 609, 13100, 515, 197, 27725, 25, 688, 393, 7351, 929, 31019, 6918, 345, 197, 79055, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSquashConfigForSysMgmtdHost(t *testing.T) { defaultConfig := *beegfsv1.NewBeegfsConfig() defaultConfig.ConnInterfaces = []string{"ib0"} fileSystemSpecificBeegfsConfig := *beegfsv1.NewBeegfsConfig() fileSystemSpecificBeegfsConfig.ConnInterfaces = []string{"ib1"} testConfig := beegfsv1.PluginConfig{ DefaultConfig: defaultConfig, FileSystemSpecificConfigs: []beegfsv1.FileSystemSpecificConfig{ { SysMgmtdHost: "127.0.0.1", Config: fileSystemSpecificBeegfsConfig, }, }, } tests := map[string]struct { sysMgmtdHost string want beegfsv1.BeegfsConfig }{ "not matching sysMgmtdHost": { sysMgmtdHost: "127.0.0.0", want: defaultConfig, }, "matching sysMgmtdHost": { sysMgmtdHost: "127.0.0.1", want: fileSystemSpecificBeegfsConfig, }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { got := squashConfigForSysMgmtdHost(tc.sysMgmtdHost, testConfig) if !reflect.DeepEqual(tc.want, got) { t.Fatalf("expected BeegfsConfig: %v, got BeegfsConfig: %v", tc.want, got) } }) } }
explode_data.jsonl/77108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 50, 446, 988, 2648, 2461, 32792, 44, 26186, 1296, 9296, 1155, 353, 8840, 836, 8, 341, 11940, 2648, 1669, 353, 1371, 791, 69, 3492, 16, 7121, 3430, 791, 3848, 2648, 741, 11940, 2648, 50422, 41066, 284, 3056, 917, 4913, 579,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPeerUnknown(t *testing.T) { store := NewMemStore() node := NewForTests("node02", store, nil) node.initPeers() defer node.Stop() t.Run("last success", func(t *testing.T) { assertar := assert.New(t) peer := &peerAttr{ ID: hash.FakePeer(), Host: &hostAttr{ LastSuccess: time.Now().Truncate(node.conf.DiscoveryTimeout), }, } node.peers.ids[peer.ID] = peer assertar.False(node.PeerUnknown(&peer.ID)) }) t.Run("peer known", func(t *testing.T) { assertar := assert.New(t) peer := &peerAttr{ ID: hash.FakePeer(), Host: &hostAttr{ LastSuccess: time.Now(), }, } node.peers.ids[peer.ID] = peer assertar.False(node.PeerUnknown(&peer.ID)) }) t.Run("peer unknown", func(t *testing.T) { assertar := assert.New(t) unknown := hash.FakePeer() assertar.True(node.PeerUnknown(&unknown)) }) t.Run("nil peer", func(t *testing.T) { assertar := assert.New(t) assertar.True(node.PeerUnknown(nil)) }) }
explode_data.jsonl/54068
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 30888, 13790, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 18816, 6093, 741, 20831, 1669, 1532, 2461, 18200, 445, 3509, 15, 17, 497, 3553, 11, 2092, 340, 20831, 8271, 10197, 388, 741, 16867, 2436, 30213, 2822, 3244, 1670...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestZeroReader(t *testing.T) { var z zeroReader r := NewReader(z) c := make(chan error) go func() { _, err := r.ReadByte() c <- err }() select { case err := <-c: if err == nil { t.Error("error expected") } else if err != io.ErrNoProgress { t.Error("unexpected error:", err) } case <-time.After(time.Second): t.Error("test timed out (endless loop in ReadByte?)") } }
explode_data.jsonl/2876
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 17999, 5062, 1155, 353, 8840, 836, 8, 341, 2405, 1147, 7168, 5062, 198, 7000, 1669, 1532, 5062, 13174, 692, 1444, 1669, 1281, 35190, 1465, 340, 30680, 2915, 368, 341, 197, 197, 6878, 1848, 1669, 435, 89626, 741, 197, 1444, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCodeJSON(t *testing.T) { if testing.Short() { t.Skip("skipping code.json.gz") } var ref interface{} data := testdataFile("testdata/code.json.gz") json.Unmarshal(data, &ref) config := NewDefaultConfig() jsn := config.NewJson(data) if jsnrem, val := jsn.Tovalue(); jsnrem != nil { t.Errorf("remaining text after parsing should be empty, %v", jsnrem) } else if reflect.DeepEqual(val, ref) == false { t.Errorf("codeJSON parsing failed with reference: %v", ref) } }
explode_data.jsonl/70408
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 2078, 5370, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 2038, 4323, 20963, 1138, 197, 630, 2405, 2053, 3749, 16094, 8924, 1669, 1273, 691, 1703, 445, 92425, 46928, 4323, 20963...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestVersionedRandomTreeSpecial2(t *testing.T) { require := require.New(t) tree := NewMutableTree(db.NewMemDB(), 100) tree.Set([]byte("OFMe2Yvm"), []byte("ez2OtQtE")) tree.Set([]byte("WEN4iN7Y"), []byte("kQNyUalI")) tree.SaveVersion() tree.Set([]byte("1yY3pXHr"), []byte("udYznpII")) tree.Set([]byte("7OSHNE7k"), []byte("ff181M2d")) tree.SaveVersion() tree.DeleteVersion(1) require.Len(tree.ndb.nodes(), tree.nodeSize()) }
explode_data.jsonl/25116
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 5637, 291, 13999, 6533, 20366, 17, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 51968, 1669, 1532, 11217, 6533, 9791, 7121, 18816, 3506, 1507, 220, 16, 15, 15, 692, 51968, 4202, 10556, 3782, 445, 12483, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPrintPriorityLevelConfiguration checks the table printer for
// flowcontrol PriorityLevelConfiguration objects across the three enablement
// shapes: Exempt, Limited without queuing, and Limited with queuing.
func TestPrintPriorityLevelConfiguration(t *testing.T) {
	tests := []struct {
		pl       flowcontrol.PriorityLevelConfiguration
		expected []metav1.TableRow
	}{
		{
			pl: flowcontrol.PriorityLevelConfiguration{
				ObjectMeta: metav1.ObjectMeta{
					Name:              "unlimited",
					CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
				},
				Spec: flowcontrol.PriorityLevelConfigurationSpec{
					Type: flowcontrol.PriorityLevelEnablementExempt,
				},
			},
			// Columns: Name, Type, AssuredConcurrencyShares, Queues, HandSize, QueueLengthLimit, Age
			expected: []metav1.TableRow{{Cells: []interface{}{"unlimited", "Exempt", "<none>", "<none>", "<none>", "<none>", "0s"}}},
		},
		{
			pl: flowcontrol.PriorityLevelConfiguration{
				ObjectMeta: metav1.ObjectMeta{
					Name:              "unqueued",
					CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
				},
				Spec: flowcontrol.PriorityLevelConfigurationSpec{
					Type: flowcontrol.PriorityLevelEnablementLimited,
					Limited: &flowcontrol.LimitedPriorityLevelConfiguration{
						AssuredConcurrencyShares: 47,
						LimitResponse: flowcontrol.LimitResponse{
							Type: flowcontrol.LimitResponseTypeReject,
						},
					},
				},
			},
			// Columns: Name, Type, AssuredConcurrencyShares, Queues, HandSize, QueueLengthLimit, Age
			expected: []metav1.TableRow{{Cells: []interface{}{"unqueued", "Limited", int32(47), "<none>", "<none>", "<none>", "0s"}}},
		},
		{
			pl: flowcontrol.PriorityLevelConfiguration{
				ObjectMeta: metav1.ObjectMeta{
					Name:              "queued",
					CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)},
				},
				Spec: flowcontrol.PriorityLevelConfigurationSpec{
					Type: flowcontrol.PriorityLevelEnablementLimited,
					Limited: &flowcontrol.LimitedPriorityLevelConfiguration{
						AssuredConcurrencyShares: 42,
						LimitResponse: flowcontrol.LimitResponse{
							Type: flowcontrol.LimitResponseTypeQueue,
							Queuing: &flowcontrol.QueuingConfiguration{
								Queues:           8,
								HandSize:         3,
								QueueLengthLimit: 4,
							},
						},
					},
				},
			},
			// Columns: Name, Type, AssuredConcurrencyShares, Queues, HandSize, QueueLengthLimit, Age
			expected: []metav1.TableRow{{Cells: []interface{}{"queued", "Limited", int32(42), int32(8), int32(3), int32(4), "0s"}}},
		},
	}
	for i, test := range tests {
		rows, err := printPriorityLevelConfiguration(&test.pl, printers.GenerateOptions{})
		if err != nil {
			t.Fatal(err)
		}
		// The printer embeds the runtime object in each row; blank it so the
		// comparison below is limited to the rendered cells.
		for i := range rows {
			rows[i].Object.Object = nil
		}
		if !reflect.DeepEqual(test.expected, rows) {
			t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows))
		}
	}
}
explode_data.jsonl/72310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1135 }
[ 2830, 3393, 8994, 20555, 4449, 7688, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 72213, 981, 6396, 2865, 83590, 4449, 7688, 198, 197, 42400, 3056, 4059, 402, 16, 18257, 3102, 198, 197, 59403, 197, 197, 515, 298, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestWritePrettyBackupList_ShortColumnsValues renders two minimal backup
// entries through WritePrettyBackupListDetails and compares the ASCII table
// output byte-for-byte against the expected layout.
func TestWritePrettyBackupList_ShortColumnsValues(t *testing.T) {
	expectedRes := "+---+------+----------+--------------------------+------------+-------------+----------+---------+------------+-----------+------------+-----------+\n" +
		"| # | NAME | MODIFIED | WAL SEGMENT BACKUP START | START TIME | FINISH TIME | HOSTNAME | DATADIR | PG VERSION | START LSN | FINISH LSN | PERMANENT |\n" +
		"+---+------+----------+--------------------------+------------+-------------+----------+---------+------------+-----------+------------+-----------+\n" +
		"| 0 | b0   | -        | shortWallName0           | -          | -           |          |         |          0 |         0 |          0 | false     |\n" +
		"| 1 | b1   | -        | shortWallName1           | -          | -           |          |         |          0 |         0 |          0 | false     |\n" +
		"+---+------+----------+--------------------------+------------+-------------+----------+---------+------------+-----------+------------+-----------+\n"
	b := bytes.Buffer{}
	postgres.WritePrettyBackupListDetails(shortBackups, &b)
	assert.Equal(t, expectedRes, b.String())
}
explode_data.jsonl/23910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 463 }
[ 2830, 3393, 7985, 51940, 56245, 852, 1098, 22007, 13965, 6227, 1155, 353, 8840, 836, 8, 341, 42400, 1061, 1669, 6630, 4421, 10, 61247, 15340, 10, 771, 15340, 10, 5161, 10, 19528, 10, 15340, 10, 22738, 10, 5161, 10, 18088, 10, 5161, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMapStringBytesLookup verifies the compiler's zero-allocation
// optimization for map lookups keyed by a string([]byte) conversion.
func TestMapStringBytesLookup(t *testing.T) {
	// Use large string keys to avoid small-allocation coalescing,
	// which can cause AllocsPerRun to report lower counts than it should.
	m := map[string]int{
		"1000000000000000000000000000000000000000000000000": 1,
		"2000000000000000000000000000000000000000000000000": 2,
	}

	key := []byte("1000000000000000000000000000000000000000000000000")
	if got := m[string(key)]; got != 1 {
		t.Errorf(`m[string([]byte("1"))] = %d, want 1`, got)
	}
	key[0] = '2'
	if got := m[string(key)]; got != 2 {
		t.Errorf(`m[string([]byte("2"))] = %d, want 2`, got)
	}

	// Single-result lookup form must not allocate for the key conversion.
	var sum int
	allocs := testing.AllocsPerRun(100, func() {
		sum += m[string(key)]
	})
	if allocs != 0 {
		t.Errorf("AllocsPerRun for m[string(buf)] = %v, want 0", allocs)
	}

	// Comma-ok lookup form must not allocate either.
	sum = 0
	allocs = testing.AllocsPerRun(100, func() {
		v, ok := m[string(key)]
		if !ok {
			panic("!ok")
		}
		sum += v
	})
	if allocs != 0 {
		t.Errorf("AllocsPerRun for x,ok = m[string(buf)] = %v, want 0", allocs)
	}
}
explode_data.jsonl/19921
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 386 }
[ 2830, 3393, 2227, 703, 7078, 34247, 1155, 353, 8840, 836, 8, 341, 197, 322, 5443, 3460, 914, 6894, 311, 5648, 2613, 18779, 2527, 1062, 3831, 6125, 345, 197, 322, 892, 646, 5240, 46685, 82, 3889, 6727, 311, 1895, 4722, 14579, 1091, 432...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPullerDifferentPeersDifferentCollections(t *testing.T) { // Scenario: p1 pulls from p2 and from p3 // and each has different collections gn := &gossipNetwork{} factoryMock1 := &mocks.CollectionAccessFactory{} accessPolicyMock1 := &mocks.CollectionAccessPolicy{} Setup(accessPolicyMock1, 1, 2, func(data protoutil.SignedData) bool { return bytes.Equal(data.Identity, []byte("p2")) || bytes.Equal(data.Identity, []byte("p3")) }, map[string]struct{}{"org1": {}, "org2": {}}, false) factoryMock1.On("AccessPolicy", mock.Anything, mock.Anything).Return(accessPolicyMock1, nil) policyStore := newCollectionStore().withPolicy("col2", uint64(100)).thatMapsTo("p2").withPolicy("col3", uint64(100)).thatMapsTo("p3") p1 := gn.newPuller("p1", policyStore, factoryMock1, membership(peerData{"p2", uint64(1)}, peerData{"p3", uint64(1)})...) p2TransientStore := &util.PrivateRWSetWithConfig{ RWSet: newPRWSet(), CollectionConfig: &peer.CollectionConfig{ Payload: &peer.CollectionConfig_StaticCollectionConfig{ StaticCollectionConfig: &peer.StaticCollectionConfig{ Name: "col2", }, }, }, } policyStore = newCollectionStore().withPolicy("col2", uint64(100)).thatMapsTo("p1") factoryMock2 := &mocks.CollectionAccessFactory{} accessPolicyMock2 := &mocks.CollectionAccessPolicy{} Setup(accessPolicyMock2, 1, 2, func(data protoutil.SignedData) bool { return bytes.Equal(data.Identity, []byte("p1")) }, map[string]struct{}{"org1": {}, "org2": {}}, false) factoryMock2.On("AccessPolicy", mock.Anything, mock.Anything).Return(accessPolicyMock2, nil) p2 := gn.newPuller("p2", policyStore, factoryMock2) dig1 := &proto.PvtDataDigest{ TxId: "txID1", Collection: "col2", Namespace: "ns1", } store1 := Dig2PvtRWSetWithConfig{ privdatacommon.DigKey{ TxId: "txID1", Collection: "col2", Namespace: "ns1", }: p2TransientStore, } p2.PrivateDataRetriever.(*dataRetrieverMock).On("CollectionRWSet", mock.MatchedBy(protoMatcher(dig1)), mock.Anything).Return(store1, true, nil) p3TransientStore := &util.PrivateRWSetWithConfig{ 
RWSet: newPRWSet(), CollectionConfig: &peer.CollectionConfig{ Payload: &peer.CollectionConfig_StaticCollectionConfig{ StaticCollectionConfig: &peer.StaticCollectionConfig{ Name: "col3", }, }, }, } store2 := Dig2PvtRWSetWithConfig{ privdatacommon.DigKey{ TxId: "txID1", Collection: "col3", Namespace: "ns1", }: p3TransientStore, } policyStore = newCollectionStore().withPolicy("col3", uint64(100)).thatMapsTo("p1") factoryMock3 := &mocks.CollectionAccessFactory{} accessPolicyMock3 := &mocks.CollectionAccessPolicy{} Setup(accessPolicyMock3, 1, 2, func(data protoutil.SignedData) bool { return bytes.Equal(data.Identity, []byte("p1")) }, map[string]struct{}{"org1": {}, "org2": {}}, false) factoryMock3.On("AccessPolicy", mock.Anything, mock.Anything).Return(accessPolicyMock3, nil) p3 := gn.newPuller("p3", policyStore, factoryMock3) dig2 := &proto.PvtDataDigest{ TxId: "txID1", Collection: "col3", Namespace: "ns1", } p3.PrivateDataRetriever.(*dataRetrieverMock).On("CollectionRWSet", mock.MatchedBy(protoMatcher(dig2)), mock.Anything).Return(store2, true, nil) dasf := &digestsAndSourceFactory{} fetchedMessages, err := p1.fetch(dasf.mapDigest(toDigKey(dig1)).toSources().mapDigest(toDigKey(dig2)).toSources().create()) assert.NoError(t, err) rws1 := util.PrivateRWSet(fetchedMessages.AvailableElements[0].Payload[0]) rws2 := util.PrivateRWSet(fetchedMessages.AvailableElements[0].Payload[1]) rws3 := util.PrivateRWSet(fetchedMessages.AvailableElements[1].Payload[0]) rws4 := util.PrivateRWSet(fetchedMessages.AvailableElements[1].Payload[1]) fetched := []util.PrivateRWSet{rws1, rws2, rws3, rws4} assert.Contains(t, fetched, p2TransientStore.RWSet[0]) assert.Contains(t, fetched, p2TransientStore.RWSet[1]) assert.Contains(t, fetched, p3TransientStore.RWSet[0]) assert.Contains(t, fetched, p3TransientStore.RWSet[1]) }
explode_data.jsonl/53265
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1549 }
[ 2830, 3393, 36068, 261, 69123, 10197, 388, 69123, 52730, 1155, 353, 8840, 836, 8, 341, 197, 322, 58663, 25, 281, 16, 33045, 504, 281, 17, 323, 504, 281, 18, 198, 197, 322, 323, 1817, 702, 2155, 15302, 198, 3174, 77, 1669, 609, 70, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestListOfStringsInputWithSliceOfStrings(t *testing.T) { input := [][]string{ {"Column A", "Column B", "Column C"}, {"x","y","z"}, } config := DefaultConfig() output := FormatWithSliceOfStrings(input, config) expected := "Column A Column B Column C\n" expected += "x y z" if output != expected { t.Fatalf("\nexpected:\n%s\n\ngot:\n%s", expected, output) } }
explode_data.jsonl/10650
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 64090, 20859, 2505, 2354, 33236, 2124, 20859, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 52931, 917, 515, 197, 197, 4913, 2933, 362, 497, 330, 2933, 425, 497, 330, 2933, 356, 7115, 197, 197, 4913, 87, 2198, 88, 2198, 89, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidationInvalidEndorsing(t *testing.T) { theLedger := new(mockLedger) pm := &mocks.Mapper{} factory := &mocks.PluginFactory{} plugin := &mocks.Plugin{} factory.On("New").Return(plugin) plugin.On("Init", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(nil) plugin.On("Validate", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(errors.New("invalid tx")) pm.On("FactoryByName", vp.Name("vscc")).Return(factory) cryptoProvider, err := sw.NewDefaultSecurityLevelWithKeystore(sw.NewDummyKeyStore()) assert.NoError(t, err) validator := txvalidatorv14.NewTxValidator( "", semaphore.New(10), &mocktxvalidator.Support{LedgerVal: theLedger, ACVal: preV12Capabilities()}, pm, cryptoProvider, ) ccID := "mycc" tx := getEnv(ccID, nil, createRWset(t, ccID), t) theLedger.On("GetTransactionByID", mock.Anything).Return(&peer.ProcessedTransaction{}, ledger.NotFoundInIndexErr("")) cd := &ccp.ChaincodeData{ Name: ccID, Version: ccVersion, Vscc: "vscc", Policy: signedByAnyMember([]string{"SampleOrg"}), } cdbytes := protoutil.MarshalOrPanic(cd) queryExecutor := new(mockQueryExecutor) queryExecutor.On("GetState", "lscc", ccID).Return(cdbytes, nil) theLedger.On("NewQueryExecutor", mock.Anything).Return(queryExecutor, nil) b := &common.Block{ Data: &common.BlockData{Data: [][]byte{protoutil.MarshalOrPanic(tx)}}, Header: &common.BlockHeader{}, } // Keep default callback err = validator.Validate(b) // Restore default callback assert.NoError(t, err) assertInvalid(b, t, peer.TxValidationCode_ENDORSEMENT_POLICY_FAILURE) }
explode_data.jsonl/47821
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 620 }
[ 2830, 3393, 13799, 7928, 3727, 1087, 287, 1155, 353, 8840, 836, 8, 341, 32088, 60850, 1389, 1669, 501, 30389, 60850, 1389, 340, 86511, 1669, 609, 16712, 82, 71892, 16094, 1166, 2919, 1669, 609, 16712, 82, 64378, 4153, 16094, 197, 9138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidatePersistentVolumeClaim(t *testing.T) { invalidClassName := "-invalid-" validClassName := "valid" validMode := core.PersistentVolumeFilesystem scenarios := map[string]struct { isExpectedFailure bool claim *core.PersistentVolumeClaim }{ "good-claim": { isExpectedFailure: false, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Selector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { Key: "key2", Operator: "Exists", }, }, }, AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("10G"), }, }, StorageClassName: &validClassName, }), }, "invalid-claim-zero-capacity": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Selector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { Key: "key2", Operator: "Exists", }, }, }, AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("0G"), }, }, StorageClassName: &validClassName, }), }, "invalid-label-selector": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Selector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { Key: "key2", Operator: "InvalidOp", Values: []string{"value1", "value2"}, }, }, }, AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("10G"), }, }, }), }, "invalid-accessmode": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ AccessModes: []core.PersistentVolumeAccessMode{"fakemode"}, 
Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("10G"), }, }, }), }, "missing-namespace": { isExpectedFailure: true, claim: testVolumeClaim("foo", "", core.PersistentVolumeClaimSpec{ AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("10G"), }, }, }), }, "no-access-modes": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("10G"), }, }, }), }, "no-resource-requests": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, }, }), }, "invalid-resource-requests": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceMemory): resource.MustParse("10G"), }, }, }), }, "negative-storage-request": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Selector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { Key: "key2", Operator: "Exists", }, }, }, AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("-10G"), }, }, }), }, "zero-storage-request": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Selector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { 
Key: "key2", Operator: "Exists", }, }, }, AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("0G"), }, }, }), }, "invalid-storage-class-name": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Selector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { Key: "key2", Operator: "Exists", }, }, }, AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("10G"), }, }, StorageClassName: &invalidClassName, }), }, // VolumeMode alpha feature disabled // TODO: remove when no longer alpha "disabled alpha valid volume mode": { isExpectedFailure: true, claim: testVolumeClaim("foo", "ns", core.PersistentVolumeClaimSpec{ Selector: &metav1.LabelSelector{ MatchExpressions: []metav1.LabelSelectorRequirement{ { Key: "key2", Operator: "Exists", }, }, }, AccessModes: []core.PersistentVolumeAccessMode{ core.ReadWriteOnce, core.ReadOnlyMany, }, Resources: core.ResourceRequirements{ Requests: core.ResourceList{ core.ResourceName(core.ResourceStorage): resource.MustParse("10G"), }, }, StorageClassName: &validClassName, VolumeMode: &validMode, }), }, } for name, scenario := range scenarios { errs := ValidatePersistentVolumeClaim(scenario.claim) if len(errs) == 0 && scenario.isExpectedFailure { t.Errorf("Unexpected success for scenario: %s", name) } if len(errs) > 0 && !scenario.isExpectedFailure { t.Errorf("Unexpected failure for scenario: %s - %+v", name, errs) } } }
explode_data.jsonl/27056
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2911 }
[ 2830, 3393, 17926, 53194, 18902, 45544, 1155, 353, 8840, 836, 8, 341, 197, 11808, 14541, 1669, 6523, 11808, 12, 698, 56322, 14541, 1669, 330, 1891, 698, 56322, 3636, 1669, 6200, 61655, 18902, 1703, 8948, 198, 29928, 60494, 1669, 2415, 140...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestMuxResponse(t *testing.T) { testCases := map[string]struct { muxResponse ios.MuxResponse successful bool }{ "successful response": {ios.MuxResponse{MessageType: "random", Number: 0}, true}, "unsuccessful response": {ios.MuxResponse{MessageType: "random", Number: 1}, false}, } for _, tc := range testCases { bytes := []byte(ios.ToPlist(tc.muxResponse)) actual := ios.MuxResponsefromBytes(bytes) assert.Equal(t, tc.muxResponse, actual) assert.Equal(t, tc.successful, actual.IsSuccessFull()) } }
explode_data.jsonl/5363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 44, 2200, 2582, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 2109, 2200, 2582, 27531, 1321, 2200, 2582, 198, 197, 30553, 1262, 220, 1807, 198, 197, 59403, 197, 197, 1, 30950, 2033, 788...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetShmSizeBindMounted(t *testing.T) { if os.Geteuid() != 0 { t.Skip("Test disabled as requires root privileges") } dir, err := ioutil.TempDir("", "") assert.Nil(t, err) defer os.RemoveAll(dir) shmPath := filepath.Join(dir, "shm") err = os.Mkdir(shmPath, 0700) assert.Nil(t, err) size := 8192 if runtime.GOARCH == "ppc64le" { // PAGE_SIZE on ppc64le is 65536 size = 65536 } shmOptions := "mode=1777,size=" + strconv.Itoa(size) err = unix.Mount("shm", shmPath, "tmpfs", unix.MS_NOEXEC|unix.MS_NOSUID|unix.MS_NODEV, shmOptions) assert.Nil(t, err) defer unix.Unmount(shmPath, 0) containerConfig := vc.ContainerConfig{ Mounts: []vc.Mount{ { Source: shmPath, Destination: "/dev/shm", Type: "bind", Options: nil, }, }, } shmSize, err := getShmSize(containerConfig) assert.Nil(t, err) assert.Equal(t, shmSize, uint64(size)) }
explode_data.jsonl/44052
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 422 }
[ 2830, 3393, 1949, 2016, 76, 1695, 9950, 90789, 1155, 353, 8840, 836, 8, 341, 743, 2643, 2234, 68, 2423, 368, 961, 220, 15, 341, 197, 3244, 57776, 445, 2271, 8386, 438, 7460, 3704, 35732, 1138, 197, 630, 48532, 11, 1848, 1669, 43144, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTracingSpanName(t *testing.T) { traceContent := fmt.Sprintf("%x", md5.New().Sum([]byte(time.Now().String()))) s := startTestServer(nil, 0, func(r *http.Request) { th, ok := r.Header[traceHeader] if !ok { t.Errorf("missing %s request header", traceHeader) } else { if th[0] != traceContent { t.Errorf("wrong X-Trace-Header content: %s", th[0]) } } }) defer s.Close() u, _ := url.ParseRequestURI("https://www.example.org/hello") r := &http.Request{ URL: u, Method: "GET", Header: make(http.Header), } w := httptest.NewRecorder() doc := fmt.Sprintf(`hello: Path("/hello") -> tracingSpanName("test-span") -> "%s"`, s.URL) tracer := &tracingtest.Tracer{TraceContent: traceContent} params := Params{ OpenTracing: &OpenTracingParams{ Tracer: tracer, }, Flags: FlagsNone, } tp, err := newTestProxyWithParams(doc, params) if err != nil { t.Fatal(err) } defer tp.close() tp.proxy.ServeHTTP(w, r) if _, ok := tracer.FindSpan("test-span"); !ok { t.Error("setting the span name failed") } }
explode_data.jsonl/50647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 447 }
[ 2830, 3393, 1282, 4527, 12485, 675, 1155, 353, 8840, 836, 8, 341, 65058, 2762, 1669, 8879, 17305, 4430, 87, 497, 10688, 20, 7121, 1005, 9190, 10556, 3782, 9730, 13244, 1005, 703, 60131, 1903, 1669, 1191, 2271, 5475, 27907, 11, 220, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRepositoryComponents(t *testing.T) { t.Parallel() testRegistry := "gcr.io" testRepository := "project-id/image" repositoryNameStr := testRegistry + "/" + testRepository repository, err := NewRepository(repositoryNameStr, StrictValidation) if err != nil { t.Fatalf("`%s` should be a valid Repository name, got error: %v", repositoryNameStr, err) } actualRegistry := repository.RegistryStr() if actualRegistry != testRegistry { t.Errorf("RegistryStr() was incorrect for %v. Wanted: `%s` Got: `%s`", repository, testRegistry, actualRegistry) } actualRepository := repository.RepositoryStr() if actualRepository != testRepository { t.Errorf("RepositoryStr() was incorrect for %v. Wanted: `%s` Got: `%s`", repository, testRepository, actualRepository) } }
explode_data.jsonl/39008
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 4624, 10443, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 18185, 15603, 1669, 330, 70, 5082, 4245, 698, 18185, 4624, 1669, 330, 4987, 12897, 23349, 1837, 17200, 3099, 675, 2580, 1669, 1273, 15603, 488, 16891, 488, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestValidateNamespace(t *testing.T) { validLabels := map[string]string{"a": "b"} invalidLabels := map[string]string{"NoUppercaseOrSpecialCharsLike=Equals": "b"} successCases := []core.Namespace{ { ObjectMeta: metav1.ObjectMeta{Name: "abc", Labels: validLabels}, }, { ObjectMeta: metav1.ObjectMeta{Name: "abc-123"}, Spec: core.NamespaceSpec{ Finalizers: []core.FinalizerName{"example.com/something", "example.com/other"}, }, }, } for _, successCase := range successCases { if errs := ValidateNamespace(&successCase); len(errs) != 0 { t.Errorf("expected success: %v", errs) } } errorCases := map[string]struct { R core.Namespace D string }{ "zero-length name": { core.Namespace{ObjectMeta: metav1.ObjectMeta{Name: ""}}, "", }, "defined-namespace": { core.Namespace{ObjectMeta: metav1.ObjectMeta{Name: "abc-123", Namespace: "makesnosense"}}, "", }, "invalid-labels": { core.Namespace{ObjectMeta: metav1.ObjectMeta{Name: "abc", Labels: invalidLabels}}, "", }, } for k, v := range errorCases { errs := ValidateNamespace(&v.R) if len(errs) == 0 { t.Errorf("expected failure for %s", k) } } }
explode_data.jsonl/1047
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 492 }
[ 2830, 3393, 17926, 22699, 1155, 353, 8840, 836, 8, 341, 56322, 23674, 1669, 2415, 14032, 30953, 4913, 64, 788, 330, 65, 16707, 197, 11808, 23674, 1669, 2415, 14032, 30953, 4913, 2753, 52, 602, 2962, 519, 2195, 20366, 32516, 12949, 28, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetOwners(t *testing.T) { owners := []meta.ShardOwner{ {NodeID: 1}, {NodeID: 2}, {NodeID: 3}, {NodeID: 4}, {NodeID: 5}, {NodeID: 6}, } ori_len := len(owners) assert.Equal(t, ori_len, len(getOwners(nil, owners))) assert.Equal(t, ori_len, len(getOwners(map[uint64]bool{}, owners))) assert.Equal(t, ori_len, len(getOwners(map[uint64]bool{ 0: true, }, owners))) assert.Equal(t, ori_len-1, len(getOwners(map[uint64]bool{ 0: true, 1: true, }, owners))) assert.Equal(t, ori_len-1, len(getOwners(map[uint64]bool{ 0: true, 4: true, }, owners))) assert.Equal(t, ori_len-2, len(getOwners(map[uint64]bool{ 0: true, 2: true, 4: true, }, owners))) assert.Equal(t, 0, len(getOwners(map[uint64]bool{ 0: true, 1: true, 2: true, 3: true, 4: true, 5: true, 6: true, }, owners))) }
explode_data.jsonl/43795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 1949, 57762, 1155, 353, 8840, 836, 8, 341, 197, 22980, 1669, 3056, 5490, 10849, 567, 13801, 515, 197, 197, 90, 1955, 915, 25, 220, 16, 1583, 197, 197, 90, 1955, 915, 25, 220, 17, 1583, 197, 197, 90, 1955, 915, 25, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWorkerReady(t *testing.T) { f := newFixture(t) startTime := metav1.Now() completionTime := metav1.Now() mpiJob := newMPIJob("test", int32Ptr(16), &startTime, &completionTime) f.setUpMPIJob(mpiJob) f.setUpConfigMap(newConfigMap(mpiJob, 2, 8)) f.setUpRbac(mpiJob, 2) worker := newWorker(mpiJob, 2, 8, gpuResourceName, false) worker.Status.ReadyReplicas = 2 f.setUpWorker(worker) expLauncher := newLauncher(mpiJob, "kubectl-delivery") f.expectCreateJobAction(expLauncher) mpiJobCopy := mpiJob.DeepCopy() mpiJobCopy.Status.WorkerReplicas = 2 setUpMPIJobTimestamp(mpiJobCopy, &startTime, &completionTime) f.expectUpdateMPIJobStatusAction(mpiJobCopy) f.run(getKey(mpiJob, t), gpuResourceName) }
explode_data.jsonl/75017
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 21936, 19202, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 340, 21375, 1462, 1669, 77520, 16, 13244, 741, 32810, 14386, 1462, 1669, 77520, 16, 13244, 2822, 197, 39479, 12245, 1669, 501, 56369, 12245, 445, 1944, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShouldRaiseWhenCredentialsAreNotInBase64(t *testing.T) { _, _, err := parseBasicAuth(ProxyAuthorizationHeader, "Basic alzefzlfzemjfej==") assert.Error(t, err) assert.Equal(t, "illegal base64 data at input byte 16", err.Error()) }
explode_data.jsonl/20179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 14996, 93101, 4498, 27025, 11526, 2623, 641, 3978, 21, 19, 1155, 353, 8840, 836, 8, 341, 197, 6878, 8358, 1848, 1669, 4715, 15944, 5087, 7, 16219, 18124, 4047, 11, 330, 15944, 452, 89, 823, 89, 11008, 94133, 73, 1859, 73, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouter_Any_ReturnsErrorIfInvalidRoute(t *testing.T) { path := "/path1{" router := Router{} err := router.Any(path, testHandlerFunc) assertNotNil(t, err) }
explode_data.jsonl/31724
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 9523, 1566, 3834, 53316, 82, 1454, 2679, 7928, 4899, 1155, 353, 8840, 836, 8, 341, 26781, 1669, 3521, 2343, 16, 90, 1837, 67009, 1669, 10554, 16094, 9859, 1669, 9273, 13311, 5581, 11, 1273, 3050, 9626, 340, 6948, 96144, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_entryOu_storeLocked(t *testing.T) { t.Parallel() type args struct { i *string } type fields struct { p unsafe.Pointer } type want struct{} type test struct { name string args args fields fields want want checkFunc func(want) error beforeFunc func(args) afterFunc func(args) } defaultCheckFunc := func(w want) error { return nil } tests := []test{ // TODO test cases /* { name: "test_case_1", args: args { i: nil, }, fields: fields { p: nil, }, want: want{}, checkFunc: defaultCheckFunc, }, */ // TODO test cases /* func() test { return test { name: "test_case_2", args: args { i: nil, }, fields: fields { p: nil, }, want: want{}, checkFunc: defaultCheckFunc, } }(), */ } for _, tc := range tests { test := tc t.Run(test.name, func(tt *testing.T) { tt.Parallel() defer goleak.VerifyNone(tt, goleak.IgnoreCurrent()) if test.beforeFunc != nil { test.beforeFunc(test.args) } if test.afterFunc != nil { defer test.afterFunc(test.args) } if test.checkFunc == nil { test.checkFunc = defaultCheckFunc } e := &entryOu{ p: test.fields.p, } e.storeLocked(test.args.i) if err := test.checkFunc(test.want); err != nil { tt.Errorf("error = %v", err) } }) } }
explode_data.jsonl/61540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 817 }
[ 2830, 3393, 9078, 46, 84, 14809, 49010, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 13158, 2827, 2036, 341, 197, 8230, 353, 917, 198, 197, 532, 13158, 5043, 2036, 341, 197, 3223, 19860, 41275, 198, 197, 532, 13158, 1366, 203...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNegate(t *testing.T) { tests := []struct { in string //十六进制编码值 expected string // }{ //secp256k1 prime(又名0) {"0", "0"}, {"fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f", "0"}, {"0", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f"}, //secp256k1主-1 {"fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2e", "1"}, {"1", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2e"}, //SECP256K1主-2 {"2", "fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2d"}, {"fffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2d", "2"}, //随机抽样 { "b3d9aac9c5e43910b4385b53c7e78c21d4cd5f8e683c633aed04c233efc2e120", "4c2655363a1bc6ef4bc7a4ac381873de2b32a07197c39cc512fb3dcb103d1b0f", }, { "f8a85984fee5a12a7c8dd08830d83423c937d77c379e4a958e447a25f407733f", "757a67b011a5ed583722f77cf27cbdc36c82883c861b56a71bb85d90bf888f0", }, { "45ee6142a7fda884211e93352ed6cb2807800e419533be723a9548823ece8312", "ba119ebd5802577bdee16ccad12934d7f87ff1be6acc418dc56ab77cc131791d", }, { "53c2a668f07e411a2e473e1c3b6dcb495dec1227af27673761d44afe5b43d22b", "ac3d59970f81bee5d1b8c1e3c49234b6a213edd850d898c89e2bb500a4bc2a04", }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { f := new(fieldVal).SetHex(test.in).Normalize() expected := new(fieldVal).SetHex(test.expected).Normalize() result := f.Negate(1).Normalize() if !result.Equals(expected) { t.Errorf("fieldVal.Negate #%d wrong result\n"+ "got: %v\nwant: %v", i, result, expected) continue } } }
explode_data.jsonl/362
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 827 }
[ 2830, 3393, 47800, 349, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 258, 981, 914, 442, 102853, 41299, 43316, 112950, 25511, 198, 7325, 914, 6475, 197, 59403, 322, 325, 4672, 17, 20, 21, 74, 16, 10250, 9909, 99518, 130...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestExtensionsList(t *testing.T) { client, err := clients.NewComputeV2Client() th.AssertNoErr(t, err) allPages, err := extensions.List(client).AllPages() th.AssertNoErr(t, err) allExtensions, err := extensions.ExtractExtensions(allPages) th.AssertNoErr(t, err) var found bool for _, extension := range allExtensions { tools.PrintResource(t, extension) if extension.Name == "SchedulerHints" { found = true } } th.AssertEquals(t, found, true) }
explode_data.jsonl/15308
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 31282, 852, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 8239, 7121, 46254, 53, 17, 2959, 741, 70479, 11711, 2753, 7747, 1155, 11, 1848, 692, 50960, 17713, 11, 1848, 1669, 19721, 5814, 12805, 568, 2403, 17713, 741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPatchByFullSymbolName(t *testing.T) { Convey("[TestPatchByFullSymbolName]", t, func() { patchGuard := PatchByFullSymbolName("github.com/cch123/supermonkey.hey", func() string { return "ok" }) So(hey(), ShouldEqual, "ok") patchGuard.Unpatch() }) }
explode_data.jsonl/31446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 43622, 1359, 9432, 15090, 675, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 10937, 2271, 43622, 1359, 9432, 15090, 675, 19076, 259, 11, 2915, 368, 341, 197, 3223, 754, 20806, 1669, 30412, 1359, 9432, 15090, 675, 445, 5204, 905...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormatWithdrawPermissions(t *testing.T) { b.SetDefaults() expected := exchange.NoAPIWithdrawalMethodsText actual := b.FormatWithdrawPermissions() if actual != expected { t.Errorf("Expected: %s, Received: %s", expected, actual) } }
explode_data.jsonl/14541
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 4061, 92261, 23851, 1155, 353, 8840, 836, 8, 341, 2233, 4202, 16273, 741, 42400, 1669, 9289, 16766, 7082, 92261, 278, 17856, 1178, 198, 88814, 1669, 293, 9978, 92261, 23851, 741, 743, 5042, 961, 3601, 341, 197, 3244, 13080, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestCheckMatchErrors(t *testing.T) { tests := []struct { note string query string }{ {"null", "null = true"}, {"boolean", "true = null"}, {"number", "1 = null"}, {"string", `"hello" = null`}, {"array", "[1,2,3] = null"}, {"array-nested", `[1,2,3] = [1,2,"3"]`}, {"array-nested-2", `[1,2] = [1,2,3]`}, {"array-dynamic", `[ true | true ] = [x | a = [1, "foo"]; x = a[_]]`}, {"object", `{"a": 1, "b": 2} = null`}, {"object-nested", `{"a": 1, "b": "2"} = {"a": 1, "b": 2}`}, {"object-nested-2", `{"a": 1} = {"a": 1, "b": "2"}`}, {"set", "{1,2,3} = null"}, {"any", `x = ["str", 1]; x[_] = null`}, } for _, tc := range tests { t.Run(tc.note, func(t *testing.T) { body := MustParseBody(tc.query) checker := newTypeChecker() _, err := checker.CheckBody(nil, body) if len(err) != 1 { t.Fatalf("Expected exactly one error from %v, but got:\n%v", body, err) } }) } }
explode_data.jsonl/14562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 445 }
[ 2830, 3393, 3973, 8331, 13877, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 9038, 1272, 220, 914, 198, 197, 27274, 914, 198, 197, 59403, 197, 197, 4913, 2921, 497, 330, 2921, 284, 830, 7115, 197, 197, 4913, 6117, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPulsarProducer(t *testing.T) { pulsarAddress, _ := Params.Load("_PulsarAddress") pc, err := GetPulsarClientInstance(pulsar.ClientOptions{URL: pulsarAddress}) defer pc.Close() assert.NoError(t, err) assert.NotNil(t, pc) topic := "TEST" producer, err := pc.CreateProducer(ProducerOptions{Topic: topic}) assert.Nil(t, err) assert.NotNil(t, producer) pulsarProd := producer.(*pulsarProducer) assert.Equal(t, pulsarProd.Topic(), topic) msg := &ProducerMessage{ Payload: []byte{}, Properties: map[string]string{}, } _, err = producer.Send(context.TODO(), msg) assert.Nil(t, err) pulsarProd.Close() }
explode_data.jsonl/67683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 47, 14295, 277, 45008, 1155, 353, 8840, 836, 8, 341, 3223, 14295, 277, 4286, 11, 716, 1669, 34352, 13969, 16975, 47, 14295, 277, 4286, 1138, 82013, 11, 1848, 1669, 2126, 47, 14295, 277, 2959, 2523, 1295, 14295, 277, 11716, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGoRun(t *testing.T) { e := newTestGo() s, _, _ := stressClient.NewTestStressTest() e.Run(s) if e == nil { t.Fail() } }
explode_data.jsonl/43288
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 10850, 6727, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 501, 2271, 10850, 741, 1903, 11, 8358, 716, 1669, 8464, 2959, 7121, 2271, 623, 673, 2271, 741, 7727, 16708, 1141, 340, 743, 384, 621, 2092, 341, 197, 3244, 57243, 741,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestDeleteOrganization(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) org := AssertExistsAndLoadBean(t, &User{ID: 6}).(*User) assert.NoError(t, DeleteOrganization(org)) AssertNotExistsBean(t, &User{ID: 6}) AssertNotExistsBean(t, &OrgUser{OrgID: 6}) AssertNotExistsBean(t, &Team{OrgID: 6}) org = AssertExistsAndLoadBean(t, &User{ID: 3}).(*User) err := DeleteOrganization(org) assert.Error(t, err) assert.True(t, IsErrUserOwnRepos(err)) user := AssertExistsAndLoadBean(t, &User{ID: 5}).(*User) assert.Error(t, DeleteOrganization(user)) CheckConsistencyFor(t, &User{}, &Team{}) }
explode_data.jsonl/71060
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 6435, 41574, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 2398, 87625, 1669, 5319, 15575, 3036, 5879, 10437, 1155, 11, 609, 1474, 90, 915, 25, 220, 21, 16630, 4071, 1474, 340, 6948, 35699, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1