text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestValidFunc(t *testing.T) { var err error var conv = &KConv method := KDbug.GetMethod(conv, "BaseConvert") //不存在的方法 _, _, err = ValidFunc("test", "echo") if err == nil { t.Error("ValidFunc fail") return } //参数数量不足 _, _, err = ValidFunc(method, "12345") if err == nil { t.Error("ValidFunc fail") return } //参数类型错误 _, _, err = ValidFunc(method, 0, "12345", "10", 16) if err == nil { t.Error("ValidFunc fail") return } }
explode_data.jsonl/74247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 4088, 9626, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 2405, 5686, 284, 609, 42, 34892, 198, 42257, 1669, 62990, 2313, 2234, 3523, 54995, 11, 330, 3978, 12012, 5130, 197, 322, 69184, 104339, 198, 197, 6878, 8358, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestOpenRetries(t *testing.T) { // Can't force a permission denied error if run as root. u, err := user.Current() if err != nil { t.Skip(fmt.Sprintf("Couldn't determine current user id: %s", err)) } if u.Uid == "0" { t.Skip("Skipping test when run as root") } tmpDir, rmTmpDir := testutil.TestTempDir(t) defer rmTmpDir() logfile := filepath.Join(tmpDir, "log") if _, err := os.OpenFile(logfile, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0); err != nil { t.Fatal(err) } if _, err := NewFile(logfile, nil, false, nil); err == nil || !os.IsPermission(err) { t.Fatalf("Expected a permission denied error here: %s", err) } }
explode_data.jsonl/76854
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 5002, 12020, 4019, 1155, 353, 8840, 836, 8, 341, 197, 322, 2980, 944, 5344, 264, 7882, 14820, 1465, 421, 1598, 438, 3704, 624, 10676, 11, 1848, 1669, 1196, 11517, 741, 743, 1848, 961, 2092, 341, 197, 3244, 57776, 28197, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestParseOsdDump(t *testing.T) { c := &Ceph{Log: testutil.Logger{}} dump, err := c.parseDump(osdPerfDump) require.NoError(t, err) require.InEpsilon(t, 552132.109360000, dump["filestore"]["commitcycle_interval.sum"], epsilon) require.Equal(t, float64(0), dump["mutex-FileJournal::finisher_lock"]["wait.avgcount"]) }
explode_data.jsonl/40221
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 14463, 46, 13446, 51056, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 609, 34, 23544, 90, 2201, 25, 1273, 1314, 12750, 6257, 532, 2698, 1510, 11, 1848, 1669, 272, 4632, 51056, 9638, 67, 3889, 69, 51056, 340, 17957, 35699, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetFromMap(t *testing.T) { tests := []struct { name string setMap map[string]bool wantStr string }{ { name: "set multiple features", setMap: map[string]bool{ "a": true, "b": false, }, wantStr: "a=true,b=false", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { gates := NewFeatureGate() gates.SetFromMap(tt.setMap) got := gates.String() if got != tt.wantStr { t.Errorf("want: %s, got %s", tt.wantStr, got) } }) } }
explode_data.jsonl/34558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 1649, 3830, 2227, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 8196, 2227, 220, 2415, 14032, 96436, 198, 197, 50780, 2580, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParse(t *testing.T) { type args struct { s string } tests := []struct { name string args args want *Traceparent wantErr bool }{ {"ok", args{"01-00010203040506070809000102030405-0607080900010203-00"}, &Traceparent{ version: TraceVersion, traceID: [16]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5}, parentID: [8]byte{6, 7, 8, 9, 0, 1, 2, 3}, traceFlags: 0, }, false}, {"ok", args{"01-00010203040506070809000102030405-0607080900010203-01"}, &Traceparent{ version: TraceVersion, traceID: [16]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5}, parentID: [8]byte{6, 7, 8, 9, 0, 1, 2, 3}, traceFlags: FlagSampled, }, false}, {"fail", args{""}, nil, true}, {"fail", args{"1-00010203040506070809000102030405-0607080900010203-01"}, nil, true}, {"fail", args{"01-0010203040506070809000102030405-0607080900010203-01"}, nil, true}, {"fail", args{"01-00010203040506070809000102030405-607080900010203-01"}, nil, true}, {"fail", args{"01-00010203040506070809000102030405-0607080900010203-1"}, nil, true}, {"fail", args{"0x-00010203040506070809000102030405-0607080900010203-01"}, nil, true}, {"fail", args{"01-0001020304050607080900010203040x-0607080900010203-01"}, nil, true}, {"fail", args{"01-00010203040506070809000102030405-060708090001020x-01"}, nil, true}, {"fail", args{"01-00010203040506070809000102030405-0607080900010203-0x"}, nil, true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := Parse(tt.args.s) if (err != nil) != tt.wantErr { t.Errorf("Parse() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("Parse() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/35200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 847 }
[ 2830, 3393, 14463, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 2827, 198, 197, 50780, 262, 353, 6550, 3765, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAppCreation(t *testing.T) { ctx := Given(t) ctx. Path(guestbookPath). When(). Create(). Then(). Expect(SyncStatusIs(SyncStatusCodeOutOfSync)). And(func(app *Application) { assert.Equal(t, Name(), app.Name) assert.Equal(t, RepoURL(RepoURLTypeFile), app.Spec.Source.RepoURL) assert.Equal(t, guestbookPath, app.Spec.Source.Path) assert.Equal(t, DeploymentNamespace(), app.Spec.Destination.Namespace) assert.Equal(t, common.KubernetesInternalAPIServerAddr, app.Spec.Destination.Server) }). Expect(Event(EventReasonResourceCreated, "create")). And(func(_ *Application) { // app should be listed output, err := RunCli("app", "list") assert.NoError(t, err) assert.Contains(t, output, Name()) }). When(). // ensure that create is idempotent Create(). Then(). Given(). Revision("master"). When(). // ensure that update replaces spec and merge labels and annotations And(func() { FailOnErr(AppClientset.ArgoprojV1alpha1().Applications(ArgoCDNamespace).Patch(context.Background(), ctx.GetName(), types.MergePatchType, []byte(`{"metadata": {"labels": { "test": "label" }, "annotations": { "test": "annotation" }}}`), metav1.PatchOptions{})) }). Create("--upsert"). Then(). And(func(app *Application) { assert.Equal(t, "label", app.Labels["test"]) assert.Equal(t, "annotation", app.Annotations["test"]) assert.Equal(t, "master", app.Spec.Source.TargetRevision) }) }
explode_data.jsonl/66666
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 572 }
[ 2830, 3393, 2164, 32701, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 16246, 1155, 692, 20985, 624, 197, 69640, 3268, 3045, 2190, 1820, 4292, 197, 197, 4498, 25829, 197, 75569, 25829, 197, 197, 12209, 25829, 197, 35911, 3759, 1721, 2522, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxnEventMarshalWithApdex(t *testing.T) { e := sampleTxnEvent e.Zone = ApdexFailing testTxnEventJSON(t, &e, `[ { "type":"Transaction", "name":"myName", "timestamp":1.488393111e+09, "nr.apdexPerfZone":"F", "error":false, "duration":2, "totalTime":3, "guid":"txn-id", "traceId":"txn-id", "priority":0.500000, "sampled":false }, {}, {}]`) }
explode_data.jsonl/37458
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 31584, 77, 1556, 55438, 2354, 10611, 91769, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 6077, 31584, 77, 1556, 198, 7727, 13476, 603, 284, 5232, 91769, 37, 14277, 198, 18185, 31584, 77, 1556, 5370, 1155, 11, 609, 68, 11, 156...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLatestDockerVersionPushed(t *testing.T) { if testing.Short() { t.Skip("Skipping due to network request against dockerhub") } url := fmt.Sprintf("https://index.docker.io/v1/repositories/sourcegraph/server/tags/%s", latestReleaseDockerServerImageBuild.Version) resp, err := http.Get(url) if err != nil { t.Skip("Failed to contact dockerhub", err) } if resp.StatusCode == 404 { t.Fatalf("sourcegraph/server:%s does not exist on dockerhub. %s", latestReleaseDockerServerImageBuild.Version, url) } if resp.StatusCode != 200 { t.Skip("unexpected response from dockerhub", resp.StatusCode) } }
explode_data.jsonl/3330
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 31992, 35, 13659, 5637, 16644, 291, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 85945, 4152, 311, 3922, 1681, 2348, 26588, 26682, 1138, 197, 630, 19320, 1669, 8879, 17305, 445, 2428, 1110...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCertTypes(t *testing.T) { for keyType, key := range testPublicKeys { cert := &ssh.Certificate{ ValidPrincipals: []string{"gopher1"}, ValidAfter: 0, ValidBefore: ssh.CertTimeInfinity, Key: key, Serial: 1, CertType: ssh.UserCert, SignatureKey: testPublicKeys["rsa"], Permissions: ssh.Permissions{ CriticalOptions: map[string]string{}, Extensions: map[string]string{}, }, } if err := cert.SignCert(rand.Reader, testSigners["rsa"]); err != nil { t.Fatalf("signcert: %v", err) } if err := addCertToAgent(testPrivateKeys[keyType], cert); err != nil { t.Fatalf("%v", err) } if err := addCertToAgentSock(testPrivateKeys[keyType], cert); err != nil { t.Fatalf("%v", err) } } }
explode_data.jsonl/68411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 36934, 4173, 1155, 353, 8840, 836, 8, 341, 2023, 1376, 929, 11, 1376, 1669, 2088, 1273, 12676, 8850, 341, 197, 1444, 529, 1669, 609, 25537, 727, 20962, 515, 298, 197, 4088, 3533, 5706, 1127, 25, 3056, 917, 4913, 70, 16940,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestBoolInt_UnmarshalJSON_GotInt(t *testing.T) { var sampleOut struct { Val BoolInt `json:"val"` } var sampleIn = []byte(`{"val":123456789}`) json.Unmarshal(sampleIn, &sampleOut) if sampleOut.Val.Flag { t.Error("should be false but got true") } if sampleOut.Val.Value == nil { t.Fatalf("should be 123456789 but got nil") } if *sampleOut.Val.Value != 123456789 { t.Errorf("should be 123456789 but got %v", sampleOut.Val.Value) } }
explode_data.jsonl/31042
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 11233, 1072, 40687, 27121, 5370, 2646, 354, 1072, 1155, 353, 8840, 836, 8, 341, 2405, 6077, 2662, 2036, 341, 197, 197, 2208, 12608, 1072, 1565, 2236, 2974, 831, 8805, 197, 532, 2405, 6077, 641, 284, 3056, 3782, 5809, 4913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStatementMode(t *testing.T) { if testing.Short() { t.Skip() } execStatements(t, []string{ "create table stream1(id int, val varbinary(128), primary key(id))", "create table stream2(id int, val varbinary(128), primary key(id))", }) engine.se.Reload(context.Background()) defer execStatements(t, []string{ "drop table stream1", "drop table stream2", }) testcases := []testcase{{ input: []string{ "set @@session.binlog_format='STATEMENT'", "begin", "insert into stream1 values (1, 'aaa')", "update stream1 set val='bbb' where id = 1", "delete from stream1 where id = 1", "commit", "set @@session.binlog_format='ROW'", }, output: [][]string{{ `begin`, `type:INSERT dml:"insert into stream1 values (1, 'aaa')"`, `type:UPDATE dml:"update stream1 set val='bbb' where id = 1"`, `type:DELETE dml:"delete from stream1 where id = 1"`, `gtid`, `commit`, }}, }} runCases(t, nil, testcases, "", nil) }
explode_data.jsonl/10426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 8636, 3636, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 532, 67328, 93122, 1155, 11, 3056, 917, 515, 197, 197, 1, 3182, 1965, 4269, 16, 3724, 526, 11, 1044, 762, 25891, 7, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBuildTiFlashSidecarContainers(t *testing.T) { g := NewGomegaWithT(t) type testcase struct { name string flashConfig *v1alpha1.TiFlashConfig expect []corev1.Container resource bool } tests := []*testcase{ { name: "nil config", flashConfig: nil, expect: defaultSideCarContainers, }, { name: "empty config", flashConfig: &v1alpha1.TiFlashConfig{}, expect: defaultSideCarContainers, }, { name: "custom config", flashConfig: &customTiFlashLogConfig, expect: customSideCarContainers, }, { name: "custom resource config", flashConfig: &customTiFlashLogConfig, expect: customResourceSideCarContainers, resource: true, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { tc := newTidbCluster() tc.Spec.TiFlash.Config = test.flashConfig if test.resource { tc.Spec.TiFlash.LogTailer = &v1alpha1.LogTailerSpec{} tc.Spec.TiFlash.LogTailer.ResourceRequirements = corev1.ResourceRequirements{ Requests: corev1.ResourceList{ corev1.ResourceCPU: resource.MustParse("1"), corev1.ResourceMemory: resource.MustParse("2Gi"), corev1.ResourceStorage: resource.MustParse("100Gi"), }, } } cs := buildTiFlashSidecarContainers(tc) g.Expect(cs).To(Equal(test.expect)) }) } }
explode_data.jsonl/76413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 621 }
[ 2830, 3393, 11066, 45351, 23876, 16384, 6918, 74632, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 692, 13158, 70080, 2036, 341, 197, 11609, 286, 914, 198, 197, 94170, 2648, 353, 85, 16, 7141, 16, 836, 72,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReadJSON(t *testing.T) { writer := iwriter.New(client, testDB) id1, err := idProvider.ID() require.Nil(t, err, fmt.Sprintf("got unexpected error: %s", err)) m := json.Message{ Channel: id1, Publisher: id1, Created: time.Now().UnixNano(), Subtopic: "subtopic/format/some_json", Protocol: "coap", Payload: map[string]interface{}{ "field_1": 123.0, "field_2": "value", "field_3": false, }, } messages1 := json.Messages{ Format: format1, } msgs1 := []map[string]interface{}{} for i := 0; i < msgsNum; i++ { messages1.Data = append(messages1.Data, m) m := toMap(m) msgs1 = append(msgs1, m) } err = writer.Consume(messages1) assert.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) id2, err := idProvider.ID() require.Nil(t, err, fmt.Sprintf("got unexpected error: %s", err)) m = json.Message{ Channel: id2, Publisher: id2, Created: time.Now().UnixNano() + msgsNum, Subtopic: "subtopic/other_format/some_other_json", Protocol: "udp", Payload: map[string]interface{}{ "field_pi": 3.14159265, }, } messages2 := json.Messages{ Format: format2, } msgs2 := []map[string]interface{}{} for i := 0; i < msgsNum; i++ { msg := m if i%2 == 0 { msg.Protocol = httpProt } messages2.Data = append(messages2.Data, msg) m := toMap(msg) msgs2 = append(msgs2, m) } err = writer.Consume(messages2) assert.Nil(t, err, fmt.Sprintf("expected no error got %s\n", err)) httpMsgs := []map[string]interface{}{} for i := 0; i < msgsNum; i += 2 { httpMsgs = append(httpMsgs, msgs2[i]) } reader := ireader.New(client, testDB) cases := map[string]struct { chanID string pageMeta readers.PageMetadata page readers.MessagesPage }{ "read message page for existing channel": { chanID: id1, pageMeta: readers.PageMetadata{ Format: messages1.Format, Offset: 0, Limit: 1, }, page: readers.MessagesPage{ Total: msgsNum, Messages: fromJSON(msgs1[:1]), }, }, "read message page for non-existent channel": { chanID: wrongID, pageMeta: readers.PageMetadata{ Format: messages1.Format, Offset: 0, Limit: 10, }, page: 
readers.MessagesPage{ Messages: []readers.Message{}, }, }, "read message last page": { chanID: id2, pageMeta: readers.PageMetadata{ Format: messages2.Format, Offset: msgsNum - 20, Limit: msgsNum, }, page: readers.MessagesPage{ Total: msgsNum, Messages: fromJSON(msgs2[msgsNum-20 : msgsNum]), }, }, "read message with protocol": { chanID: id2, pageMeta: readers.PageMetadata{ Format: messages2.Format, Offset: 0, Limit: uint64(msgsNum / 2), Protocol: httpProt, }, page: readers.MessagesPage{ Total: uint64(msgsNum / 2), Messages: fromJSON(httpMsgs), }, }, } for desc, tc := range cases { result, err := reader.ReadAll(tc.chanID, tc.pageMeta) for i := 0; i < len(result.Messages); i++ { m := result.Messages[i] // Remove time as it is not sent by the client. delete(m.(map[string]interface{}), "time") result.Messages[i] = m } assert.Nil(t, err, fmt.Sprintf("%s: expected no error got %s", desc, err)) assert.ElementsMatch(t, tc.page.Messages, result.Messages, fmt.Sprintf("%s: expected \n%v got \n%v", desc, tc.page.Messages, result.Messages)) assert.Equal(t, tc.page.Total, result.Total, fmt.Sprintf("%s: expected %v got %v", desc, tc.page.Total, result.Total)) } }
explode_data.jsonl/82480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1536 }
[ 2830, 3393, 4418, 5370, 1155, 353, 8840, 836, 8, 341, 38959, 1669, 600, 18189, 7121, 12805, 11, 1273, 3506, 692, 15710, 16, 11, 1848, 1669, 877, 5179, 9910, 741, 17957, 59678, 1155, 11, 1848, 11, 8879, 17305, 445, 22390, 16500, 1465, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetKeyWhenFindObjectForPrivateKeyFails(t *testing.T) { s, ctx := newSessionWithMock() pubKey := &rsa.PublicKey{N: big.NewInt(1), E: 1} // test newSigner fails when FindObject for private key fails ctx.GetAttributeValueFunc = func(pkcs11.SessionHandle, pkcs11.ObjectHandle, []*pkcs11.Attribute) ([]*pkcs11.Attribute, error) { return []*pkcs11.Attribute{pkcs11.NewAttribute(pkcs11.CKA_KEY_TYPE, []byte{0, 0, 0, 0, 0, 0, 0, 0})}, nil } _, err := s.NewSigner("label", pubKey) test.AssertError(t, err, "newSigner didn't fail when GetRSAPublicKey fails") // test newSigner fails when GetECDSAPublicKey fails ctx.GetAttributeValueFunc = func(pkcs11.SessionHandle, pkcs11.ObjectHandle, []*pkcs11.Attribute) ([]*pkcs11.Attribute, error) { return []*pkcs11.Attribute{pkcs11.NewAttribute(pkcs11.CKA_KEY_TYPE, []byte{3, 0, 0, 0, 0, 0, 0, 0})}, nil } _, err = s.NewSigner("label", pubKey) test.AssertError(t, err, "newSigner didn't fail when GetECDSAPublicKey fails") }
explode_data.jsonl/1166
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 394 }
[ 2830, 3393, 1949, 1592, 4498, 80835, 2461, 75981, 37, 6209, 1155, 353, 8840, 836, 8, 341, 1903, 11, 5635, 1669, 501, 5283, 2354, 11571, 741, 62529, 1592, 1669, 609, 60869, 49139, 1592, 90, 45, 25, 2409, 7121, 1072, 7, 16, 701, 468, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSpanFinishTwice(t *testing.T) { assert := assert.New(t) wait := time.Millisecond * 2 tracer, _, _, stop := startTestTracer(t) defer stop() assert.Equal(tracer.payload.itemCount(), 0) // the finish must be idempotent span := tracer.newRootSpan("pylons.request", "pylons", "/") time.Sleep(wait) span.Finish() tracer.awaitPayload(t, 1) previousDuration := span.Duration time.Sleep(wait) span.Finish() assert.Equal(previousDuration, span.Duration) tracer.awaitPayload(t, 1) }
explode_data.jsonl/42838
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 12485, 25664, 22816, 558, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 48750, 1669, 882, 71482, 353, 220, 17, 271, 25583, 9584, 11, 8358, 8358, 2936, 1669, 1191, 2271, 1282, 9584, 1155, 340, 16867, 2936, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShouldNotRefreshUserGroupsFromBackendWhenDisabled(t *testing.T) { mock := mocks.NewMockAutheliaCtx(t) defer mock.Close() // Setup user john. user := &authentication.UserDetails{ Username: "john", Groups: []string{ "admin", "users", }, Emails: []string{ "john@example.com", }, } mock.UserProviderMock.EXPECT().GetDetails("john").Times(0) clock := mocks.TestingClock{} clock.Set(time.Now()) userSession := mock.Ctx.GetSession() userSession.Username = user.Username userSession.AuthenticationLevel = authentication.TwoFactor userSession.LastActivity = clock.Now().Unix() userSession.RefreshTTL = clock.Now().Add(-1 * time.Minute) userSession.Groups = user.Groups userSession.Emails = user.Emails userSession.KeepMeLoggedIn = true err := mock.Ctx.SaveSession(userSession) require.NoError(t, err) mock.Ctx.Request.Header.Set("X-Original-URL", "https://two-factor.example.com") config := verifyGetCfg config.RefreshInterval = schema.ProfileRefreshDisabled VerifyGet(config)(mock.Ctx) assert.Equal(t, 200, mock.Ctx.Response.StatusCode()) // Session time should NOT have been updated, it should still have a refresh TTL 1 minute in the past. userSession = mock.Ctx.GetSession() assert.Equal(t, clock.Now().Add(-1*time.Minute).Unix(), userSession.RefreshTTL.Unix()) }
explode_data.jsonl/20202
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 474 }
[ 2830, 3393, 14996, 2623, 14567, 1474, 22173, 3830, 29699, 4498, 25907, 1155, 353, 8840, 836, 8, 341, 77333, 1669, 68909, 7121, 11571, 5087, 35929, 23684, 1155, 340, 16867, 7860, 10421, 2822, 197, 322, 18626, 1196, 39642, 624, 19060, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWithin(t *testing.T) { t.Parallel() for i, test := range []struct { s []float64 v float64 idx int panics bool }{ { s: []float64{1, 2, 5, 9}, v: 1, idx: 0, }, { s: []float64{1, 2, 5, 9}, v: 9, idx: -1, }, { s: []float64{1, 2, 5, 9}, v: 1.5, idx: 0, }, { s: []float64{1, 2, 5, 9}, v: 2, idx: 1, }, { s: []float64{1, 2, 5, 9}, v: 2.5, idx: 1, }, { s: []float64{1, 2, 5, 9}, v: -3, idx: -1, }, { s: []float64{1, 2, 5, 9}, v: 15, idx: -1, }, { s: []float64{1, 2, 5, 9}, v: math.NaN(), idx: -1, }, { s: []float64{5, 2, 6}, panics: true, }, { panics: true, }, { s: []float64{1}, panics: true, }, } { var idx int panics := Panics(func() { idx = Within(test.s, test.v) }) if panics { if !test.panics { t.Errorf("Case %v: bad panic", i) } continue } if test.panics { if !panics { t.Errorf("Case %v: did not panic when it should", i) } continue } if idx != test.idx { t.Errorf("Case %v: Idx mismatch. Want: %v, got: %v", i, test.idx, idx) } } }
explode_data.jsonl/1244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 748 }
[ 2830, 3393, 41961, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 600, 11, 1273, 1669, 2088, 3056, 1235, 341, 197, 1903, 414, 3056, 3649, 21, 19, 198, 197, 5195, 414, 2224, 21, 19, 198, 197, 62077, 262, 526, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetIndex(t *testing.T) { req, err := http.NewRequest("GET", "/", nil) if err != nil { log.Fatal(err) } w := httptest.NewRecorder() indexHandler(w, req) assertStringEquals(t, "200", fmt.Sprintf("%d", w.Code)) assertStringEquals(t, "text/html; charset=utf-8", w.Header().Get("Content-Type")) assertStringContains(t, w.Body.String(), "<title>The Favicon Finder</title>") }
explode_data.jsonl/25588
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 1949, 1552, 1155, 353, 8840, 836, 8, 341, 24395, 11, 1848, 1669, 1758, 75274, 445, 3806, 497, 64657, 2092, 340, 743, 1848, 961, 2092, 341, 197, 6725, 26133, 3964, 340, 197, 630, 6692, 1669, 54320, 70334, 7121, 47023, 741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPathFilter(t *testing.T) { type resultDesc struct { path string match bool } type testDesc struct { filter string result []resultDesc } testData := []testDesc{ { filter: "Sensors.Vehicle.Door", result: []resultDesc{ {"Sensors.Vehicle.Door", true}, {"Sensors.Vehicle.DoorFront", false}, {"Sensors.Vehicle.DoorRear", false}, }, }, { filter: "Sensors.Vehicle.Door", result: []resultDesc{ {"Sensors.Vehicle.Door.Front", true}, {"Sensors.Vehicle.Door.Rear", true}, }, }, { filter: "Sensors.Vehicle.*", result: []resultDesc{ {"Sensors.Vehicles", false}, {"Sensors.Vehicle.Door.Front", true}, {"Sensors.Vehicle.Door.Rear", true}, {"Sensors.Vehicle.Engine", true}, {"Sensors.Vehicle.Window.Front.Position", true}, {"Sensors.Vehicle.Window.Rear.Position", true}, {"Sensors.Engine.Temp", false}, {"Sensors.Engine.RPM", false}, }, }, { filter: "Sensors.Vehicle.*.Front", result: []resultDesc{ {"Sensors.Vehicle.Door.Front", true}, {"Sensors.Vehicle.Door.Rear", false}, {"Sensors.Vehicle.Engine", false}, {"Sensors.Vehicle.Window.Front.Position", true}, {"Sensors.Vehicle.Window.Rear.Position", false}, }, }, } for _, testItem := range testData { regexp, err := dataprovider.CreatePathFilter(testItem.filter) if err != nil { t.Errorf("Can't create regexp from path: %s", err) continue } for _, result := range testItem.result { if regexp.Match(result.path) != result.match { if result.match { t.Errorf("Path %s doesn't match filter %s", result.path, testItem.filter) } else { t.Errorf("Path %s shouldn't match filter %s", result.path, testItem.filter) } } } } }
explode_data.jsonl/26926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 778 }
[ 2830, 3393, 1820, 5632, 1155, 353, 8840, 836, 8, 341, 13158, 1102, 11065, 2036, 341, 197, 26781, 220, 914, 198, 197, 47706, 1807, 198, 197, 630, 13158, 1273, 11065, 2036, 341, 197, 50108, 914, 198, 197, 9559, 3056, 1382, 11065, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestSetInstanceSysvarBySetGlobalSysVar(t *testing.T) { varName := "tidb_general_log" defaultValue := "OFF" // This is the default value for tidb_general_log store, clean := realtikvtest.CreateMockStoreAndSetup(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") se := tk.Session().(variable.GlobalVarAccessor) // Get globalSysVar twice and get the same default value v, err := se.GetGlobalSysVar(varName) require.NoError(t, err) require.Equal(t, defaultValue, v) v, err = se.GetGlobalSysVar(varName) require.NoError(t, err) require.Equal(t, defaultValue, v) // session.GetGlobalSysVar would not get the value which session.SetGlobalSysVar writes, // because SetGlobalSysVar calls SetGlobalFromHook, which uses TiDBGeneralLog's SetGlobal, // but GetGlobalSysVar could not access TiDBGeneralLog's GetGlobal. // set to "1" err = se.SetGlobalSysVar(varName, "ON") require.NoError(t, err) v, err = se.GetGlobalSysVar(varName) tk.MustQuery("select @@global.tidb_general_log").Check(testkit.Rows("1")) require.NoError(t, err) require.Equal(t, defaultValue, v) // set back to "0" err = se.SetGlobalSysVar(varName, defaultValue) require.NoError(t, err) v, err = se.GetGlobalSysVar(varName) tk.MustQuery("select @@global.tidb_general_log").Check(testkit.Rows("0")) require.NoError(t, err) require.Equal(t, defaultValue, v) }
explode_data.jsonl/5806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 501 }
[ 2830, 3393, 1649, 2523, 32792, 947, 1359, 1649, 11646, 32792, 3962, 1155, 353, 8840, 836, 8, 341, 2405, 675, 1669, 330, 24449, 65, 39177, 5224, 698, 11940, 1130, 1669, 330, 27068, 1, 442, 1096, 374, 279, 1638, 897, 369, 13112, 65, 391...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExportKeyingMaterial(t *testing.T) { // Check for leaking routines report := test.CheckRoutines(t) defer report() var rand [28]byte exportLabel := "EXTRACTOR-dtls_srtp" expectedServerKey := []byte{0x61, 0x09, 0x9d, 0x7d, 0xcb, 0x08, 0x52, 0x2c, 0xe7, 0x7b} expectedClientKey := []byte{0x87, 0xf0, 0x40, 0x02, 0xf6, 0x1c, 0xf1, 0xfe, 0x8c, 0x77} c := &Conn{ state: State{ localRandom: handshakeRandom{time.Unix(500, 0), rand}, remoteRandom: handshakeRandom{time.Unix(1000, 0), rand}, localSequenceNumber: []uint64{0, 0}, cipherSuite: &cipherSuiteTLSEcdheEcdsaWithAes128GcmSha256{}, }, } c.setLocalEpoch(0) c.setRemoteEpoch(0) state := c.ConnectionState() _, err := state.ExportKeyingMaterial(exportLabel, nil, 0) if err != errHandshakeInProgress { t.Errorf("ExportKeyingMaterial when epoch == 0: expected '%s' actual '%s'", errHandshakeInProgress, err) } c.setLocalEpoch(1) state = c.ConnectionState() _, err = state.ExportKeyingMaterial(exportLabel, []byte{0x00}, 0) if err != errContextUnsupported { t.Errorf("ExportKeyingMaterial with context: expected '%s' actual '%s'", errContextUnsupported, err) } for k := range invalidKeyingLabels { state = c.ConnectionState() _, err = state.ExportKeyingMaterial(k, nil, 0) if err != errReservedExportKeyingMaterial { t.Errorf("ExportKeyingMaterial reserved label: expected '%s' actual '%s'", errReservedExportKeyingMaterial, err) } } state = c.ConnectionState() keyingMaterial, err := state.ExportKeyingMaterial(exportLabel, nil, 10) if err != nil { t.Errorf("ExportKeyingMaterial as server: unexpected error '%s'", err) } else if !bytes.Equal(keyingMaterial, expectedServerKey) { t.Errorf("ExportKeyingMaterial client export: expected (% 02x) actual (% 02x)", expectedServerKey, keyingMaterial) } c.state.isClient = true state = c.ConnectionState() keyingMaterial, err = state.ExportKeyingMaterial(exportLabel, nil, 10) if err != nil { t.Errorf("ExportKeyingMaterial as server: unexpected error '%s'", err) } else if !bytes.Equal(keyingMaterial, 
expectedClientKey) { t.Errorf("ExportKeyingMaterial client export: expected (% 02x) actual (% 02x)", expectedClientKey, keyingMaterial) } }
explode_data.jsonl/40931
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 843 }
[ 2830, 3393, 16894, 1592, 287, 13415, 1155, 353, 8840, 836, 8, 341, 197, 322, 4248, 369, 51829, 29497, 198, 69931, 1669, 1273, 10600, 49, 28628, 1155, 340, 16867, 1895, 2822, 2405, 10382, 508, 17, 23, 90184, 198, 59440, 2476, 1669, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestGetPostsForChannel(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() Client := th.Client post1 := th.CreatePost() post2 := th.CreatePost() post3 := &model.Post{ChannelId: th.BasicChannel.Id, Message: "zz" + model.NewId() + "a", RootId: post1.Id} post3, _ = Client.CreatePost(post3) time.Sleep(300 * time.Millisecond) since := model.GetMillis() time.Sleep(300 * time.Millisecond) post4 := th.CreatePost() posts, resp := Client.GetPostsForChannel(th.BasicChannel.Id, 0, 60, "") CheckNoError(t, resp) require.Equal(t, post4.Id, posts.Order[0], "wrong order") require.Equal(t, post3.Id, posts.Order[1], "wrong order") require.Equal(t, post2.Id, posts.Order[2], "wrong order") require.Equal(t, post1.Id, posts.Order[3], "wrong order") posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 0, 3, resp.Etag) CheckEtag(t, posts, resp) posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 0, 3, "") CheckNoError(t, resp) require.Len(t, posts.Order, 3, "wrong number returned") _, ok := posts.Posts[post3.Id] require.True(t, ok, "missing comment") _, ok = posts.Posts[post1.Id] require.True(t, ok, "missing root post") posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 1, 1, "") CheckNoError(t, resp) require.Equal(t, post3.Id, posts.Order[0], "wrong order") posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 10000, 10000, "") CheckNoError(t, resp) require.Empty(t, posts.Order, "should be no posts") post5 := th.CreatePost() posts, resp = Client.GetPostsSince(th.BasicChannel.Id, since) CheckNoError(t, resp) require.Len(t, posts.Posts, 2, "should return 2 posts") // "since" query to return empty NextPostId and PrevPostId require.Equal(t, "", posts.NextPostId, "should return an empty NextPostId") require.Equal(t, "", posts.PrevPostId, "should return an empty PrevPostId") found := make([]bool, 2) for _, p := range posts.Posts { require.LessOrEqual(t, since, p.CreateAt, "bad create at for post returned") if p.Id == post4.Id { found[0] = true } 
else if p.Id == post5.Id { found[1] = true } } for _, f := range found { require.True(t, f, "missing post") } _, resp = Client.GetPostsForChannel("", 0, 60, "") CheckBadRequestStatus(t, resp) _, resp = Client.GetPostsForChannel("junk", 0, 60, "") CheckBadRequestStatus(t, resp) _, resp = Client.GetPostsForChannel(model.NewId(), 0, 60, "") CheckForbiddenStatus(t, resp) Client.Logout() _, resp = Client.GetPostsForChannel(model.NewId(), 0, 60, "") CheckUnauthorizedStatus(t, resp) _, resp = th.SystemAdminClient.GetPostsForChannel(th.BasicChannel.Id, 0, 60, "") CheckNoError(t, resp) // more tests for next_post_id, prev_post_id, and order // There are 12 posts composed of first 2 system messages and 10 created posts Client.Login(th.BasicUser.Email, th.BasicUser.Password) th.CreatePost() // post6 post7 := th.CreatePost() post8 := th.CreatePost() th.CreatePost() // post9 post10 := th.CreatePost() // get the system post IDs posted before the created posts above posts, resp = Client.GetPostsBefore(th.BasicChannel.Id, post1.Id, 0, 2, "") systemPostId1 := posts.Order[1] // similar to '/posts' posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 0, 60, "") CheckNoError(t, resp) require.Len(t, posts.Order, 12, "expected 12 posts") require.Equal(t, post10.Id, posts.Order[0], "posts not in order") require.Equal(t, systemPostId1, posts.Order[11], "posts not in order") require.Equal(t, "", posts.NextPostId, "should return an empty NextPostId") require.Equal(t, "", posts.PrevPostId, "should return an empty PrevPostId") // similar to '/posts?per_page=3' posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 0, 3, "") CheckNoError(t, resp) require.Len(t, posts.Order, 3, "expected 3 posts") require.Equal(t, post10.Id, posts.Order[0], "posts not in order") require.Equal(t, post8.Id, posts.Order[2], "should return 3 posts and match order") require.Equal(t, "", posts.NextPostId, "should return an empty NextPostId") require.Equal(t, post7.Id, posts.PrevPostId, "should return 
post7.Id as PrevPostId") // similar to '/posts?per_page=3&page=1' posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 1, 3, "") CheckNoError(t, resp) require.Len(t, posts.Order, 3, "expected 3 posts") require.Equal(t, post7.Id, posts.Order[0], "posts not in order") require.Equal(t, post5.Id, posts.Order[2], "posts not in order") require.Equal(t, post8.Id, posts.NextPostId, "should return post8.Id as NextPostId") require.Equal(t, post4.Id, posts.PrevPostId, "should return post4.Id as PrevPostId") // similar to '/posts?per_page=3&page=2' posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 2, 3, "") CheckNoError(t, resp) require.Len(t, posts.Order, 3, "expected 3 posts") require.Equal(t, post4.Id, posts.Order[0], "posts not in order") require.Equal(t, post2.Id, posts.Order[2], "should return 3 posts and match order") require.Equal(t, post5.Id, posts.NextPostId, "should return post5.Id as NextPostId") require.Equal(t, post1.Id, posts.PrevPostId, "should return post1.Id as PrevPostId") // similar to '/posts?per_page=3&page=3' posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 3, 3, "") CheckNoError(t, resp) require.Len(t, posts.Order, 3, "expected 3 posts") require.Equal(t, post1.Id, posts.Order[0], "posts not in order") require.Equal(t, systemPostId1, posts.Order[2], "should return 3 posts and match order") require.Equal(t, post2.Id, posts.NextPostId, "should return post2.Id as NextPostId") require.Equal(t, "", posts.PrevPostId, "should return an empty PrevPostId") // similar to '/posts?per_page=3&page=4' posts, resp = Client.GetPostsForChannel(th.BasicChannel.Id, 4, 3, "") CheckNoError(t, resp) require.Empty(t, posts.Order, "should return 0 post") require.Equal(t, "", posts.NextPostId, "should return an empty NextPostId") require.Equal(t, "", posts.PrevPostId, "should return an empty PrevPostId") }
explode_data.jsonl/5249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2221 }
[ 2830, 3393, 1949, 19631, 2461, 9629, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 51172, 16, 1669, 270, 7251, 4133, 741, 51172, 17, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestOverloadOp(t *testing.T) { gopClTest(t, ` type foo struct { } func (a *foo) + (b *foo) *foo { println("a + b") return &foo{} } func (a foo) - (b foo) foo { println("a - b") return foo{} } func -(a foo) { println("-a") } func ++(a foo) { println("a++") } func (a foo) != (b foo) bool{ println("a!=b") return true } var a, b foo var c = a - b var d = -a // TODO: -a have no return value! var e = a!=b `, `package main import fmt "fmt" type foo struct { } func (a *foo) Gop_Add(b *foo) *foo { fmt.Println("a + b") return &foo{} } func (a foo) Gop_Sub(b foo) foo { fmt.Println("a - b") return foo{} } func (a foo) Gop_NE(b foo) bool { fmt.Println("a!=b") return true } func (a foo) Gop_Neg() { fmt.Println("-a") } func (a foo) Gop_Inc() { fmt.Println("a++") } var a, b foo var c = a.Gop_Sub(b) var d = a.Gop_Neg() var e = a.Gop_NE(b) `) }
explode_data.jsonl/73665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 437 }
[ 2830, 3393, 1918, 1078, 7125, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 1313, 15229, 2036, 341, 630, 2830, 320, 64, 353, 7975, 8, 488, 320, 65, 353, 7975, 8, 353, 7975, 341, 81168, 445, 64, 488, 293, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateLedgerUnsupportedAlgo(t *testing.T) { kb := NewInMemory() _, err := kb.CreateLedger("some_account", Ed25519, "libonomy", 0, 1) assert.Error(t, err) assert.Equal(t, "unsupported signing algo: only secp256k1 is supported", err.Error()) }
explode_data.jsonl/31112
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 4021, 60850, 1389, 41884, 2101, 3346, 1155, 353, 8840, 836, 8, 341, 16463, 65, 1669, 1532, 641, 10642, 741, 197, 6878, 1848, 1669, 38653, 7251, 60850, 1389, 445, 14689, 13500, 497, 3199, 17, 20, 20, 16, 24, 11, 330, 2740, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteLabelValuesWithCollisions(t *testing.T) { vec := NewGaugeVec( GaugeOpts{ Name: "test", Help: "helpless", }, []string{"l1", "l2"}, ) vec.hashAdd = func(h uint64, s string) uint64 { return 1 } vec.hashAddByte = func(h uint64, b byte) uint64 { return 1 } testDeleteLabelValues(t, vec) }
explode_data.jsonl/14621
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 6435, 2476, 6227, 2354, 15265, 6805, 1155, 353, 8840, 836, 8, 341, 40213, 1669, 1532, 38, 19392, 10050, 1006, 197, 9600, 19392, 43451, 515, 298, 21297, 25, 330, 1944, 756, 298, 197, 12689, 25, 330, 8653, 1717, 756, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCannotMatchArgon2idParamPattern(t *testing.T) { ok, err := CheckPassword("password", "$argon2id$v=19$m65536,t3,p2$BpLnfgDsc2WD8F2q$o/vzA4myCqZZ36bUGsDY//8mKUYNZZaR0t4MFFSs+iM") assert.EqualError(t, err, "Hash key is not the last parameter, the hash is likely malformed ($argon2id$v=19$m65536,t3,p2$BpLnfgDsc2WD8F2q$o/vzA4myCqZZ36bUGsDY//8mKUYNZZaR0t4MFFSs+iM)") assert.False(t, ok) }
explode_data.jsonl/40191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 17444, 8331, 2735, 263, 17, 307, 2001, 15760, 1155, 353, 8840, 836, 8, 341, 59268, 11, 1848, 1669, 4248, 4876, 445, 3833, 497, 5201, 70821, 17, 307, 65020, 28, 16, 24, 53516, 21, 20, 20, 18, 21, 13960, 18, 7237, 17, 3,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncode(t *testing.T) { as := assert.New(t) data := []struct { in string expect string }{ {in: "あいうえお", expect: "杮杮杮柿柿柿杮杮杮柿柿柿柿柿柿杮杮柿柿柿柿柿杮柿杮杮杮柿柿柿杮杮杮柿柿柿柿柿柿杮杮柿柿柿柿杮柿柿杮杮杮柿柿柿杮杮杮柿柿柿柿柿柿杮杮柿柿柿柿杮杮柿杮杮杮柿柿柿杮杮杮柿柿柿柿柿柿杮杮柿柿柿杮柿柿柿杮杮杮柿柿柿杮杮杮柿柿柿柿柿柿杮杮柿柿柿杮柿杮柿"}, {in: "abc", expect: "柿杮杮柿柿柿柿杮柿杮杮柿柿柿杮柿柿杮杮柿柿柿杮杮"}, } for _, v := range data { res, err := Encode(v.in) as.NoError(err) as.Equal(v.expect, res) } }
explode_data.jsonl/34457
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 511 }
[ 2830, 3393, 32535, 1155, 353, 8840, 836, 8, 341, 60451, 1669, 2060, 7121, 1155, 692, 8924, 1669, 3056, 1235, 341, 197, 17430, 257, 914, 198, 197, 24952, 914, 198, 197, 59403, 197, 197, 90, 258, 25, 330, 29491, 94504, 57842, 32234, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInstructionsString(t *testing.T) { instructions := []Instructions{ Make(OpConstant, 1), Make(OpConstant, 2), Make(OpConstant, 65535), } expected := `0000 OpConstant 1 0003 OpConstant 2 0006 OpConstant 65535 ` concatenated := Instructions{} for _, ins := range instructions { concatenated = append(concatenated, ins...) } if concatenated.String() != expected { t.Errorf("instructions wrongly formatted.\nwant=%q\ngot=%q", expected, concatenated.String()) } }
explode_data.jsonl/66359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 55291, 703, 1155, 353, 8840, 836, 8, 341, 17430, 19657, 1669, 3056, 55291, 515, 197, 197, 8078, 54494, 15472, 11, 220, 16, 1326, 197, 197, 8078, 54494, 15472, 11, 220, 17, 1326, 197, 197, 8078, 54494, 15472, 11, 220, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestContainerTypePodSandbox(t *testing.T) { var ociSpec specs.Spec ociSpec.Annotations = map[string]string{ annotations.ContainerType: annotations.ContainerTypeSandbox, } testContainerTypeSuccessful(t, ociSpec, vc.PodSandbox) }
explode_data.jsonl/44040
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 4502, 929, 23527, 50, 31536, 1155, 353, 8840, 836, 8, 341, 2405, 93975, 8327, 32247, 36473, 271, 197, 2119, 8327, 91172, 284, 2415, 14032, 30953, 515, 197, 197, 39626, 33672, 929, 25, 32207, 33672, 929, 50, 31536, 345, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestObjectDeleteMarkers(t *testing.T) { s := RunBasicJetStreamServer() defer shutdown(s) nc, js := jsClient(t, s) defer nc.Close() obs, err := js.CreateObjectStore(&nats.ObjectStoreConfig{Bucket: "OBJS"}) expectOk(t, err) msg := bytes.Repeat([]byte("A"), 100) _, err = obs.PutBytes("A", msg) expectOk(t, err) err = obs.Delete("A") expectOk(t, err) si, err := js.StreamInfo("OBJ_OBJS") expectOk(t, err) // We should have one message left. The delete marker. if si.State.Msgs != 1 { t.Fatalf("Expected 1 marker msg, got %d msgs", si.State.Msgs) } // Make sure we have a delete marker, this will be there to drive Watch functionality. info, err := obs.GetInfo("A") expectOk(t, err) if !info.Deleted { t.Fatalf("Expected info to be marked as deleted") } }
explode_data.jsonl/75492
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 308 }
[ 2830, 3393, 1190, 6435, 82405, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 6452, 15944, 35641, 3027, 5475, 741, 16867, 23766, 1141, 692, 197, 1016, 11, 6994, 1669, 6994, 2959, 1155, 11, 274, 340, 16867, 25126, 10421, 2822, 197, 5481, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestClusterReconciler_machineToCluster(t *testing.T) { cluster := &clusterv1.Cluster{ TypeMeta: metav1.TypeMeta{ Kind: "Cluster", }, ObjectMeta: metav1.ObjectMeta{ Name: "test-cluster", Namespace: "test", }, Spec: clusterv1.ClusterSpec{}, Status: clusterv1.ClusterStatus{}, } controlPlaneWithNoderef := &clusterv1.Machine{ TypeMeta: metav1.TypeMeta{ Kind: "Machine", }, ObjectMeta: metav1.ObjectMeta{ Name: "controlPlaneWithNoderef", Labels: map[string]string{ clusterv1.ClusterLabelName: cluster.Name, clusterv1.MachineControlPlaneLabelName: "", }, }, Status: clusterv1.MachineStatus{ NodeRef: &v1.ObjectReference{ Kind: "Node", Namespace: "test-node", }, }, } controlPlaneWithoutNoderef := &clusterv1.Machine{ TypeMeta: metav1.TypeMeta{ Kind: "Machine", }, ObjectMeta: metav1.ObjectMeta{ Name: "controlPlaneWithoutNoderef", Labels: map[string]string{ clusterv1.ClusterLabelName: cluster.Name, clusterv1.MachineControlPlaneLabelName: "", }, }, } nonControlPlaneWithNoderef := &clusterv1.Machine{ TypeMeta: metav1.TypeMeta{ Kind: "Machine", }, ObjectMeta: metav1.ObjectMeta{ Name: "nonControlPlaneWitNoderef", Labels: map[string]string{ clusterv1.ClusterLabelName: cluster.Name, }, }, Status: clusterv1.MachineStatus{ NodeRef: &v1.ObjectReference{ Kind: "Node", Namespace: "test-node", }, }, } nonControlPlaneWithoutNoderef := &clusterv1.Machine{ TypeMeta: metav1.TypeMeta{ Kind: "Machine", }, ObjectMeta: metav1.ObjectMeta{ Name: "nonControlPlaneWithoutNoderef", Labels: map[string]string{ clusterv1.ClusterLabelName: cluster.Name, }, }, } tests := []struct { name string o handler.MapObject want []ctrl.Request }{ { name: "controlplane machine, noderef is set, should return cluster", o: handler.MapObject{ Meta: controlPlaneWithNoderef.GetObjectMeta(), Object: controlPlaneWithNoderef, }, want: []ctrl.Request{ {NamespacedName: client.ObjectKey{ Name: cluster.Name, Namespace: cluster.Namespace, }}, }, }, { name: "controlplane machine, noderef is not set", o: handler.MapObject{ Meta: 
controlPlaneWithoutNoderef.GetObjectMeta(), Object: controlPlaneWithoutNoderef, }, want: nil, }, { name: "not controlplane machine, noderef is set", o: handler.MapObject{ Meta: nonControlPlaneWithNoderef.GetObjectMeta(), Object: nonControlPlaneWithNoderef, }, want: nil, }, { name: "not controlplane machine, noderef is not set", o: handler.MapObject{ Meta: nonControlPlaneWithoutNoderef.GetObjectMeta(), Object: nonControlPlaneWithoutNoderef, }, want: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { r := &ClusterReconciler{ Client: fake.NewFakeClient(cluster, controlPlaneWithNoderef, controlPlaneWithoutNoderef, nonControlPlaneWithNoderef, nonControlPlaneWithoutNoderef), Log: log.Log, } if got := r.controlPlaneMachineToCluster(tt.o); !reflect.DeepEqual(got, tt.want) { t.Errorf("controlPlaneMachineToCluster() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/71243
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1463 }
[ 2830, 3393, 28678, 693, 40446, 5769, 38695, 1249, 28678, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 609, 564, 590, 648, 16, 72883, 515, 197, 27725, 12175, 25, 77520, 16, 10184, 12175, 515, 298, 197, 10629, 25, 330, 28678, 756, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHeadTracker_Save_InsertsAndTrimsTable(t *testing.T) { t.Parallel() db := pgtest.NewGormDB(t) config := newCfg(t) ethClient := cltest.NewEthClientMockWithDefaultChain(t) orm := headtracker.NewORM(db, cltest.FixtureChainID) for idx := 0; idx < 200; idx++ { assert.Nil(t, orm.IdempotentInsertHead(context.TODO(), *cltest.Head(idx))) } ht := createHeadTracker(ethClient, config, orm) h := cltest.Head(200) require.NoError(t, ht.headTracker.Save(context.TODO(), *h)) assert.Equal(t, big.NewInt(200), ht.headTracker.HighestSeenHead().ToInt()) firstHead := firstHead(t, db) assert.Equal(t, big.NewInt(101), firstHead.ToInt()) lastHead, err := orm.LastHead(context.TODO()) require.NoError(t, err) assert.Equal(t, int64(200), lastHead.Number) }
explode_data.jsonl/63746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 12346, 31133, 78746, 76417, 82, 3036, 1282, 5742, 2556, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20939, 1669, 17495, 1944, 7121, 38, 493, 3506, 1155, 340, 25873, 1669, 501, 42467, 1155, 692, 197, 769, 2959, 166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEigen(t *testing.T) { vs := []struct { ax, ay, az float64 }{ {90, 0, 0}, {0, 90, 0}, {0, 0, 90}, {10, 10, 10}, {10, 9, 8}, {1, 2, 4}, {1, -2, 4}, } for i, v := range vs { r := RX(Degrees(v.ax)).XM(RY(Degrees(v.ay))).XM(RZ(Degrees(v.az))) s, v, a, err := r.Eigen() if err != nil { t.Errorf("[%d] failed to generate eigenvector: %v", i, err) continue } if !Zeroish(s - 1) { t.Errorf("[%d] determinant is not 1: %g", i, s) continue } if confirm := r.XV(v); !v.Equals(confirm) { t.Errorf("[%d] engenvector %v of %v is not", i, v, r) continue } if rC, err := v.RV(a); err != nil { t.Errorf("[%d] failed to do rotation ang=%v around %v: %v", i, a, v, err) } else if !rC.Equals(r) { t.Errorf("[%d] rotation %v does not match intended for ang=%v around %v", i, rC, a, v) } } }
explode_data.jsonl/47412
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 71740, 1155, 353, 8840, 836, 8, 341, 5195, 82, 1669, 3056, 1235, 341, 197, 70368, 11, 18898, 11, 12376, 2224, 21, 19, 198, 197, 59403, 197, 197, 90, 24, 15, 11, 220, 15, 11, 220, 15, 1583, 197, 197, 90, 15, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestNotifications(t *testing.T) { Convey("Given the notifications service", t, func() { //bus.ClearBusHandlers() setting.StaticRootPath = "../../../public/" setting.Smtp.Enabled = true setting.Smtp.TemplatesPattern = "emails/*.html" setting.Smtp.FromAddress = "from@address.com" err := Init() So(err, ShouldBeNil) var sentMsg *Message addToMailQueue = func(msg *Message) { sentMsg = msg } Convey("When sending reset email password", func() { err := sendResetPasswordEmail(&m.SendResetPasswordEmailCommand{User: &m.User{Email: "asd@asd.com"}}) So(err, ShouldBeNil) So(sentMsg.Body, ShouldContainSubstring, "body") So(sentMsg.Subject, ShouldEqual, "Reset your Grafana password - asd@asd.com") So(sentMsg.Body, ShouldNotContainSubstring, "Subject") }) }) }
explode_data.jsonl/67645
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 34736, 1155, 353, 8840, 836, 8, 1476, 93070, 5617, 445, 22043, 279, 21969, 2473, 497, 259, 11, 2915, 368, 341, 197, 197, 322, 10338, 13524, 15073, 39949, 2822, 197, 8196, 1280, 58826, 8439, 1820, 284, 30630, 888, 29555, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouterStaticPcre(t *testing.T) { s, c := initial(t) defer s.Stop() router := rstatic.NewRouter( router.WithHandler(rpc.Handler), router.WithRegistry(s.Options().Registry), ) err := router.Register(&api.Endpoint{ Name: "foo.Test.Call", Method: []string{"POST"}, Path: []string{"^/api/v0/test/call/?$"}, Handler: "rpc", }) if err != nil { t.Fatal(err) } hrpc := rpc.NewHandler( handler.WithClient(c), handler.WithRouter(router), ) hsrv := &http.Server{ Handler: hrpc, Addr: "127.0.0.1:6543", WriteTimeout: 15 * time.Second, ReadTimeout: 15 * time.Second, IdleTimeout: 20 * time.Second, MaxHeaderBytes: 1024 * 1024 * 1, // 1Mb } go func() { log.Println(hsrv.ListenAndServe()) }() defer hsrv.Close() time.Sleep(1 * time.Second) check(t, hsrv.Addr, "http://%s/api/v0/test/call", `{"msg":"Hello "}`) }
explode_data.jsonl/58077
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 9523, 11690, 47, 837, 1155, 353, 8840, 836, 8, 341, 1903, 11, 272, 1669, 2856, 1155, 340, 16867, 274, 30213, 2822, 67009, 1669, 435, 1978, 7121, 9523, 1006, 197, 67009, 26124, 3050, 2601, 3992, 31010, 1326, 197, 67009, 26124...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatInRange(t *testing.T) { mat1 := NewMatWithSize(101, 102, MatTypeCV8U) lb := NewMatFromScalar(NewScalar(20.0, 100.0, 100.0, 0.0), MatTypeCV8U) ub := NewMatFromScalar(NewScalar(20.0, 100.0, 100.0, 0.0), MatTypeCV8U) dst := NewMat() InRange(mat1, lb, ub, &dst) if dst.Empty() { t.Error("TestMatAddWeighted dest mat3 should not be empty.") } }
explode_data.jsonl/81708
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 11575, 76059, 1155, 353, 8840, 836, 8, 341, 59874, 16, 1669, 1532, 11575, 2354, 1695, 7, 16, 15, 16, 11, 220, 16, 15, 17, 11, 6867, 929, 19589, 23, 52, 340, 8810, 65, 1669, 1532, 11575, 3830, 20639, 35063, 20639, 7, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFindCoordinatorResponse(t *testing.T) { t.Parallel() errMsg := "kaboom" for _, tc := range []struct { desc string response *FindCoordinatorResponse encoded []byte }{{ desc: "version 0 - no error", response: &FindCoordinatorResponse{ Version: 0, Err: ErrNoError, Coordinator: &Broker{ id: 7, addr: "host:9092", }, }, encoded: []byte{ 0, 0, // Err 0, 0, 0, 7, // Coordinator.ID 0, 4, 'h', 'o', 's', 't', // Coordinator.Host 0, 0, 35, 132, // Coordinator.Port }, }, { desc: "version 1 - no error", response: &FindCoordinatorResponse{ Version: 1, ThrottleTime: 100 * time.Millisecond, Err: ErrNoError, Coordinator: &Broker{ id: 7, addr: "host:9092", }, }, encoded: []byte{ 0, 0, 0, 100, // ThrottleTime 0, 0, // Err 255, 255, // ErrMsg: empty 0, 0, 0, 7, // Coordinator.ID 0, 4, 'h', 'o', 's', 't', // Coordinator.Host 0, 0, 35, 132, // Coordinator.Port }, }, { desc: "version 0 - error", response: &FindCoordinatorResponse{ Version: 0, Err: ErrConsumerCoordinatorNotAvailable, Coordinator: NoNode, }, encoded: []byte{ 0, 15, // Err 255, 255, 255, 255, // Coordinator.ID: -1 0, 0, // Coordinator.Host: "" 255, 255, 255, 255, // Coordinator.Port: -1 }, }, { desc: "version 1 - error", response: &FindCoordinatorResponse{ Version: 1, ThrottleTime: 100 * time.Millisecond, Err: ErrConsumerCoordinatorNotAvailable, ErrMsg: &errMsg, Coordinator: NoNode, }, encoded: []byte{ 0, 0, 0, 100, // ThrottleTime 0, 15, // Err 0, 6, 'k', 'a', 'b', 'o', 'o', 'm', // ErrMsg 255, 255, 255, 255, // Coordinator.ID: -1 0, 0, // Coordinator.Host: "" 255, 255, 255, 255, // Coordinator.Port: -1 }, }} { testResponse(t, tc.desc, tc.response, tc.encoded) } }
explode_data.jsonl/20486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 893 }
[ 2830, 3393, 9885, 64304, 2582, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 9859, 6611, 1669, 330, 74, 370, 4191, 1837, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 41653, 257, 914, 198, 197, 21735, 353, 9885, 64304, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNesting(t *testing.T) { set, err := ParseDir("testdata", "*.html", nil) if err != nil { t.Fatal(err) } for _, path := range []string{ "base/dashboard.html", "base/billing.html", "base/docs/release.html", "/base/dashboard.html", "/base/billing.html", "/base/docs/release.html", } { t.Run(filepath.Base(path), func(t *testing.T) { path := path tmpl := set.Lookup(path) if tmpl == nil { t.Fatalf("no template found for %s", path) } var w bytes.Buffer if err := tmpl.Execute(&w, nil); err != nil { t.Error(err) } }) } }
explode_data.jsonl/13426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 272 }
[ 2830, 3393, 45, 59855, 1155, 353, 8840, 836, 8, 341, 8196, 11, 1848, 1669, 14775, 6184, 445, 92425, 497, 59128, 1551, 497, 2092, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 2023, 8358, 1815, 1669, 2088, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDay11AroundStability(t *testing.T) { d := &Day11{ mode: "around", } grid := `L.LL.LL.LL LLLLLLL.LL L.L.L..L.. LLLL.LL.LL L.LL.LL.LL L.LLLLL.LL ..L.L..... LLLLLLLLLL L.LLLLLL.L L.LLLLL.LL` assert.Equal(t, `#.#L.L#.## #LLL#LL.L# L.#.L..#.. #L##.##.L# #.#L.LL.LL #.#L#L#.## ..L.L..... #L#L##L#L# #.LLLLLL.L #.#L#L#.##`, d.stabilize(grid)) assert.Equal(t, 37, strings.Count(d.stabilize(grid), "#")) }
explode_data.jsonl/48592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 10159, 16, 16, 43580, 623, 2897, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 609, 10159, 16, 16, 515, 197, 60247, 25, 330, 19454, 756, 197, 532, 49018, 1669, 1565, 43, 1214, 43, 1214, 43, 1214, 43, 198, 49356, 86708, 1214,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRuleVarcharVSChar(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := []string{ `create table t1(id int,name char(20),last_time date);`, `create table t1(id int,name binary(20),last_time date);`, `alter table t1 add column id int, add column name binary(20), add column last_time date;`, } for _, sql := range sqls { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleVarcharVSChar() if rule.Item != "COL.008" { t.Error("Rule not match:", rule.Item, "Expect : COL.008") } } else { t.Error("sqlparser.Parse Error:", err) } } common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 271 }
[ 2830, 3393, 11337, 53, 1113, 277, 26050, 4768, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 3056, 917, 515, 197, 197, 63, 3182, 1965, 259, 16, 37...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestFilterAndSort exercises fc.FilterAndSort on the sample storage pools,
// covering: filtering by "storageType", ascending and descending sorts by
// name, and limit/offset pagination.
func TestFilterAndSort(t *testing.T) {
	// Use a specific type(pool) to test unit test
	type test struct {
		input    []*model.StoragePoolSpec // pools fed to FilterAndSort
		param    map[string][]string      // query-style filter/sort/paging parameters
		expected []*model.StoragePoolSpec // expected result, in order
	}
	tests := []test{
		// select by storage type
		{
			input: []*model.StoragePoolSpec{
				&SamplePools[0],
				&SamplePools[1],
				&SamplePools[2],
			},
			param: map[string][]string{
				"storageType": {"block"},
			},
			expected: []*model.StoragePoolSpec{
				&SamplePools[0],
				&SamplePools[1],
			},
		},
		// sort by name asc
		{
			input: []*model.StoragePoolSpec{
				&SamplePools[0],
				&SamplePools[1],
				&SamplePools[2],
			},
			param: map[string][]string{
				"sortKey": {"name"},
				"sortDir": {"asc"},
			},
			expected: []*model.StoragePoolSpec{
				&SamplePools[2],
				&SamplePools[0],
				&SamplePools[1],
			},
		},
		// sort by name desc
		{
			input: []*model.StoragePoolSpec{
				&SamplePools[0],
				&SamplePools[1],
				&SamplePools[2],
			},
			param: map[string][]string{
				"sortKey": {"name"},
				"sortDir": {"desc"},
			},
			expected: []*model.StoragePoolSpec{
				&SamplePools[1],
				&SamplePools[0],
				&SamplePools[2],
			},
		},
		// limit is 2
		{
			input: []*model.StoragePoolSpec{
				&SamplePools[0],
				&SamplePools[1],
				&SamplePools[2],
			},
			param: map[string][]string{
				"limit":  {"2"},
				"offset": {"1"},
			},
			expected: []*model.StoragePoolSpec{
				&SamplePools[1],
				&SamplePools[2],
			},
		},
	}
	for _, testcase := range tests {
		ret := fc.FilterAndSort(testcase.input, testcase.param, sortableKeysMap[typePools])
		// The result is asserted to []interface{}; unpack back into specs.
		var res = []*model.StoragePoolSpec{}
		for _, data := range ret.([]interface{}) {
			res = append(res, data.(*model.StoragePoolSpec))
		}
		if !reflect.DeepEqual(res, testcase.expected) {
			// Dereference the pointers so the failure output shows values.
			var expected []model.StoragePoolSpec
			for _, value := range testcase.expected {
				expected = append(expected, *value)
			}
			var got []model.StoragePoolSpec
			for _, value := range res {
				got = append(got, *value)
			}
			t.Errorf("Expected %+v\n", expected)
			t.Errorf("Got %+v\n", got)
		}
	}
}
explode_data.jsonl/50712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 968 }
[ 2830, 3393, 5632, 3036, 10231, 1155, 353, 8840, 836, 8, 341, 197, 322, 5443, 264, 3151, 943, 41838, 8, 311, 1273, 4982, 1273, 198, 13158, 1273, 2036, 341, 197, 22427, 262, 29838, 2528, 43771, 10551, 8327, 198, 197, 36037, 262, 2415, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestNewImageDifferentTagUpdate2 ensures that an image-stream update for a
// tag the build config does not reference triggers neither a new build nor a
// config update — even though this config previously built from another tag
// (testTagID123) of the same stream.
func TestNewImageDifferentTagUpdate2(t *testing.T) {
	// this buildconfig references a different tag than the one that will be updated
	// it has previously run a build for the testTagID123 tag.
	buildcfg := mockBuildConfig("registry.com/namespace/imagename", "registry.com/namespace/imagename", "testImageStream", "testTag")
	buildcfg.Triggers[0].ImageChange.LastTriggeredImageID = "registry.com/namespace/imagename:testTagID123"
	// The stream carries both the referenced tag and an unrelated "otherTag".
	imageStream := mockImageStream("testImageStream", "registry.com/namespace/imagename", map[string]string{"otherTag": "newImageID123", "testTag": "testTagID123"})
	image := mockImage("testImage@id", "registry.com/namespace/imagename@id")
	controller := mockImageChangeController(buildcfg, imageStream, image)
	bcInstantiator := controller.BuildConfigInstantiator.(*buildConfigInstantiator)
	bcUpdater := bcInstantiator.buildConfigUpdater
	err := controller.HandleImageRepo(imageStream)
	if err != nil {
		t.Errorf("Unexpected error %v from HandleImageRepo", err)
	}
	// No build should have been instantiated...
	if len(bcInstantiator.name) != 0 {
		t.Error("New build generated when a different repository was updated!")
	}
	// ...and the build config should not have been updated.
	if bcUpdater.buildcfg != nil {
		t.Error("BuildConfig was updated when a different repository was updated!")
	}
}
explode_data.jsonl/69172
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 3564, 1906, 69123, 5668, 4289, 17, 1155, 353, 8840, 836, 8, 341, 197, 322, 419, 1936, 1676, 15057, 264, 2155, 4772, 1091, 279, 825, 429, 686, 387, 6049, 198, 197, 322, 432, 702, 8597, 1598, 264, 1936, 369, 279, 1273, 566...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestScribeEntryUtils(t *testing.T) { logEntries := []*protos.LogEntry{ { Category: "test", NormalMap: map[string]string{"status": "ACTIVE"}, IntMap: map[string]int64{"port": 443}, Time: 12345, }, } scribeEntries, err := exporters.ConvertToScribeLogEntries(logEntries) assert.NoError(t, err) assert.Equal(t, 1, len(scribeEntries)) assert.Equal(t, logEntries[0].Category, scribeEntries[0].Category) expectedMsg := "{\"int\":{\"port\":443,\"time\":12345},\"normal\":{\"status\":\"ACTIVE\"}}" assert.Equal(t, expectedMsg, scribeEntries[0].Message) }
explode_data.jsonl/22839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 50, 3114, 5874, 4209, 1155, 353, 8840, 836, 8, 1476, 6725, 24533, 1669, 29838, 4391, 436, 5247, 5874, 515, 197, 197, 515, 298, 6258, 2031, 25, 220, 330, 1944, 756, 298, 197, 12206, 2227, 25, 2415, 14032, 30953, 4913, 2829,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStellar builds a transaction envelope holding create/payment/merge
// operations, wraps it in an Entry, and verifies the entry helpers:
// GetAccountsFromEnvelope, GetTxID (the network transaction hash),
// GetAccounts, and GetOrderingKey (derived from the paging-token cursor)
// for both KIN2 and KIN3.
func TestStellar(t *testing.T) {
	// Six generated accounts: one source/destination pair per operation.
	accounts := make([]xdr.AccountId, 6)
	for i := 0; i < len(accounts); i++ {
		_, accounts[i] = testutil.GenerateAccountID(t)
	}
	_, src := testutil.GenerateAccountID(t)
	envelope := testutil.GenerateTransactionEnvelope(
		src,
		1,
		[]xdr.Operation{
			testutil.GenerateCreateOperation(&accounts[0], accounts[1]),
			testutil.GeneratePaymentOperation(&accounts[2], accounts[3]),
			testutil.GenerateMergeOperation(&accounts[4], accounts[5]),
		},
	)
	envelopeBytes, err := envelope.MarshalBinary()
	require.NoError(t, err)
	networkPassphrase := "network phassphrase"
	// The expected transaction ID is the network hash of the envelope's Tx.
	expected, err := network.HashTransaction(&envelope.Tx, networkPassphrase)
	require.NoError(t, err)
	e := Entry{
		Version: KinVersion_KIN3,
		Kind: &Entry_Stellar{
			Stellar: &StellarEntry{
				Ledger:            10,
				PagingToken:       1,
				NetworkPassphrase: networkPassphrase,
				EnvelopeXdr:       envelopeBytes,
			},
		},
	}
	// The envelope must expose the tx source plus all six operation accounts.
	envelopeAccounts, err := GetAccountsFromEnvelope(envelope)
	assert.NoError(t, err)
	assert.Len(t, envelopeAccounts, 1+len(accounts))
	for _, account := range append([]xdr.AccountId{src}, accounts...) {
		_, exists := envelopeAccounts[account.Address()]
		assert.True(t, exists)
	}
	// ID
	actual, err := e.GetTxID()
	assert.NoError(t, err)
	assert.EqualValues(t, expected[:], actual)
	// Accounts: the entry's account set must match the envelope's.
	entryAccounts, err := e.GetAccounts()
	assert.NoError(t, err)
	assert.Equal(t, len(entryAccounts), len(envelopeAccounts))
	for _, account := range entryAccounts {
		_, exists := envelopeAccounts[account]
		assert.True(t, exists)
	}
	// Ordering Key: must round-trip through the paging-token cursor for each
	// supported Kin version.
	for _, v := range []KinVersion{KinVersion_KIN2, KinVersion_KIN3} {
		e.Version = v
		k, err := e.GetOrderingKey()
		assert.NoError(t, err)
		pt := e.Kind.(*Entry_Stellar).Stellar.PagingToken
		cursor := strconv.FormatUint(pt, 10)
		actual, err := OrderingKeyFromCursor(v, cursor)
		assert.NoError(t, err)
		assert.EqualValues(t, actual, k)
	}
}
explode_data.jsonl/11685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 785 }
[ 2830, 3393, 623, 26880, 1155, 353, 8840, 836, 8, 341, 197, 26206, 1669, 1281, 10556, 87, 3612, 30877, 764, 11, 220, 21, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 2422, 91868, 1215, 600, 1027, 341, 197, 197, 6878, 9618, 989, 60, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFindBundleCacheExists(t *testing.T) { cfg := config.NewTestConfig(t) home, err := cfg.Config.GetHomeDir() require.NoError(t, err, "should have had a porter home dir") cacheDir := filepath.Join(home, "cache") cfg.TestContext.AddTestDirectory("testdata", cacheDir) c := New(cfg.Config) _, _, ok, err := c.FindBundle("deislabs/kubekahn:latest") assert.NoError(t, err, "the cache dir should exist, no error should have happened") assert.False(t, ok, "the bundle shouldn't exist") }
explode_data.jsonl/56180
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 9885, 8409, 8233, 15575, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 2193, 7121, 2271, 2648, 1155, 340, 197, 5117, 11, 1848, 1669, 13286, 10753, 2234, 7623, 6184, 741, 17957, 35699, 1155, 11, 1848, 11, 330, 5445, 614, 1030, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDust(t *testing.T) { pkScript := []byte{0x76, 0xa9, 0x21, 0x03, 0x2f, 0x7e, 0x43, 0x0a, 0xa4, 0xc9, 0xd1, 0x59, 0x43, 0x7e, 0x84, 0xb9, 0x75, 0xdc, 0x76, 0xd9, 0x00, 0x3b, 0xf0, 0x92, 0x2c, 0xf3, 0xaa, 0x45, 0x28, 0x46, 0x4b, 0xab, 0x78, 0x0d, 0xba, 0x5e, 0x88, 0xac} tests := []struct { name string // test description txOut wire.TxOut relayFee vtcutil.Amount // minimum relay transaction fee. isDust bool }{ { // Any value is allowed with a zero relay fee. "zero value with zero relay fee", wire.TxOut{Value: 0, PkScript: pkScript}, 0, false, }, { // Zero value is dust with any relay fee" "zero value with very small tx fee", wire.TxOut{Value: 0, PkScript: pkScript}, 1, true, }, { "38 byte public key script with value 584", wire.TxOut{Value: 584, PkScript: pkScript}, 1000, true, }, { "38 byte public key script with value 585", wire.TxOut{Value: 585, PkScript: pkScript}, 1000, false, }, { // Maximum allowed value is never dust. "max satoshi amount is never dust", wire.TxOut{Value: vtcutil.MaxSatoshi, PkScript: pkScript}, vtcutil.MaxSatoshi, false, }, { // Maximum int64 value causes overflow. "maximum int64 value", wire.TxOut{Value: 1<<63 - 1, PkScript: pkScript}, 1<<63 - 1, true, }, { // Unspendable pkScript due to an invalid public key // script. "unspendable pkScript", wire.TxOut{Value: 5000, PkScript: []byte{0x01}}, 0, // no relay fee true, }, } for _, test := range tests { res := isDust(&test.txOut, test.relayFee) if res != test.isDust { t.Fatalf("Dust test '%s' failed: want %v got %v", test.name, test.isDust, res) continue } } }
explode_data.jsonl/1117
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 839 }
[ 2830, 3393, 35, 590, 1155, 353, 8840, 836, 8, 341, 3223, 74, 5910, 1669, 3056, 3782, 90, 15, 87, 22, 21, 11, 220, 15, 9591, 24, 11, 220, 15, 87, 17, 16, 11, 220, 15, 87, 15, 18, 11, 220, 15, 87, 17, 69, 11, 220, 15, 87, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestJobSpecsController_Index walks the paginated /v2/specs endpoint: an
// invalid size is rejected with 422, size=1 returns the first of two job
// specs plus a "next" link, and following that link returns the second spec
// with a "prev" link.
func TestJobSpecsController_Index(t *testing.T) {
	t.Parallel()
	app, cleanup := cltest.NewApplication()
	defer cleanup()
	client := app.NewHTTPClient()
	j1, err := setupJobSpecsControllerIndex(app)
	assert.NoError(t, err)
	// Non-numeric size parameter is rejected.
	resp, cleanup := client.Get("/v2/specs?size=x")
	defer cleanup()
	cltest.AssertServerResponse(t, resp, 422)
	// First page (one spec per page).
	resp, cleanup = client.Get("/v2/specs?size=1")
	defer cleanup()
	cltest.AssertServerResponse(t, resp, 200)
	body := cltest.ParseResponseBody(resp)
	// The meta count reports both specs even though the page holds one.
	metaCount, err := cltest.ParseJSONAPIResponseMetaCount(body)
	assert.NoError(t, err)
	assert.Equal(t, 2, metaCount)
	var links jsonapi.Links
	jobs := []models.JobSpec{}
	err = web.ParsePaginatedResponse(body, &jobs, &links)
	assert.NoError(t, err)
	// Page 1 has a next link but no prev link.
	assert.NotEmpty(t, links["next"].Href)
	assert.Empty(t, links["prev"].Href)
	assert.Len(t, jobs, 1)
	assert.Equal(t, j1.Initiators[0].Schedule, jobs[0].Initiators[0].Schedule, "should have the same schedule")
	// Second page, reached via the "next" link from page 1.
	resp, cleanup = client.Get(links["next"].Href)
	defer cleanup()
	cltest.AssertServerResponse(t, resp, 200)
	jobs = []models.JobSpec{}
	err = web.ParsePaginatedResponse(cltest.ParseResponseBody(resp), &jobs, &links)
	assert.NoError(t, err)
	// Page 2 has a prev link but no next link.
	assert.Empty(t, links["next"])
	assert.NotEmpty(t, links["prev"])
	assert.Len(t, jobs, 1)
	assert.Equal(t, models.InitiatorWeb, jobs[0].Initiators[0].Type, "should have the same type")
	assert.NotEqual(t, true, jobs[0].Initiators[0].Ran, "should ignore fields for other initiators")
}
explode_data.jsonl/53685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 563 }
[ 2830, 3393, 12245, 8327, 82, 2051, 50361, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 28236, 11, 21290, 1669, 1185, 1944, 7121, 4988, 741, 16867, 21290, 741, 25291, 1669, 906, 7121, 9230, 2959, 2822, 12428, 16, 11, 1848, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFmtDateFull(t *testing.T) { tests := []struct { t time.Time expected string }{ // { // t: time.Date(2016, 02, 03, 9, 0, 1, 0, time.UTC), // expected: "Wednesday, February 3, 2016", // }, } trans := New() for _, tt := range tests { s := trans.FmtDateFull(tt.t) if s != tt.expected { t.Errorf("Expected '%s' Got '%s'", tt.expected, s) } } }
explode_data.jsonl/1293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 93322, 1916, 9432, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 3244, 286, 882, 16299, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 322, 341, 197, 197, 322, 220, 3244, 25, 286, 882, 8518, 7, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLoggingServiceV2ListMonitoredResourceDescriptorsError(t *testing.T) { errCode := codes.Internal mockLogging.err = grpc.Errorf(errCode, "test error") var request *loggingpb.ListMonitoredResourceDescriptorsRequest = &loggingpb.ListMonitoredResourceDescriptorsRequest{} c, err := NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } resp, err := c.ListMonitoredResourceDescriptors(context.Background(), request).Next() if c := grpc.Code(err); c != errCode { t.Errorf("got error code %q, want %q", c, errCode) } _ = resp }
explode_data.jsonl/77764
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 34575, 1860, 53, 17, 852, 11095, 36201, 4783, 58553, 1454, 1155, 353, 8840, 836, 8, 341, 9859, 2078, 1669, 13912, 32579, 198, 77333, 34575, 18441, 284, 47900, 13080, 3964, 2078, 11, 330, 1944, 1465, 5130, 2405, 1681, 353, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBackup_MakeBackup(t *testing.T) { loc := "/tmp/remark-backups.test" defer os.RemoveAll(loc) assert.NoError(t, os.MkdirAll(loc, 0o700)) bk := AutoBackup{BackupLocation: loc, SiteID: "site1", KeepMax: 3, Exporter: &mockExporter{}} fname, err := bk.makeBackup() assert.NoError(t, err) expFile := fmt.Sprintf("/tmp/remark-backups.test/backup-site1-%s.gz", time.Now().Format("20060102")) assert.Equal(t, expFile, fname) fi, err := os.Lstat(expFile) assert.NoError(t, err) assert.Equal(t, int64(52), fi.Size()) }
explode_data.jsonl/40477
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 56245, 1245, 726, 56245, 1155, 353, 8840, 836, 8, 341, 71128, 1669, 3521, 5173, 14, 37448, 15461, 8602, 5958, 698, 16867, 2643, 84427, 22649, 340, 6948, 35699, 1155, 11, 2643, 1321, 12438, 2403, 22649, 11, 220, 15, 78, 22, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBoolSlice(t *testing.T) { val := bool(true) m := map[string]interface{}{"value": []bool{val}, "nothing": nil} assert.Equal(t, val, New(m).Get("value").BoolSlice()[0]) assert.Equal(t, val, New(m).Get("value").MustBoolSlice()[0]) assert.Equal(t, []bool(nil), New(m).Get("nothing").BoolSlice()) assert.Equal(t, val, New(m).Get("nothing").BoolSlice([]bool{bool(true)})[0]) assert.Panics(t, func() { New(m).Get("nothing").MustBoolSlice() }) }
explode_data.jsonl/23401
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 11233, 33236, 1155, 353, 8840, 836, 8, 1476, 19302, 1669, 1807, 3715, 340, 2109, 1669, 2415, 14032, 31344, 6257, 4913, 957, 788, 3056, 2641, 90, 831, 2137, 330, 41212, 788, 2092, 532, 6948, 12808, 1155, 11, 1044, 11, 1532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoundRobinLoadBalancer_OneOf(t *testing.T) { var lb cluster.LoadBalancer = cluster.NewRoundRobinLoadBalancer() targetAddrs := []cluster.Address{"a:5071", "b:5701", "c:5701", "a:5071", "b:5701"} var addrs []cluster.Address for i := 0; i < 5; i++ { addrs = append(addrs, lb.OneOf([]cluster.Address{"a:5071", "b:5701", "c:5701"})) } assert.Equal(t, targetAddrs, addrs) }
explode_data.jsonl/62449
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 27497, 76671, 5879, 93825, 68201, 2124, 1155, 353, 8840, 836, 8, 341, 2405, 18866, 10652, 13969, 93825, 284, 10652, 7121, 27497, 76671, 5879, 93825, 741, 28861, 2212, 5428, 1669, 3056, 18855, 26979, 4913, 64, 25, 20, 15, 22, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestMarathonSDRunAndStop checks that Run delivers at least one update on
// the channel and that, after the context is cancelled, a receive on the
// channel no longer blocks (i.e. the channel was closed or drained by Run's
// shutdown).
func TestMarathonSDRunAndStop(t *testing.T) {
	ch, md := newTestDiscovery(func(url string) (*AppList, error) {
		return marathonTestAppList(marathonValidLabel, 1), nil
	})
	md.RefreshInterval = time.Millisecond * 10
	ctx, cancel := context.WithCancel(context.Background())
	go func() {
		select {
		case <-ch:
			// First update arrived; stop the discovery loop.
			cancel()
		case <-time.After(md.RefreshInterval * 3):
			// No update within three refresh periods: abort.
			cancel()
			t.Fatalf("Update took too long.")
		}
	}()
	// Run blocks until the context above is cancelled.
	md.Run(ctx, ch)
	select {
	case <-ch:
	default:
		// A non-ready channel here means Run did not close it on shutdown.
		t.Fatalf("Channel not closed.")
	}
}
explode_data.jsonl/65406
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 12061, 23941, 5491, 6727, 3036, 10674, 1155, 353, 8840, 836, 8, 341, 23049, 11, 10688, 1669, 501, 2271, 67400, 18552, 6522, 914, 8, 4609, 2164, 852, 11, 1465, 8, 341, 197, 853, 44696, 2271, 2164, 852, 1255, 277, 23941, 408...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseFloat64OrReturnBadRequest(t *testing.T) { t.Parallel() w := httptest.NewRecorder() _, ok := rest.ParseFloat64OrReturnBadRequest(w, "100", 0) require.True(t, ok) require.Equal(t, http.StatusOK, w.Result().StatusCode) //nolint:bodyclose w = httptest.NewRecorder() _, ok = rest.ParseFloat64OrReturnBadRequest(w, "bad request", 0) require.False(t, ok) require.Equal(t, http.StatusBadRequest, w.Result().StatusCode) //nolint:bodyclose w = httptest.NewRecorder() ret, ok := rest.ParseFloat64OrReturnBadRequest(w, "", 9.0) require.Equal(t, float64(9), ret) require.True(t, ok) require.Equal(t, http.StatusOK, w.Result().StatusCode) //nolint:bodyclose }
explode_data.jsonl/55933
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 268 }
[ 2830, 3393, 14463, 5442, 21, 19, 2195, 5598, 46015, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 6692, 1669, 54320, 70334, 7121, 47023, 741, 197, 6878, 5394, 1669, 2732, 8937, 5442, 21, 19, 2195, 5598, 46015, 3622, 11, 330, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetCheckpointSegmentsAndEncoding checks that the two GUC lookups
// (checkpoint segments, then server encoding) are converted into
// CHECK_POINT_SEGMENTS and ENCODING config entries.
func TestGetCheckpointSegmentsAndEncoding(t *testing.T) {
	t.Run("successfully get the GUC values", func(t *testing.T) {
		dbConn, sqlMock := testhelper.CreateAndConnectMockDB(1)
		// One single-column row per GUC query.
		checkpointRow := sqlmock.NewRows([]string{"string"}).AddRow(driver.Value("8"))
		encodingRow := sqlmock.NewRows([]string{"string"}).AddRow(driver.Value("UNICODE"))
		// The queries must arrive in this order: checkpoint first, then server
		// encoding (sqlmock enforces expectation order).
		sqlMock.ExpectQuery("SELECT .*checkpoint.*").WillReturnRows(checkpointRow)
		sqlMock.ExpectQuery("SELECT .*server.*").WillReturnRows(encodingRow)
		actualConfig, err := GetCheckpointSegmentsAndEncoding([]string{}, dbConn)
		if err != nil {
			t.Fatalf("got %#v, want nil", err)
		}
		expectedConfig := []string{"CHECK_POINT_SEGMENTS=8", "ENCODING=UNICODE"}
		if !reflect.DeepEqual(actualConfig, expectedConfig) {
			t.Errorf("got %v, want %v", actualConfig, expectedConfig)
		}
	})
}
explode_data.jsonl/11385
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 312 }
[ 2830, 3393, 1949, 92688, 64813, 3036, 14690, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 60505, 633, 279, 479, 5459, 2750, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 20939, 9701, 11, 5704, 11571, 1669, 1273, 18764, 7251, 3036, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestIssue18744 is a regression test: with index-lookup-join concurrency set
// to 1 and an error injected into the index hash join's outer worker via a
// failpoint, the inl_hash_join query must return the mocked error rather
// than hang.
func TestIssue18744(t *testing.T) {
	store, clean := testkit.CreateMockStore(t)
	defer clean()
	tk := testkit.NewTestKit(t, store)
	tk.MustExec(`use test;`)
	tk.MustExec(`drop table if exists t, t1;`)
	// Two identically-shaped tables; t1 holds only ids, t holds full rows
	// including an all-NULL row.
	tk.MustExec(`CREATE TABLE t ( id int(11) NOT NULL, a bigint(20) DEFAULT NULL, b char(20) DEFAULT NULL, c datetime DEFAULT NULL, d double DEFAULT NULL, e json DEFAULT NULL, f decimal(40,6) DEFAULT NULL, PRIMARY KEY (id), KEY a (a), KEY b (b), KEY c (c), KEY d (d), KEY f (f) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`)
	tk.MustExec(`CREATE TABLE t1 ( id int(11) NOT NULL, a bigint(20) DEFAULT NULL, b char(20) DEFAULT NULL, c datetime DEFAULT NULL, d double DEFAULT NULL, e json DEFAULT NULL, f decimal(40,6) DEFAULT NULL, PRIMARY KEY (id), KEY a (a), KEY b (b), KEY c (c), KEY d (d), KEY f (f) ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin;`)
	tk.MustExec(`insert into t1(id) values(0),(1),(2);`)
	tk.MustExec(`insert into t values(0, 2010, "2010-01-01 01:01:00" , "2010-01-01 01:01:00" , 2010 , 2010 , 2010.000000);`)
	tk.MustExec(`insert into t values(1 , NULL , NULL , NULL , NULL , NULL , NULL);`)
	tk.MustExec(`insert into t values(2 , 2012 , "2012-01-01 01:01:00" , "2012-01-01 01:01:00" , 2012 , 2012 , 2012.000000);`)
	// Single worker so the injected outer-worker error is deterministic.
	tk.MustExec(`set tidb_index_lookup_join_concurrency=1`)
	require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/executor/testIndexHashJoinOuterWorkerErr", "return"))
	defer func() {
		require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/executor/testIndexHashJoinOuterWorkerErr"))
	}()
	err := tk.QueryToErr(`select /*+ inl_hash_join(t2) */ t1.id, t2.id from t1 join t t2 on t1.a = t2.a order by t1.a ASC limit 1;`)
	require.EqualError(t, err, "mockIndexHashJoinOuterWorkerErr")
}
explode_data.jsonl/38155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 811 }
[ 2830, 3393, 42006, 16, 23, 22, 19, 19, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateStreamInsufficientReplicas(t *testing.T) { defer cleanupStorage(t) // Use a central NATS server. ns := natsdTest.RunDefaultServer() defer ns.Shutdown() // Configure server. s1Config := getTestConfig("a", true, 5050) s1 := runServerWithConfig(t, s1Config) defer s1.Stop() getMetadataLeader(t, 10*time.Second, s1) client, err := lift.Connect([]string{"localhost:5050"}) require.NoError(t, err) defer client.Close() err = client.CreateStream(context.Background(), "foo", "foo", lift.ReplicationFactor(2)) require.Error(t, err) }
explode_data.jsonl/34462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 4021, 3027, 15474, 26683, 18327, 52210, 1155, 353, 8840, 836, 8, 341, 16867, 21290, 5793, 1155, 692, 197, 322, 5443, 264, 8622, 18248, 50, 3538, 624, 84041, 1669, 308, 1862, 67, 2271, 16708, 3675, 5475, 741, 16867, 12268, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSharingValuesAmongContextsInAnIsolate(t *testing.T) { t.Parallel() iso := NewIsolate() ctx1, ctx2 := iso.NewContext(), iso.NewContext() // Create a value in ctx1 foo, err := ctx1.Eval(`foo = {x:6,y:true,z:"asdf"}; foo`, "ctx1.js") if err != nil { t.Fatal(err) } // Set that value into ctx2 err = ctx2.Global().Set("bar", foo) if err != nil { t.Fatal(err) } // ...and verify that it has the same value. res, err := ctx2.Eval(`bar.z`, "ctx2.js") if err != nil { t.Fatal(err) } if str := res.String(); str != "asdf" { t.Errorf("Expected 'asdf', got %q", str) } // Now modify that value in ctx2 _, err = ctx2.Eval("bar.z = 'xyz';", "ctx2b.js") if err != nil { t.Fatal(err) } // ...and verify that it got changed in ctx1 as well! res, err = ctx1.Eval("foo.z", "ctx1b.js") if err != nil { t.Fatal(err) } if str := res.String(); str != "xyz" { t.Errorf("Expected 'xyz', got %q", str) } }
explode_data.jsonl/81576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 428 }
[ 2830, 3393, 83756, 6227, 33610, 1972, 82, 641, 2082, 3872, 33066, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 15420, 1669, 1532, 3872, 33066, 741, 20985, 16, 11, 5635, 17, 1669, 33456, 7121, 1972, 1507, 33456, 7121, 1972,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestGetIPCMode(t *testing.T) { // test false pod := &api.Pod{} ipcMode := getIPCMode(pod) if ipcMode != "" { t.Errorf("expected empty ipc mode for pod but got %v", ipcMode) } // test true pod.Spec.SecurityContext = &api.PodSecurityContext{} pod.Spec.SecurityContext.HostIPC = true ipcMode = getIPCMode(pod) if ipcMode != "host" { t.Errorf("expected host ipc mode for pod but got %v", ipcMode) } }
explode_data.jsonl/31181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 1949, 62119, 3636, 1155, 353, 8840, 836, 8, 341, 197, 322, 1273, 895, 198, 3223, 347, 1669, 609, 2068, 88823, 16094, 46531, 66, 3636, 1669, 633, 62119, 3636, 1295, 347, 692, 743, 60104, 3636, 961, 1591, 341, 197, 3244, 130...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestRequestCtxSendFileNotModified verifies that SendFile answers a request
// whose If-Modified-Since header matches the file's modification time with
// 304 Not Modified and an empty body.
func TestRequestCtxSendFileNotModified(t *testing.T) {
	var ctx RequestCtx
	var req Request
	ctx.Init(&req, nil, defaultLogger)
	// Serve this very test file and pre-set If-Modified-Since to its mtime.
	filePath := "./server_test.go"
	lastModified, err := FileLastModified(filePath)
	if err != nil {
		t.Fatalf("unexpected error: %s", err)
	}
	ctx.Request.Header.Set("If-Modified-Since", string(AppendHTTPDate(nil, lastModified)))
	ctx.SendFile(filePath)
	s := ctx.Response.String()
	// Re-parse the serialized response to inspect status code and body.
	var resp Response
	br := bufio.NewReader(bytes.NewBufferString(s))
	if err := resp.Read(br); err != nil {
		t.Fatalf("error when reading response: %s", err)
	}
	if resp.StatusCode() != StatusNotModified {
		t.Fatalf("unexpected status code: %d. Expecting %d", resp.StatusCode(), StatusNotModified)
	}
	// A 304 must carry no body.
	if len(resp.Body()) > 0 {
		t.Fatalf("unexpected non-zero response body: %q", resp.Body())
	}
}
explode_data.jsonl/73293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 1900, 23684, 11505, 1703, 2623, 19148, 1155, 353, 8840, 836, 8, 341, 2405, 5635, 6145, 23684, 198, 2405, 4232, 6145, 198, 20985, 26849, 2099, 2958, 11, 2092, 11, 1638, 7395, 692, 17661, 1820, 1669, 5924, 4030, 4452, 18002, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestEventQuery_Next runs EventQuery against a mock GraphQL subscription
// server and checks that the first event returned by Next matches the canned
// result, compared via JSON round-trip.
func TestEventQuery_Next(t *testing.T) {
	s := graphql.NewMockSubServer(t, graphql.AddVarNamesToQuery(eventQuerySubscription, Query, Metadata, Options),
		map[string]interface{}{Query: "", Metadata: nil, Options: nil},
		&eventQueryResult{EventQueryResults: testEventQueryResultsOne},
	)
	defer s.Close()
	svc := New(s.URL, client.WithHTTPTimeout(5*time.Second))
	sub, err := svc.EventQuery(context.Background(), "", nil, nil)
	require.NoError(t, err)
	defer sub.Close()
	c, err := sub.Next(context.Background())
	require.NoError(t, err)
	// Compare as JSON to sidestep type differences between the canned result
	// and the decoded value.
	expectedData, err := json.Marshal(testEventQueryResultsOne)
	require.NoError(t, err)
	actualData, err := json.Marshal(c)
	require.JSONEq(t, string(expectedData), string(actualData))
}
explode_data.jsonl/36555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 1556, 2859, 1604, 427, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 48865, 7121, 11571, 3136, 5475, 1155, 345, 197, 197, 38294, 1904, 3962, 7980, 1249, 2859, 6235, 2859, 33402, 11, 11361, 11, 33589, 11, 14566, 1326, 197, 19567,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_SendEther_From(t *testing.T) { t.Parallel() app, cleanup := setupWithdrawalsApplication(t) defer cleanup() require.NoError(t, app.StartAndConnect()) client, _ := app.NewClientAndRenderer() set := flag.NewFlagSet("sendether", 0) set.String("from", app.Store.TxManager.NextActiveAccount().Address.String(), "") set.Parse([]string{"100", "0x342156c8d3bA54Abc67920d35ba1d1e67201aC9C"}) app.EthMock.Context("manager.CreateTx#1", func(ethMock *cltest.EthMock) { ethMock.Register("eth_sendRawTransaction", cltest.NewHash()) }) cliapp := cli.NewApp() c := cli.NewContext(cliapp, set, nil) assert.NoError(t, client.SendEther(c)) }
explode_data.jsonl/78856
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 2959, 46267, 71045, 53157, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 28236, 11, 21290, 1669, 6505, 92261, 1127, 4988, 1155, 340, 16867, 21290, 2822, 17957, 35699, 1155, 11, 906, 12101, 3036, 14611, 12367, 25291, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUnmarshal_WithString covers TOML string decoding: basic strings with
// escape sequences, multi-line strings (loaded from testdata files), literal
// (backtick/raw) strings including Windows paths and regexes, and multi-line
// literal strings with preserved whitespace.
func TestUnmarshal_WithString(t *testing.T) {
	// Target struct: each field corresponds to a key in the TOML fixtures.
	type testStruct struct {
		Str      string
		Key1     string
		Key2     string
		Key3     string
		Winpath  string
		Winpath2 string
		Quoted   string
		Regex    string
		Regex2   string
		Lines    string
	}
	testUnmarshal(t, []testcase{
		{
			// Basic string with escaped quotes, \t, \n and a \u unicode escape.
			data: `str = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."`,
			expect: &testStruct{
				Str: "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF.",
			},
		},
		{
			// Multi-line basic strings: the three keys decode to the same value.
			data:   string(loadTestData("unmarshal-string-1.toml")),
			expect: &testStruct{Key1: "One\nTwo", Key2: "One\nTwo", Key3: "One\nTwo"},
		},
		{
			// Line-ending backslash continuation: whitespace is trimmed.
			data: string(loadTestData("unmarshal-string-2.toml")),
			expect: &testStruct{
				Key1: "The quick brown fox jumps over the lazy dog.",
				Key2: "The quick brown fox jumps over the lazy dog.",
				Key3: "The quick brown fox jumps over the lazy dog.",
			},
		},
		{
			// Literal strings: backslashes are not escapes.
			data: string(loadTestData("unmarshal-string-3.toml")),
			expect: &testStruct{
				Winpath:  `C:\Users\nodejs\templates`,
				Winpath2: `\\ServerX\admin$\system32\`,
				Quoted:   `Tom "Dubs" Preston-Werner`,
				Regex:    `<\i\c*\s*>`,
			},
		},
		{
			// Multi-line literal strings: first newline trimmed, the rest kept.
			data: string(loadTestData("unmarshal-string-4.toml")),
			expect: &testStruct{
				Regex2: `I [dw]on't need \d{2} apples`,
				Lines:  "The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n",
			},
		},
	})
}
explode_data.jsonl/52952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 665 }
[ 2830, 3393, 1806, 27121, 62, 52342, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 9422, 2036, 341, 197, 197, 2580, 414, 914, 198, 197, 55242, 16, 257, 914, 198, 197, 55242, 17, 257, 914, 198, 197, 55242, 18, 257, 914, 198, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQuestionMatches(t *testing.T) { pr, q := newReverse("192.0.2.21/24") if !pr.QuestionMatches(q) { t.Error("Question doesn't match self", q) } pr, q = newReverse("192.0.2.23/24") if !pr.QuestionMatches(q) { t.Error("Question doesn't match self", q) } pr, q = newReverse("2001:db8::1/64") if !pr.QuestionMatches(q) { t.Error("Question doesn't match self", q) } q1 := q q1.Qclass = dns.ClassCHAOS if pr.QuestionMatches(q1) { t.Error("Question matches modified Class self", q1) } q1 = q q1.Qtype = dns.TypeANY if pr.QuestionMatches(q1) { t.Error("Question matches modified Type self", q1) } q1 = q q1.Name = "Jubs" if pr.QuestionMatches(q1) { t.Error("Question matches modified Name self", q1) } }
explode_data.jsonl/14930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 322 }
[ 2830, 3393, 14582, 42470, 1155, 353, 8840, 836, 8, 341, 25653, 11, 2804, 1669, 501, 45695, 445, 16, 24, 17, 13, 15, 13, 17, 13, 17, 16, 14, 17, 19, 1138, 743, 753, 649, 58521, 42470, 10583, 8, 341, 197, 3244, 6141, 445, 14582, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestHashObjectCmd is an end-to-end test of the "hash-object" sub-command.
// For each object type (blob, tree, commit) it runs the command against a
// fixture file, captures stdout, and compares the printed SHA against a
// known-good value. For tree and commit it also verifies that content of
// the wrong type is rejected and that nothing is printed on failure.
func TestHashObjectCmd(t *testing.T) {
	t.Parallel()

	t.Run("blob", func(t *testing.T) {
		t.Parallel()

		t.Run("default should be blob", func(t *testing.T) {
			t.Parallel()

			// Unpack a small fixture repository and hash its README.
			repoPath, cleanup := testhelper.UnTar(t, testhelper.RepoSmall)
			t.Cleanup(cleanup)

			cwd, err := os.Getwd()
			require.NoError(t, err)

			outBuf := bytes.NewBufferString("")
			cmd := newRootCmd(cwd, env.NewFromOs())
			// No -t flag: the type defaults to blob.
			cmd.SetArgs([]string{
				"hash-object",
				filepath.Join(repoPath, "README.md"),
			})
			cmd.SetOut(outBuf)
			require.NotPanics(t, func() {
				err = cmd.Execute()
			})
			require.NoError(t, err)

			out, err := io.ReadAll(outBuf)
			require.NoError(t, err)
			assert.Equal(t, "642480605b8b0fd464ab5762e044269cf29a60a3\n", string(out))
		})

		t.Run("blob opt should work", func(t *testing.T) {
			t.Parallel()

			cwd, err := os.Getwd()
			require.NoError(t, err)

			outBuf := bytes.NewBufferString("")
			cmd := newRootCmd(cwd, env.NewFromOs())
			require.NoError(t, err)
			// Explicit -t blob against the "blob" testdata fixture.
			cmd.SetArgs([]string{
				"hash-object",
				"-t", "blob",
				filepath.Join(testhelper.TestdataPath(t), "blob"),
			})
			cmd.SetOut(outBuf)
			require.NotPanics(t, func() {
				err = cmd.Execute()
			})
			require.NoError(t, err)

			out, err := io.ReadAll(outBuf)
			require.NoError(t, err)
			assert.Equal(t, "286db5050f814069644960e6cc7589c386053c6c\n", string(out))
		})
	})

	t.Run("tree", func(t *testing.T) {
		t.Parallel()

		t.Run("valid tree should work", func(t *testing.T) {
			t.Parallel()

			cwd, err := os.Getwd()
			require.NoError(t, err)

			outBuf := bytes.NewBufferString("")
			cmd := newRootCmd(cwd, env.NewFromOs())
			require.NoError(t, err)
			cmd.SetArgs([]string{
				"hash-object",
				"-t", "tree",
				filepath.Join(testhelper.TestdataPath(t), "tree"),
			})
			cmd.SetOut(outBuf)
			require.NotPanics(t, func() {
				err = cmd.Execute()
			})
			require.NoError(t, err)

			out, err := io.ReadAll(outBuf)
			require.NoError(t, err)
			assert.Equal(t, "2651fee5e238156738bc05ed1b558fdc9dc56fde\n", string(out))
		})

		t.Run("invalid tree should fail", func(t *testing.T) {
			t.Parallel()

			cwd, err := os.Getwd()
			require.NoError(t, err)

			outBuf := bytes.NewBufferString("")
			cmd := newRootCmd(cwd, env.NewFromOs())
			require.NoError(t, err)
			// Feed blob content while claiming type tree: must be rejected.
			cmd.SetArgs([]string{
				"hash-object",
				"-t", "tree",
				filepath.Join(testhelper.TestdataPath(t), "blob"),
			})
			cmd.SetOut(outBuf)
			require.NotPanics(t, func() {
				err = cmd.Execute()
			})
			require.Error(t, err)
			// let's make sure we have no content
			out, err := io.ReadAll(outBuf)
			require.NoError(t, err)
			assert.Empty(t, string(out))
		})
	})

	t.Run("commit", func(t *testing.T) {
		t.Parallel()

		t.Run("valid commit should work", func(t *testing.T) {
			t.Parallel()

			cwd, err := os.Getwd()
			require.NoError(t, err)

			outBuf := bytes.NewBufferString("")
			cmd := newRootCmd(cwd, env.NewFromOs())
			require.NoError(t, err)
			cmd.SetArgs([]string{
				"hash-object",
				"-t", "commit",
				filepath.Join(testhelper.TestdataPath(t), "commit"),
			})
			cmd.SetOut(outBuf)
			require.NotPanics(t, func() {
				err = cmd.Execute()
			})
			require.NoError(t, err)

			out, err := io.ReadAll(outBuf)
			require.NoError(t, err)
			assert.Equal(t, "0499018e26f79d37ad056611b75730dcb12918fb\n", string(out))
		})

		t.Run("invalid commit should fail", func(t *testing.T) {
			t.Parallel()

			cwd, err := os.Getwd()
			require.NoError(t, err)

			outBuf := bytes.NewBufferString("")
			cmd := newRootCmd(cwd, env.NewFromOs())
			require.NoError(t, err)
			// Feed tree content while claiming type commit: must be rejected.
			cmd.SetArgs([]string{
				"hash-object",
				"-t", "commit",
				filepath.Join(testhelper.TestdataPath(t), "tree"),
			})
			cmd.SetOut(outBuf)
			require.NotPanics(t, func() {
				err = cmd.Execute()
			})
			assert.Error(t, err)
			// let's make sure we have no content
			out, err := io.ReadAll(outBuf)
			require.NoError(t, err)
			assert.Empty(t, string(out))
		})
	})
}
explode_data.jsonl/44419
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1884 }
[ 2830, 3393, 6370, 1190, 15613, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 16708, 445, 35112, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3244, 41288, 7957, 2822, 197, 3244, 16708, 445, 2258, 1265, 387, 23404, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateOffset(t *testing.T) { var validInput = []string{"", "1", "10k", "11m", "1K", "100M", "5G"} for _, test := range validInput { allErrs := validateOffset(test, field.NewPath("offset-field")) if len(allErrs) != 0 { t.Errorf("validateOffset(%q) returned an error for valid input", test) } } var invalidInput = []string{"55mm", "2mG", "6kb", "-5k", "1L", "5Gb"} for _, test := range invalidInput { allErrs := validateOffset(test, field.NewPath("offset-field")) if len(allErrs) == 0 { t.Errorf("validateOffset(%q) didn't return error for invalid input.", test) } } }
explode_data.jsonl/65866
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 17926, 6446, 1155, 353, 8840, 836, 8, 341, 2405, 2697, 2505, 284, 3056, 917, 4913, 497, 330, 16, 497, 330, 16, 15, 74, 497, 330, 16, 16, 76, 497, 330, 16, 42, 497, 330, 16, 15, 15, 44, 497, 330, 20, 38, 16707, 2023...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestCreateUserWithToken covers user creation via an invitation token.
// Failure paths: unknown token, token of the wrong type, expired token,
// and a token pointing at a nonexistent team. The success path checks
// that the created user takes the email embedded in the invitation and
// that the token is consumed (deleted) afterwards.
func TestCreateUserWithToken(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()

	// Template user; the valid-request case expects its email to be
	// overridden by the invitation's email.
	user := model.User{Email: strings.ToLower(model.NewId()) + "success+test@example.com", Nickname: "Darth Vader", Username: "vader" + model.NewId(), Password: "passwd1", AuthService: ""}

	t.Run("invalid token", func(t *testing.T) {
		// "123" was never saved to the token store.
		if _, err := th.App.CreateUserWithToken(&user, "123"); err == nil {
			t.Fatal("Should fail on unexisting token")
		}
	})

	t.Run("invalid token type", func(t *testing.T) {
		// A verify-email token must not be accepted as a team invitation.
		token := model.NewToken(
			TOKEN_TYPE_VERIFY_EMAIL,
			model.MapToJson(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}),
		)
		<-th.App.Srv.Store.Token().Save(token)
		defer th.App.DeleteToken(token)
		if _, err := th.App.CreateUserWithToken(&user, token.Token); err == nil {
			t.Fatal("Should fail on bad token type")
		}
	})

	t.Run("expired token", func(t *testing.T) {
		token := model.NewToken(
			TOKEN_TYPE_TEAM_INVITATION,
			model.MapToJson(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}),
		)
		// Backdate the token just past the expiry window.
		token.CreateAt = model.GetMillis() - TEAM_INVITATION_EXPIRY_TIME - 1
		<-th.App.Srv.Store.Token().Save(token)
		defer th.App.DeleteToken(token)
		if _, err := th.App.CreateUserWithToken(&user, token.Token); err == nil {
			t.Fatal("Should fail on expired token")
		}
	})

	t.Run("invalid team id", func(t *testing.T) {
		// The teamId is a fresh random id that matches no existing team.
		token := model.NewToken(
			TOKEN_TYPE_TEAM_INVITATION,
			model.MapToJson(map[string]string{"teamId": model.NewId(), "email": user.Email}),
		)
		<-th.App.Srv.Store.Token().Save(token)
		defer th.App.DeleteToken(token)
		if _, err := th.App.CreateUserWithToken(&user, token.Token); err == nil {
			t.Fatal("Should fail on bad team id")
		}
	})

	t.Run("valid request", func(t *testing.T) {
		invitationEmail := model.NewId() + "other-email@test.com"
		token := model.NewToken(
			TOKEN_TYPE_TEAM_INVITATION,
			model.MapToJson(map[string]string{"teamId": th.BasicTeam.Id, "email": invitationEmail}),
		)
		<-th.App.Srv.Store.Token().Save(token)
		newUser, err := th.App.CreateUserWithToken(&user, token.Token)
		if err != nil {
			t.Log(err)
			t.Fatal("Should add user to the team")
		}
		if newUser.Email != invitationEmail {
			t.Fatal("The user email must be the invitation one")
		}
		// The invitation token must be single-use.
		if result := <-th.App.Srv.Store.Token().GetByToken(token.Token); result.Err == nil {
			t.Fatal("The token must be deleted after be used")
		}
	})
}
explode_data.jsonl/31419
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 984 }
[ 2830, 3393, 4021, 1474, 2354, 3323, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 19060, 1669, 1614, 7344, 90, 4781, 25, 9069, 29983, 7635, 7121, 764, 2140, 488, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDecimalFixedLogicalTypeEncode checks the Avro decimal logical type
// backed by a 12-byte fixed: rationals are scaled by 10^scale and encoded
// as big-endian two's-complement integers padded to the fixed size.
func TestDecimalFixedLogicalTypeEncode(t *testing.T) {
	schema := `{"type": "fixed", "size": 12, "logicalType": "decimal", "precision": 4, "scale": 2}`
	// 617/50 = 12.34 -> 1234 (0x04d2) at scale 2.
	testBinaryCodecPass(t, schema, big.NewRat(617, 50), []byte("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x04\xd2"))
	// -12.34 -> -1234, two's complement with 0xff sign extension.
	testBinaryCodecPass(t, schema, big.NewRat(-617, 50), []byte("\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfb\x2e"))
	// 25/4 = 6.25 -> 625 (0x0271).
	testBinaryCodecPass(t, schema, big.NewRat(25, 4), []byte("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x71"))
	// 33/100 = 0.33 -> 33 (0x21).
	testBinaryCodecPass(t, schema, big.NewRat(33, 100), []byte("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x21"))
	schema0scale := `{"type": "fixed", "size": 12, "logicalType": "decimal", "precision": 4, "scale": 0}`
	// Encodes to 12 due to scale: 0
	testBinaryEncodePass(t, schema0scale, big.NewRat(617, 50), []byte("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c"))
	// Decoding the same bytes at scale 0 yields the integer 12.
	testBinaryDecodePass(t, schema0scale, big.NewRat(12, 1), []byte("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0c"))
}
explode_data.jsonl/12013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 479 }
[ 2830, 3393, 11269, 13520, 64312, 929, 32535, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 1565, 4913, 1313, 788, 330, 22021, 497, 330, 2141, 788, 220, 16, 17, 11, 330, 30256, 929, 788, 330, 23289, 497, 330, 27182, 788, 220, 19, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUniqueChooseX checks the weighted-without-replacement chooser:
// picking len(weights) items must select every index exactly once,
// asking for more items than exist must report an error sentinel (-1),
// and repeated single picks must roughly follow the weight ordering.
func TestUniqueChooseX(t *testing.T) {
	rand.Seed(int64(time.Now().UTC().Nanosecond()))
	// Assert that we choose everything when n = len(weights)
	weights := []float64{1.0, .9, .8, .7, .6, .5, .4, .3, .2, .1}
	chosenCts := make([]int, len(weights))
	chosen := UniqueChooseX(weights, len(weights))
	for _, c := range chosen {
		chosenCts[c]++
	}
	for _, v := range chosenCts {
		assert.Equal(t, 1, v)
	}
	// That about does it for uniqueness testing
	// Failure testing
	// -1 represents an error from this (asking for 20 of 10 items)
	chosen = UniqueChooseX(weights, 20)
	assert.Contains(t, chosen, -1)
	// Distribution testing: draw one item testCt times and tally picks.
	chosenCts = make([]int, len(weights))
	for i := 0; i < testCt; i++ {
		chosen = UniqueChooseX(weights, 1)
		for _, c := range chosen {
			chosenCts[c]++
		}
	}
	// Normalize tallies to empirical pick frequencies.
	outWeights := make([]float64, len(weights))
	for i, v := range chosenCts {
		outWeights[i] = float64(v) / float64(testCt)
	}
	// Frequencies should be (approximately) non-increasing, matching the
	// descending input weights; a 0.1 tolerance absorbs sampling noise.
	for i := 0; i < len(weights)-1; i++ {
		diff := math.Abs(outWeights[i] - outWeights[i+1])
		assert.True(t, (outWeights[i] > outWeights[i+1]) || diff < .1)
	}
}
explode_data.jsonl/20511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 22811, 24051, 55, 1155, 353, 8840, 836, 8, 341, 7000, 437, 5732, 291, 1548, 21, 19, 9730, 13244, 1005, 21183, 1005, 45, 276, 960, 1297, 12145, 197, 322, 5319, 429, 582, 5157, 4297, 979, 308, 284, 2422, 62601, 340, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestGetChannelStateIncorrectSender(t *testing.T) { stateServiceTest.channelServiceMock.Put( stateServiceTest.defaultChannelKey, stateServiceTest.defaultChannelData, ) defer stateServiceTest.channelServiceMock.Clear() reply, err := stateServiceTest.service.GetChannelState( nil, &ChannelStateRequest{ ChannelId: bigIntToBytes(stateServiceTest.defaultChannelId), Signature: getSignature( bigIntToBytes(stateServiceTest.defaultChannelId), GenerateTestPrivateKey()), }, ) assert.Equal(t, errors.New("only channel signer can get latest channel state"), err) assert.Nil(t, reply) }
explode_data.jsonl/6076
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 1949, 9629, 1397, 40468, 20381, 1155, 353, 8840, 836, 8, 341, 24291, 1860, 2271, 16195, 1860, 11571, 39825, 1006, 197, 24291, 1860, 2271, 8764, 9629, 1592, 345, 197, 24291, 1860, 2271, 8764, 9629, 1043, 345, 197, 340, 16867, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAcquireBuildLock_waitEarlierFinished simulates build 13 acquiring a
// lock currently held by an earlier build (11). The acquirer should first
// update the lock ConfigMap to advertise itself as next, wait while the
// older build's pod is running, and then take the lock once that pod
// reaches the Succeeded phase.
func TestAcquireBuildLock_waitEarlierFinished(t *testing.T) {
	// wait for a lower build to finish
	client := buildLock_Client(t)
	counter := buildLock_CountWatch(client)
	previous := buildLock_Pod(t, client, "my-owner", "my-repository", "my-branch", "11")
	old := buildLock_LockFromPod(t, client, "my-namespace", previous, 11)
	// should update the lock
	clean, channel := buildLock_Acquire(t, client, "my-namespace", "my-owner", "my-repository", "my-branch", "13", false)
	defer clean()
	// wait for AcquireBuildLock to be waiting: either the watch counter
	// reaches 1, or the acquirer returns early (which is a failure here).
	for {
		count := 0
		select {
		case count = <-counter:
		case callback := <-channel:
			require.NotNil(t, callback, "timeout")
			assert.Fail(t, "TestAcquireBuildLock returned")
			callback()
			return
		}
		if count == 1 {
			break
		}
	}
	// check the lock: metadata preserved, data rewritten for build 13 with
	// no pod recorded yet, timestamp unchanged from the old lock.
	lock, err := client.CoreV1().ConfigMaps("jx").Get(old.Name, metav1.GetOptions{})
	require.NoError(t, err)
	assert.Equal(t, old.ObjectMeta, lock.ObjectMeta)
	assert.Equal(t, "my-namespace", lock.Data["namespace"])
	assert.Equal(t, "my-owner", lock.Data["owner"])
	assert.Equal(t, "my-repository", lock.Data["repository"])
	assert.Equal(t, "my-branch", lock.Data["branch"])
	assert.Equal(t, "13", lock.Data["build"])
	assert.Equal(t, "", lock.Data["pod"])
	assert.Equal(t, old.Data["timestamp"], lock.Data["timestamp"])
	// should acquire the lock once the earlier build's pod succeeds
	previous.Status.Phase = v1.PodSucceeded
	_, err = client.CoreV1().Pods("jx").Update(previous)
	require.NoError(t, err)
	callback := <-channel
	require.NotNil(t, callback, "timeout")
	buildLock_AssertLock(t, client, "my-namespace", "my-owner", "my-repository", "my-branch", "13")
	// Releasing via the callback must remove the lock entirely.
	callback()
	buildLock_AssertNoLock(t, client, "my-namespace")
}
explode_data.jsonl/28231
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 651 }
[ 2830, 3393, 11654, 984, 11066, 11989, 18760, 33041, 24890, 1155, 353, 8840, 836, 8, 341, 197, 322, 3783, 369, 264, 4722, 1936, 311, 6248, 198, 25291, 1669, 1936, 11989, 46102, 1155, 340, 58261, 1669, 1936, 11989, 50775, 14247, 12805, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestXORMappedAddress_AddTo_Invalid(t *testing.T) { m := New() addr := &XORMappedAddress{ IP: []byte{1, 2, 3, 4, 5, 6, 7, 8}, Port: 21254, } if err := addr.AddTo(m); err != ErrBadIPLength { t.Errorf("AddTo should return %q, got: %v", ErrBadIPLength, err) } }
explode_data.jsonl/19445
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 55, 4365, 5677, 4286, 21346, 1249, 62, 7928, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 741, 53183, 1669, 609, 55, 4365, 5677, 4286, 515, 197, 197, 3298, 25, 256, 3056, 3782, 90, 16, 11, 220, 17, 11, 220, 18, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDecode(t *testing.T) { for _, test := range decodeBytesTests { dec, err := Decode(test.input) if !checkError(t, test.input, err, test.wantErr) { continue } if !bytes.Equal(test.want.([]byte), dec) { t.Errorf("input %s: value mismatch: got %x, want %x", test.input, dec, test.want) continue } } }
explode_data.jsonl/63933
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 32564, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 16895, 7078, 18200, 341, 197, 197, 8169, 11, 1848, 1669, 50194, 8623, 10046, 340, 197, 743, 753, 2028, 1454, 1155, 11, 1273, 10046, 11, 1848, 11, 1273, 702...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRemoveManagerByUsernameAndOrigin(t *testing.T) { Convey("Remove manager by username and origin", t, func() { expectedBody := `{"origin":"ldap","username":"user-name"}` setup(MockRoute{"POST", "/v2/organizations/bc7b4caf-f4b8-4d85-b126-0729b9351e56/managers/remove", []string{""}, "", 204, "", &expectedBody}, t) defer teardown() c := &Config{ ApiAddress: server.URL, Token: "foobar", } client, err := NewClient(c) So(err, ShouldBeNil) org := &Org{ Guid: "bc7b4caf-f4b8-4d85-b126-0729b9351e56", c: client, } err = org.RemoveManagerByUsernameAndOrigin("user-name", "ldap") So(err, ShouldBeNil) }) }
explode_data.jsonl/4456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 290 }
[ 2830, 3393, 13021, 2043, 91519, 3036, 13298, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 13021, 6645, 553, 5934, 323, 6238, 497, 259, 11, 2915, 368, 341, 197, 42400, 5444, 1669, 1565, 4913, 8611, 3252, 38665, 2198, 5113, 3252, 872, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIteratorTableFilter is a datadriven test for iterator table filters.
// "define" commands (re)build the DB from the test file and dump its LSM
// shape; "iter" commands open an iterator whose TableFilter skips sstables
// based on the min-seq-num table property, approximating snapshot reads.
func TestIteratorTableFilter(t *testing.T) {
	// d is shared across datadriven commands; closed at test end.
	var d *DB
	defer func() {
		if d != nil {
			require.NoError(t, d.Close())
		}
	}()
	datadriven.RunTest(t, "testdata/iterator_table_filter", func(td *datadriven.TestData) string {
		switch td.Cmd {
		case "define":
			// A new "define" replaces any previously built DB.
			if d != nil {
				if err := d.Close(); err != nil {
					return err.Error()
				}
			}
			// Attach a collector that records each table's min sequence
			// number as a user property, so the filter below can read it.
			opts := &Options{}
			opts.TablePropertyCollectors = append(opts.TablePropertyCollectors, func() TablePropertyCollector {
				return &minSeqNumPropertyCollector{}
			})
			var err error
			if d, err = runDBDefineCmd(td, opts); err != nil {
				return err.Error()
			}
			d.mu.Lock()
			// Disable the "dynamic base level" code for this test.
			d.mu.versions.picker.forceBaseLevel1()
			s := d.mu.versions.currentVersion().DebugString(base.DefaultFormatter)
			d.mu.Unlock()
			return s
		case "iter":
			// We're using an iterator table filter to approximate what is done by
			// snapshots.
			iterOpts := &IterOptions{}
			for _, arg := range td.CmdArgs {
				if len(arg.Vals) != 1 {
					return fmt.Sprintf("%s: %s=<value>", td.Cmd, arg.Key)
				}
				switch arg.Key {
				case "filter":
					seqNum, err := strconv.ParseUint(arg.Vals[0], 10, 64)
					if err != nil {
						return err.Error()
					}
					// Keep only tables whose minimum sequence number is
					// below the requested filter point; tables without a
					// parsable property are conservatively kept.
					iterOpts.TableFilter = func(userProps map[string]string) bool {
						minSeqNum, err := strconv.ParseUint(userProps["test.min-seq-num"], 10, 64)
						if err != nil {
							return true
						}
						return minSeqNum < seqNum
					}
				default:
					return fmt.Sprintf("%s: unknown arg: %s", td.Cmd, arg.Key)
				}
			}
			// TODO(peter): runDBDefineCmd doesn't properly update the visible
			// sequence number. So we have to use a snapshot with a very large
			// sequence number, otherwise the DB appears empty.
			snap := Snapshot{
				db:     d,
				seqNum: InternalKeySeqNumMax,
			}
			iter := snap.NewIter(iterOpts)
			return runIterCmd(td, iter, true)
		default:
			return fmt.Sprintf("unknown command: %s", td.Cmd)
		}
	})
}
explode_data.jsonl/53973
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 871 }
[ 2830, 3393, 11951, 2556, 5632, 1155, 353, 8840, 836, 8, 341, 2405, 294, 353, 3506, 198, 16867, 2915, 368, 341, 197, 743, 294, 961, 2092, 341, 298, 17957, 35699, 1155, 11, 294, 10421, 2398, 197, 197, 532, 197, 66816, 2698, 266, 14666, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetDiscipleshipJournalDatabase(t *testing.T) { db := GetDiscipleshipJournalDatabase("../../resource") if len(db.BibleReadingPlan) == 0 { t.Errorf("Failed to Get DiscipleshipJournal BibleReadingPlan Data") } }
explode_data.jsonl/58607
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 1949, 23477, 37458, 2151, 43494, 5988, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 2126, 23477, 37458, 2151, 43494, 5988, 36800, 9233, 5130, 743, 2422, 9791, 1785, 1238, 31899, 20485, 8, 621, 220, 15, 341, 197, 3244, 13080, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestAncestors checks goroutine ancestor tracking: with GODEBUG
// tracebackancestors enabled, the goroutine started by main.testgoroutine
// must report exactly one ancestor whose stack contains main.main.
func TestAncestors(t *testing.T) {
	// Ancestor tracking requires Go 1.11+.
	if !goversion.VersionAfterOrEqual(runtime.Version(), 1, 11) {
		t.Skip("not supported on Go <= 1.10")
	}
	// Enable ancestor recording in the debuggee, restoring GODEBUG after.
	savedGodebug := os.Getenv("GODEBUG")
	os.Setenv("GODEBUG", "tracebackancestors=100")
	defer os.Setenv("GODEBUG", savedGodebug)
	withTestProcess("testnextprog", t, func(p *proc.Target, fixture protest.Fixture) {
		setFunctionBreakpoint(p, t, "main.testgoroutine")
		assertNoError(p.Continue(), t, "Continue()")
		as, err := proc.Ancestors(p, p.SelectedGoroutine(), 1000)
		assertNoError(err, t, "Ancestors")
		t.Logf("ancestors: %#v\n", as)
		if len(as) != 1 {
			t.Fatalf("expected only one ancestor got %d", len(as))
		}
		// Walk each ancestor's stack looking for the main.main frame.
		mainFound := false
		for i, a := range as {
			astack, err := a.Stack(100)
			assertNoError(err, t, fmt.Sprintf("Ancestor %d stack", i))
			t.Logf("ancestor %d\n", i)
			logStacktrace(t, p, astack)
			for _, frame := range astack {
				if frame.Current.Fn != nil && frame.Current.Fn.Name == "main.main" {
					mainFound = true
				}
			}
		}
		if !mainFound {
			t.Fatal("could not find main.main function in ancestors")
		}
	})
}
explode_data.jsonl/56330
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 2082, 15184, 1087, 1155, 353, 8840, 836, 8, 341, 743, 753, 53203, 1325, 35842, 6025, 2195, 2993, 89467, 35842, 1507, 220, 16, 11, 220, 16, 16, 8, 341, 197, 3244, 57776, 445, 1921, 7248, 389, 5994, 2651, 220, 16, 13, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestSortJSON checks that SortJSON recursively sorts object keys while
// leaving values (including nested arrays/objects) intact, and that it
// reports an error for malformed JSON. Cases cover a flat object, invalid
// input, a realistic genesis.json, and a transaction-spec document.
func TestSortJSON(t *testing.T) {
	cases := []struct {
		unsortedJSON string
		want         string
		wantErr      bool
	}{
		// simple case
		{unsortedJSON: `{"kingblockio":"foo", "atom":"bar", "tendermint":"foobar"}`,
			want: `{"atom":"bar","kingblockio":"foo","tendermint":"foobar"}`, wantErr: false},
		// failing case (invalid JSON):
		{unsortedJSON: `"kingblockio":"foo",,,, "atom":"bar", "tendermint":"foobar"}`,
			want: "", wantErr: true},
		// genesis.json
		{unsortedJSON: `{"consensus_params":{"block_size_params":{"max_bytes":22020096,"max_txs":100000,"max_gas":-1},"tx_size_params":{"max_bytes":10240,"max_gas":-1},"block_gossip_params":{"block_part_size_bytes":65536},"evidence_params":{"max_age":100000}},"validators":[{"pub_key":{"type":"AC26791624DE60","value":"c7UMMAbjFuc5GhGPy0E5q5tefy12p9Tq0imXqdrKXwo="},"power":100,"name":""}],"app_hash":"","genesis_time":"2018-05-11T15:52:25.424795506Z","chain_id":"test-chain-Q6VeoW","app_state":{"accounts":[{"address":"718C9C23F98C9642569742ADDD9F9AB9743FBD5D","coins":[{"denom":"Token","amount":1000},{"denom":"steak","amount":50}]}],"stake":{"pool":{"total_supply":50,"bonded_shares":"0","unbonded_shares":"0","bonded_pool":0,"unbonded_pool":0,"inflation_last_time":0,"inflation":"7/100"},"params":{"inflation_rate_change":"13/100","inflation_max":"1/5","inflation_min":"7/100","goal_bonded":"67/100","max_validators":100,"bond_denom":"steak"},"candidates":null,"bonds":null}}}`,
			want: `{"app_hash":"","app_state":{"accounts":[{"address":"718C9C23F98C9642569742ADDD9F9AB9743FBD5D","coins":[{"amount":1000,"denom":"Token"},{"amount":50,"denom":"steak"}]}],"stake":{"bonds":null,"candidates":null,"params":{"bond_denom":"steak","goal_bonded":"67/100","inflation_max":"1/5","inflation_min":"7/100","inflation_rate_change":"13/100","max_validators":100},"pool":{"bonded_pool":0,"bonded_shares":"0","inflation":"7/100","inflation_last_time":0,"total_supply":50,"unbonded_pool":0,"unbonded_shares":"0"}}},"chain_id":"test-chain-Q6VeoW","consensus_params":{"block_gossip_params":{"block_part_size_bytes":65536},"block_size_params":{"max_bytes":22020096,"max_gas":-1,"max_txs":100000},"evidence_params":{"max_age":100000},"tx_size_params":{"max_bytes":10240,"max_gas":-1}},"genesis_time":"2018-05-11T15:52:25.424795506Z","validators":[{"name":"","power":100,"pub_key":{"type":"AC26791624DE60","value":"c7UMMAbjFuc5GhGPy0E5q5tefy12p9Tq0imXqdrKXwo="}}]}`, wantErr: false},
		// from the TXSpec:
		{unsortedJSON: `{"chain_id":"test-chain-1","sequence":1,"fee_bytes":{"amount":[{"amount":5,"denom":"photon"}],"gas":10000},"msg_bytes":{"inputs":[{"address":"696E707574","coins":[{"amount":10,"denom":"atom"}]}],"outputs":[{"address":"6F7574707574","coins":[{"amount":10,"denom":"atom"}]}]},"alt_bytes":null}`,
			want: `{"alt_bytes":null,"chain_id":"test-chain-1","fee_bytes":{"amount":[{"amount":5,"denom":"photon"}],"gas":10000},"msg_bytes":{"inputs":[{"address":"696E707574","coins":[{"amount":10,"denom":"atom"}]}],"outputs":[{"address":"6F7574707574","coins":[{"amount":10,"denom":"atom"}]}]},"sequence":1}`, wantErr: false},
	}
	for _, tc := range cases {
		got, err := SortJSON([]byte(tc.unsortedJSON))
		// The error outcome must match the expectation exactly.
		if tc.wantErr != (err != nil) {
			t.Fatalf("got %t, want: %t, err=%s", err != nil, tc.wantErr, err)
		}
		require.Equal(t, string(got), tc.want)
	}
}
explode_data.jsonl/71388
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1316 }
[ 2830, 3393, 10231, 5370, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 20479, 28298, 5370, 914, 198, 197, 50780, 260, 914, 198, 197, 50780, 7747, 414, 1807, 198, 197, 59403, 197, 197, 322, 4285, 1142, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInternalPodK8SPods(t *testing.T) { assert.Implements(t, (*pod.Source)(nil), &Pods{}) p := NewPods(DefaultNamespace) assert.NotNil(t, p) pods, err := p.Pods() assert.NoError(t, err) assert.Len(t, pods, 0) corev1Pods := []corev1.Pod{ { ObjectMeta: metav1.ObjectMeta{ Name: "not-" + t.Name(), }, }, { ObjectMeta: metav1.ObjectMeta{ Name: t.Name(), }, Status: corev1.PodStatus{ Phase: corev1.PodRunning, }, Spec: corev1.PodSpec{ Containers: []corev1.Container{ { Env: []corev1.EnvVar{ { Name: pkg.EnvMongoDBReplset, Value: "testRs", }, { Name: pkg.EnvMongoDBPort, Value: t.Name(), }, }, Ports: []corev1.ContainerPort{ { Name: mongodbPortName, HostIP: "1.2.3.4", HostPort: int32(27017), }, }, }, }, }, }, } statefulsets := []appsv1.StatefulSet{ { Spec: appsv1.StatefulSetSpec{ ServiceName: pkg.DefaultServiceName + "-testRs", }, }, } p.Update(&CustomResourceState{ Name: "test-cluster", Pods: corev1Pods, Statefulsets: statefulsets, }) pods, _ = p.Pods() assert.Len(t, pods, 1) assert.Equal(t, t.Name(), pods[0]) // test .GetTasks() tasks, err := p.GetTasks(t.Name()) assert.NoError(t, err) assert.Len(t, tasks, 1) // test several PSMDB CRs // https://jira.percona.com/browse/CLOUD-76 p2 := NewPods(DefaultNamespace) p2.Update(&CustomResourceState{ Name: "test-cluster1", Pods: []corev1.Pod{ { ObjectMeta: metav1.ObjectMeta{ Name: t.Name() + "-1", }, Status: corev1.PodStatus{ Phase: corev1.PodRunning, }, Spec: corev1.PodSpec{ Containers: []corev1.Container{ { Env: []corev1.EnvVar{ { Name: pkg.EnvMongoDBReplset, Value: "rs", }, }, Ports: []corev1.ContainerPort{ { Name: mongodbPortName, ContainerPort: int32(27017), }, }, }, }, }, }, }, }) p2.Update(&CustomResourceState{ Name: "test-cluster2", Pods: []corev1.Pod{ { ObjectMeta: metav1.ObjectMeta{ Name: t.Name() + "-2", }, Status: corev1.PodStatus{ Phase: corev1.PodRunning, }, }, }, }) pods, _ = p2.Pods() assert.Len(t, pods, 2, "expected 2 pods (1 pod per updated CR)") tasks, err = p2.GetTasks(t.Name() + "-1") assert.NoError(t, err) 
assert.Len(t, tasks, 1) assert.Equal(t, t.Name()+"-1", tasks[0].Name()) // test .GetMongoAddr() is correct for 1 multi-CR task // https://jira.percona.com/browse/CLOUD-76 addr, err := tasks[0].GetMongoAddr() assert.NoError(t, err) assert.NotNil(t, addr) assert.Equal(t, "TestInternalPodK8SPods-1.test-cluster1-rs.psmdb.svc.cluster.local:27017", addr.String()) // test .Delete() of CR p2.Delete(&CustomResourceState{ Name: "test-cluster2", }) pods, _ = p2.Pods() assert.Len(t, pods, 1, "expected 1 pods after delete of 2nd CR") // test Succeeded pod is not listed by .Pods() corev1Pods[1].Status.Phase = corev1.PodSucceeded p.Update(&CustomResourceState{ Name: "test-cluster", Pods: corev1Pods, }) pods, _ = p.Pods() assert.Len(t, pods, 0) assert.Equal(t, "k8s", p.Name()) assert.Equal(t, "", p.URL()) os.Setenv(EnvKubernetesHost, t.Name()) os.Setenv(EnvKubernetesPort, "443") defer os.Unsetenv(EnvKubernetesHost) defer os.Unsetenv(EnvKubernetesPort) assert.Equal(t, "tcp://"+t.Name()+":443", p.URL()) }
explode_data.jsonl/37730
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1817 }
[ 2830, 3393, 11569, 23527, 42, 23, 4592, 29697, 1155, 353, 8840, 836, 8, 341, 6948, 26914, 4674, 1155, 11, 4609, 39073, 30350, 2376, 8385, 701, 609, 23527, 82, 6257, 692, 3223, 1669, 1532, 23527, 82, 87874, 22699, 340, 6948, 93882, 1155,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetCurlCommand_json(t *testing.T) { req, _ := http.NewRequest("PUT", "http://www.example.com/abc/def.ghi?jlk=mno&pqr=stu", bytes.NewBufferString(`{"hello":"world","answer":42}`)) req.Header.Set("Content-Type", "application/json") libCommand, _ := http2curl.GetCurlCommand(req) command, _ := GetCurlCommand(req) if libCommand.String() != command.String() { t.Errorf("expected library command: %s and command: %s to match", libCommand, command) } // Output: // curl -X 'PUT' -d '{"hello":"world","answer":42}' -H 'Content-Type: application/json' 'http://www.example.com/abc/def.ghi?jlk=mno&pqr=stu' }
explode_data.jsonl/60999
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 1949, 34, 1085, 4062, 9455, 1155, 353, 8840, 836, 8, 341, 24395, 11, 716, 1669, 1758, 75274, 445, 6221, 497, 330, 1254, 1110, 2136, 7724, 905, 14, 13683, 14, 750, 13, 75076, 30, 73, 41748, 27221, 2152, 96774, 23004, 28, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestForceLeaveCommandRun(t *testing.T) { a1 := testAgent(t) a2 := testAgent(t) defer a1.Shutdown() defer a2.Shutdown() addr := fmt.Sprintf("127.0.0.1:%d", a2.config.Ports.SerfLan) _, err := a1.agent.JoinLAN([]string{addr}) if err != nil { t.Fatalf("err: %s", err) } // Forcibly shutdown a2 so that it appears "failed" in a1 a2.Shutdown() ui, c := testForceLeaveCommand(t) args := []string{ "-http-addr=" + a1.httpAddr, a2.config.NodeName, } code := c.Run(args) if code != 0 { t.Fatalf("bad: %d. %#v", code, ui.ErrorWriter.String()) } m := a1.agent.LANMembers() if len(m) != 2 { t.Fatalf("should have 2 members: %#v", m) } if err := testutil.WaitForResult(func() (bool, error) { m = a1.agent.LANMembers() success := m[1].Status == serf.StatusLeft return success, errors.New(m[1].Status.String()) }); err != nil { t.Fatalf("member status is %v, should be left", err) } }
explode_data.jsonl/11382
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 398 }
[ 2830, 3393, 18573, 21833, 4062, 6727, 1155, 353, 8840, 836, 8, 341, 11323, 16, 1669, 1273, 16810, 1155, 340, 11323, 17, 1669, 1273, 16810, 1155, 340, 16867, 264, 16, 10849, 18452, 741, 16867, 264, 17, 10849, 18452, 2822, 53183, 1669, 88...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateHelmWithURL(t *testing.T) { service := newService("../..") _, err := service.GenerateManifest(context.Background(), &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, AppName: "test", ApplicationSource: &argoappv1.ApplicationSource{ Path: "./util/helm/testdata/redis", Helm: &argoappv1.ApplicationSourceHelm{ ValueFiles: []string{"https://raw.githubusercontent.com/argoproj/argocd-example-apps/master/helm-guestbook/values.yaml"}, Values: `cluster: {slaveCount: 2}`, }, }, }) assert.NoError(t, err) }
explode_data.jsonl/58028
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 31115, 39, 23162, 73100, 1155, 353, 8840, 836, 8, 341, 52934, 1669, 501, 1860, 17409, 496, 5130, 197, 6878, 1848, 1669, 2473, 57582, 38495, 5378, 19047, 1507, 609, 391, 292, 1451, 72272, 1900, 515, 197, 197, 25243, 25, 262, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConverterMapStructWithFromReqDropped(t *testing.T) { fieldMap := make(map[string]codegen.FieldMapperEntry) lines, err := convertTypes( "Foo", "Bar", `struct NestedFoo { 1: required string one 2: optional string two } struct Foo { 1: required NestedFoo three 2: required NestedFoo four } struct Bar { 1: optional NestedFoo three }`, nil, fieldMap, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` if in.Three != nil { out.Three = &structs.NestedFoo{} out.Three.One = string(in.Three.One) out.Three.Two = (*string)(in.Three.Two) } else { out.Three = nil }`), lines) }
explode_data.jsonl/62069
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 14920, 2227, 9422, 2354, 3830, 27234, 35, 41716, 1155, 353, 8840, 836, 8, 341, 39250, 2227, 1669, 1281, 9147, 14032, 60, 95859, 17087, 10989, 5874, 692, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetConfigTxFailure(t *testing.T) { rl := NewRAMLedger(10) for i := 0; i < 10; i++ { rl.Append(ordererledger.CreateNextBlock(rl, []*cb.Envelope{ makeNormalTx(provisional.TestChainID, i), makeConfigTx(provisional.TestChainID, i), })) } rl.Append(ordererledger.CreateNextBlock(rl, []*cb.Envelope{makeNormalTx(provisional.TestChainID, 11)})) defer func() { if recover() == nil { t.Fatalf("Should have panic-ed because there was no config tx") } }() getConfigTx(rl) }
explode_data.jsonl/32474
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 1949, 2648, 51, 9770, 9373, 1155, 353, 8840, 836, 8, 341, 197, 2381, 1669, 1532, 49, 31102, 291, 1389, 7, 16, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 26, 600, 1027, 341, 197, 197, 2381, 8982, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMarshalAppendAllocations(t *testing.T) { m := &test3pb.TestAllTypes{SingularInt32: 1} size := proto.Size(m) const count = 1000 b := make([]byte, size) // AllocsPerRun returns an integral value. marshalAllocs := testing.AllocsPerRun(count, func() { _, err := proto.MarshalOptions{}.MarshalAppend(b[:0], m) if err != nil { t.Fatal(err) } }) b = nil marshalAppendAllocs := testing.AllocsPerRun(count, func() { var err error b, err = proto.MarshalOptions{}.MarshalAppend(b, m) if err != nil { t.Fatal(err) } }) if marshalAllocs != marshalAppendAllocs { t.Errorf("%v allocs/op when writing to a preallocated buffer", marshalAllocs) t.Errorf("%v allocs/op when repeatedly appending to a slice", marshalAppendAllocs) t.Errorf("expect amortized allocs/op to be identical") } }
explode_data.jsonl/1539
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 55438, 23877, 25154, 804, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 609, 1944, 18, 16650, 8787, 2403, 4173, 90, 50, 41880, 1072, 18, 17, 25, 220, 16, 532, 13832, 1669, 18433, 2465, 1255, 340, 4777, 1760, 284, 220, 16, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReplaceInt8(t *testing.T) { v := &Value{data: []int8{int8(1), int8(1), int8(1), int8(1), int8(1), int8(1)}} rawArr := v.MustInt8Slice() replaced := v.ReplaceInt8(func(index int, val int8) int8 { if index < len(rawArr)-1 { return rawArr[index+1] } return rawArr[0] }) replacedArr := replaced.MustInt8Slice() if assert.Equal(t, 6, len(replacedArr)) { assert.Equal(t, replacedArr[0], rawArr[1]) assert.Equal(t, replacedArr[1], rawArr[2]) assert.Equal(t, replacedArr[2], rawArr[3]) assert.Equal(t, replacedArr[3], rawArr[4]) assert.Equal(t, replacedArr[4], rawArr[5]) assert.Equal(t, replacedArr[5], rawArr[0]) } }
explode_data.jsonl/23430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 23107, 1072, 23, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 3056, 396, 23, 90, 396, 23, 7, 16, 701, 526, 23, 7, 16, 701, 526, 23, 7, 16, 701, 526, 23, 7, 16, 701, 526, 23, 7, 16, 701, 52...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTheNamingStrategy(t *testing.T) { cases := []struct { name string namer gorm.Namer expected string }{ {name: "auth", expected: "auth", namer: gorm.TheNamingStrategy.DB}, {name: "userRestrictions", expected: "user_restrictions", namer: gorm.TheNamingStrategy.Table}, {name: "clientID", expected: "client_id", namer: gorm.TheNamingStrategy.Column}, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { result := c.namer(c.name) if result != c.expected { t.Errorf("error in naming strategy. expected: %v got :%v\n", c.expected, result) } }) } }
explode_data.jsonl/48456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 785, 85410, 19816, 1155, 353, 8840, 836, 8, 1476, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 9038, 15232, 262, 342, 493, 2067, 15232, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 47006, 25, 330,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDirWithMissingKey(t *testing.T) { dir := newTempDir(t) defer os.RemoveAll(dir) newCaDirectory(t, dir, &defaultCertMaterial, nil) _, err := cert.Load(dir) if err == nil { t.Errorf("expected error, got nil") } }
explode_data.jsonl/6224
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 6184, 2354, 25080, 1592, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 501, 12151, 6184, 1155, 340, 16867, 2643, 84427, 14161, 340, 8638, 22571, 9310, 1155, 11, 5419, 11, 609, 2258, 36934, 13415, 11, 2092, 340, 197, 6878, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLogAgentReachable(t *testing.T) { assert := assert.New(t) tp := new(testLogger) tracer, _, _, stop := startTestTracer(t, WithLogger(tp)) defer stop() tp.Reset() logStartup(tracer) assert.Len(tp.Lines(), 2) assert.Regexp(`Datadog Tracer v[0-9]+\.[0-9]+\.[0-9]+ WARN: DIAGNOSTICS Unable to reach agent: Post`, tp.Lines()[0]) }
explode_data.jsonl/64651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 2201, 16810, 48368, 480, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 73423, 1669, 501, 8623, 7395, 340, 25583, 9584, 11, 8358, 8358, 2936, 1669, 1191, 2271, 1282, 9584, 1155, 11, 3085, 7395, 38852, 1171,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckPublicKeyIsValid(t *testing.T) { invalidPublicKeyStrings := []string{ "", //empty key "0446f1c8de232a065da428bf76e44b41f59a46620dec0aedfc9b5ab651e91f2051d610fddc78b8eba38a634bfe9a74bb015a88c52b9b844c74997035e08a695c", //wrong length key "0346f1c8de232a065da428bf76e44b41f59a46620dec0aedfc9b5ab651e91f2051d610fddc78b8eba38a634bfe9a74bb015a88c52b9b844c74997035e08a695ce9", //wrong prefix key } for _, publicKeyString := range invalidPublicKeyStrings { publicKey, _ := hex.DecodeString(publicKeyString) if CheckPublicKeyIsValid(publicKey) == nil { t.Error("CheckPublicKeyIsValid accepting invalids public keys as valid.") } } }
explode_data.jsonl/67116
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 3973, 61822, 55470, 1155, 353, 8840, 836, 8, 341, 197, 11808, 61822, 20859, 1669, 3056, 917, 515, 197, 197, 56323, 442, 3194, 1376, 198, 197, 197, 1, 15, 19, 19, 21, 69, 16, 66, 23, 450, 17, 18, 17, 64, 15, 21, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMatchUnaryKeyword(t *testing.T) { tests := []struct { in string pos int want bool }{ { in: "NOT bar", pos: 0, want: true, }, { in: "foo NOT bar", pos: 4, want: true, }, { in: "foo NOT", pos: 4, want: false, }, { in: "fooNOT bar", pos: 3, want: false, }, { in: "NOTbar", pos: 0, want: false, }, { in: "(not bar)", pos: 1, want: true, }, } for _, tt := range tests { t.Run(tt.in, func(t *testing.T) { p := &parser{buf: []byte(tt.in), pos: tt.pos} if got := p.matchUnaryKeyword("NOT"); got != tt.want { t.Errorf("matchUnaryKeyword() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/49627
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 8331, 94545, 34481, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 256, 914, 198, 197, 28164, 220, 526, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 515, 298, 17430, 25, 256, 330, 14065, 3619,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHeapSort(t *testing.T) { var a0 = goutil.RandIntArray(10, 1000000) // var a4 = goutil.RandIntArray(100000, 1000000) // fmt.Println(a0) HeapSort(a0) // fmt.Println(a0) }
explode_data.jsonl/20292
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 27909, 10231, 1155, 353, 8840, 836, 8, 341, 2405, 264, 15, 284, 342, 30158, 2013, 437, 95338, 7, 16, 15, 11, 220, 16, 15, 15, 15, 15, 15, 15, 340, 197, 322, 762, 264, 19, 284, 342, 30158, 2013, 437, 95338, 7, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseCommand(t *testing.T) { assertParse := func(expected Process, command string) { actual, err := ParseCommand(strings.NewReader(command)) assert.NoError(t, err) assert.Equal(t, expected, *actual) } assertParse(Process{ Program: "program", Arguments: []string{}, Environment: map[string]string{}, }, "program") assertParse(Process{ Program: "program", Arguments: []string{"arg"}, Environment: map[string]string{}, }, "program arg") assertParse(Process{ Program: "program", Arguments: []string{"quoted arg"}, Environment: map[string]string{}, }, `program "quoted arg"`) assertParse(Process{ Program: "foo", Arguments: []string{"bar", "baz"}, Environment: map[string]string{ "x": "1", "y": "2", }, }, "x=1 y=2 foo bar baz") assertParse(Process{ Program: "3", Arguments: []string{}, Environment: map[string]string{}, }, "$((1+2))") assertParse(Process{ Program: "axb ayb", Arguments: []string{}, Environment: map[string]string{}, }, "a{x,y}b") assertNoParse := func(errSubstr string, command string) { _, err := ParseCommand(strings.NewReader(command)) assert.Error(t, err) if !strings.Contains(err.Error(), errSubstr) { t.Errorf("expected parsing %q to produce error containing %q, got %q", command, errSubstr, err.Error()) } } assertNoParse("reached EOF without closing quote", `"`) assertNoParse("unsupported: command substitution", `foo $(echo 123)`) assertNoParse("unsupported: glob patterns", `foo *`) assertNoParse("unbound variable", `$X`) }
explode_data.jsonl/43101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 616 }
[ 2830, 3393, 14463, 4062, 1155, 353, 8840, 836, 8, 341, 6948, 14463, 1669, 2915, 15253, 8603, 11, 3210, 914, 8, 341, 197, 88814, 11, 1848, 1669, 14775, 4062, 51442, 68587, 15143, 1171, 197, 6948, 35699, 1155, 11, 1848, 340, 197, 6948, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetYARPCErrorCode(t *testing.T) { const exName = "MyException" t.Run("success", func(t *testing.T) { spec := &compile.StructSpec{ Name: exName, Annotations: map[string]string{_errorCodeAnnotationKey: "ABORTED"}, } assert.NotPanics(t, func() { want := "yarpcerrors.CodeAborted" got := getYARPCErrorCode(spec) assert.Equal(t, want, got) }, "unexpected panic") }) t.Run("panic fail", func(t *testing.T) { spec := &compile.StructSpec{ Name: exName, Annotations: map[string]string{_errorCodeAnnotationKey: "foo"}, } assert.PanicsWithValue(t, "invalid rpc.code annotation for \"MyException\": \"foo\"\nAvailable codes: CANCELLED,UNKNOWN,INVALID_ARGUMENT,DEADLINE_EXCEEDED,NOT_FOUND,ALREADY_EXISTS,PERMISSION_DENIED,RESOURCE_EXHAUSTED,FAILED_PRECONDITION,ABORTED,OUT_OF_RANGE,UNIMPLEMENTED,INTERNAL,UNAVAILABLE,DATA_LOSS,UNAUTHENTICATED", func() { getYARPCErrorCode(spec) }, "unexpected panic") }) }
explode_data.jsonl/26374
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 88300, 934, 4872, 30748, 1155, 353, 8840, 836, 8, 341, 4777, 505, 675, 284, 330, 5050, 1354, 1837, 3244, 16708, 445, 5630, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 98100, 1669, 609, 20433, 51445, 8327, 515, 298, 21297...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResetPassword(t *testing.T) { t.Skip("test disabled during old build server changes, should be investigated") th := Setup(t).InitBasic() defer th.TearDown() th.Client.Logout() user := th.BasicUser // Delete all the messages before check the reset password mail.DeleteMailBox(user.Email) th.TestForAllClients(t, func(t *testing.T, client *model.Client4) { _, err := client.SendPasswordResetEmail(user.Email) require.NoError(t, err) resp, err := client.SendPasswordResetEmail("") require.Error(t, err) CheckBadRequestStatus(t, resp) // Should not leak whether the email is attached to an account or not _, err = client.SendPasswordResetEmail("notreal@example.com") require.NoError(t, err) }) // Check if the email was send to the right email address and the recovery key match var resultsMailbox mail.JSONMessageHeaderInbucket err := mail.RetryInbucket(5, func() error { var err error resultsMailbox, err = mail.GetMailBox(user.Email) return err }) if err != nil { t.Log(err) t.Log("No email was received, maybe due load on the server. 
Disabling this verification") } var recoveryTokenString string if err == nil && len(resultsMailbox) > 0 { require.Contains(t, resultsMailbox[0].To[0], user.Email, "Correct To recipient") resultsEmail, mailErr := mail.GetMessageFromMailbox(user.Email, resultsMailbox[0].ID) require.NoError(t, mailErr) loc := strings.Index(resultsEmail.Body.Text, "token=") require.NotEqual(t, -1, loc, "Code should be found in email") loc += 6 recoveryTokenString = resultsEmail.Body.Text[loc : loc+model.TokenSize] } recoveryToken, err := th.App.Srv().Store.Token().GetByToken(recoveryTokenString) require.NoError(t, err, "Recovery token not found (%s)", recoveryTokenString) resp, err := th.Client.ResetPassword(recoveryToken.Token, "") require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.ResetPassword(recoveryToken.Token, "newp") require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.ResetPassword("", "newpwd") require.Error(t, err) CheckBadRequestStatus(t, resp) resp, err = th.Client.ResetPassword("junk", "newpwd") require.Error(t, err) CheckBadRequestStatus(t, resp) code := "" for i := 0; i < model.TokenSize; i++ { code += "a" } resp, err = th.Client.ResetPassword(code, "newpwd") require.Error(t, err) CheckBadRequestStatus(t, resp) _, err = th.Client.ResetPassword(recoveryToken.Token, "newpwd") require.NoError(t, err) th.Client.Login(user.Email, "newpwd") th.Client.Logout() resp, err = th.Client.ResetPassword(recoveryToken.Token, "newpwd") require.Error(t, err) CheckBadRequestStatus(t, resp) authData := model.NewId() _, err = th.App.Srv().Store.User().UpdateAuthData(user.Id, "random", &authData, "", true) require.NoError(t, err) th.TestForAllClients(t, func(t *testing.T, client *model.Client4) { resp, err = client.SendPasswordResetEmail(user.Email) require.Error(t, err) CheckBadRequestStatus(t, resp) }) }
explode_data.jsonl/47529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1093 }
[ 2830, 3393, 14828, 4876, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 1944, 8386, 2337, 2310, 1936, 3538, 4344, 11, 1265, 387, 26219, 5130, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 70479, 1171...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCount(t *testing.T) { ctx := context.Background() ci := fs.GetConfig(ctx) r := fstest.NewRun(t) defer r.Finalise() file1 := r.WriteBoth(ctx, "potato2", "------------------------------------------------------------", t1) file2 := r.WriteBoth(ctx, "empty space", "-", t2) file3 := r.WriteBoth(ctx, "sub dir/potato3", "hello", t2) fstest.CheckItems(t, r.Fremote, file1, file2, file3) // Check the MaxDepth too ci.MaxDepth = 1 defer func() { ci.MaxDepth = -1 }() objects, size, err := operations.Count(ctx, r.Fremote) require.NoError(t, err) assert.Equal(t, int64(2), objects) assert.Equal(t, int64(61), size) }
explode_data.jsonl/51930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 2507, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 72, 1669, 8619, 2234, 2648, 7502, 340, 7000, 1669, 48434, 477, 7121, 6727, 1155, 340, 16867, 435, 991, 977, 1064, 741, 17661, 16, 1669, 435, 4073, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindMode(t *testing.T) { node33 := &TreeNode{Val: 2} node22 := &TreeNode{Val: 2, Left: node33} node11 := &TreeNode{Val: 1, Right: node22} assert.EqualValues(t, []int{2}, findMode(node11)) }
explode_data.jsonl/30449
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 9885, 3636, 1155, 353, 8840, 836, 8, 341, 20831, 18, 18, 1669, 609, 26597, 90, 2208, 25, 220, 17, 532, 20831, 17, 17, 1669, 609, 26597, 90, 2208, 25, 220, 17, 11, 13727, 25, 2436, 18, 18, 532, 20831, 16, 16, 1669, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidatesHostParameter(t *testing.T) { testCases := []struct { Host string Prefix string URL string Err bool }{ {"127.0.0.1", "", "http://127.0.0.1/" + testapi.Default.GroupVersion().Version, false}, {"127.0.0.1:8080", "", "http://127.0.0.1:8080/" + testapi.Default.GroupVersion().Version, false}, {"foo.bar.com", "", "http://foo.bar.com/" + testapi.Default.GroupVersion().Version, false}, {"http://host/prefix", "", "http://host/prefix/" + testapi.Default.GroupVersion().Version, false}, {"http://host", "", "http://host/" + testapi.Default.GroupVersion().Version, false}, {"http://host", "/", "http://host/" + testapi.Default.GroupVersion().Version, false}, {"http://host", "/other", "http://host/other/" + testapi.Default.GroupVersion().Version, false}, {"host/server", "", "", true}, } for i, testCase := range testCases { u, err := DefaultServerURL(testCase.Host, testCase.Prefix, *testapi.Default.GroupVersion(), false) switch { case err == nil && testCase.Err: t.Errorf("expected error but was nil") continue case err != nil && !testCase.Err: t.Errorf("unexpected error %v", err) continue case err != nil: continue } if e, a := testCase.URL, u.String(); e != a { t.Errorf("%d: expected host %s, got %s", i, e, a) continue } } }
explode_data.jsonl/25921
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 503 }
[ 2830, 3393, 4088, 973, 9296, 4971, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 197, 9296, 256, 914, 198, 197, 10025, 5060, 914, 271, 197, 79055, 914, 198, 197, 197, 7747, 1807, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestSokuonP(t *testing.T) { const want = "ppappippuppeppo" for _, v := range []string{"っぱっぴっぷっぺっぽ", "ッパッピップッペッポ"} { got, err := KanaToRomaji(v) assert.Equal(t, want, got) assert.Nil(t, err) } }
explode_data.jsonl/11352
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 50, 16493, 263, 47, 1155, 353, 8840, 836, 8, 341, 4777, 1366, 284, 330, 602, 676, 2807, 58498, 39480, 1837, 2023, 8358, 348, 1669, 2088, 3056, 917, 4913, 126086, 125388, 112, 125388, 115, 125388, 118, 136190, 497, 330, 25204...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGrpc_SendTransactions(t *testing.T) { cfg := types.NewChain33Config(types.GetDefaultCfgstring()) //Init(cfg) g := Grpc{} qapi = new(mocks.QueueProtocolAPI) qapi.On("GetConfig", mock.Anything).Return(cfg) g.cli.QueueProtocolAPI = qapi txCount := 10 in := &types.Transactions{Txs: make([]*types.Transaction, txCount)} testMsg := []byte("test") var testTx *types.Transaction qapi.On("SendTx", testTx).Return(&types.Reply{IsOk: true, Msg: testMsg}, types.ErrInvalidParam) testTx = &types.Transaction{} qapi.On("SendTx", testTx).Return(&types.Reply{IsOk: true, Msg: testMsg}, nil) in.Txs[txCount-1] = testTx reply, err := g.SendTransactions(getOkCtx(), in) require.Nil(t, err) require.Equal(t, txCount, len(reply.GetReplyList())) require.Equal(t, types.ErrInvalidParam.Error(), string(reply.GetReplyList()[0].Msg)) require.False(t, reply.GetReplyList()[0].IsOk) require.Equal(t, testMsg, reply.GetReplyList()[txCount-1].Msg) require.True(t, reply.GetReplyList()[txCount-1].IsOk) }
explode_data.jsonl/346
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 6464, 3992, 46267, 48761, 1155, 353, 8840, 836, 8, 1476, 50286, 1669, 4494, 7121, 18837, 18, 18, 2648, 52613, 2234, 3675, 42467, 917, 2398, 197, 322, 3803, 28272, 340, 3174, 1669, 2825, 3992, 16094, 18534, 2068, 284, 501, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1