text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestKexes(t *testing.T) { type kexResultErr struct { result *kexResult err error } for name, kex := range kexAlgoMap { a, b := memPipe() s := make(chan kexResultErr, 1) c := make(chan kexResultErr, 1) var magics handshakeMagics go func() { r, e := kex.Client(a, rand.Reader, &magics) a.Close() c <- kexResultErr{r, e} }() go func() { r, e := kex.Server(b, rand.Reader, &magics, testSigners["ecdsa"]) b.Close() s <- kexResultErr{r, e} }() clientRes := <-c serverRes := <-s if clientRes.err != nil { t.Errorf("client: %v", clientRes.err) } if serverRes.err != nil { t.Errorf("server: %v", serverRes.err) } if !reflect.DeepEqual(clientRes.result, serverRes.result) { t.Errorf("kex %q: mismatch %#v, %#v", name, clientRes.result, serverRes.result) } } }
explode_data.jsonl/44229
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 42, 327, 288, 1155, 353, 8840, 836, 8, 972, 13158, 595, 327, 2077, 7747, 2036, 972, 197, 9559, 353, 74, 327, 2077, 319, 197, 9859, 262, 1465, 319, 197, 2570, 2023, 829, 11, 595, 327, 1669, 2088, 595, 327, 2101, 3346, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSwitchCompositeLitIssue801(t *testing.T) { gopClTest(t, ` type T struct { X int } switch (T{}) { case T{1}: panic("bad") } `, `package main type T struct { X int } func main() { switch (T{}) { case T{1}: panic("bad") } } `) }
explode_data.jsonl/73583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 16837, 41685, 68954, 42006, 23, 15, 16, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 1313, 350, 2036, 341, 22123, 526, 198, 630, 17338, 320, 51, 28875, 341, 5638, 350, 90, 16, 92, 510, 30764, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWikiGetAllPagesSorting(t *testing.T) { ctx, cancel := testhelper.Context() defer cancel() stop, serverSocketPath := runWikiServiceServer(t) defer stop() client, conn := newWikiClient(t, serverSocketPath) defer conn.Close() wikiRepo, _, cleanupFunc := setupWikiRepo(t) defer cleanupFunc() expectedPages := createTestWikiPages(t, client, wikiRepo) testcasesWithSorting := []struct { desc string limit uint32 sort gitalypb.WikiGetAllPagesRequest_SortBy directionDesc bool expectedCount int }{ { desc: "Sorting by title with no limit", limit: 0, directionDesc: false, sort: gitalypb.WikiGetAllPagesRequest_TITLE, expectedCount: 3, }, { desc: "Sorting by title with limit of 1", limit: 1, directionDesc: false, sort: gitalypb.WikiGetAllPagesRequest_TITLE, expectedCount: 1, }, { desc: "Sorting by title with limit of 3", limit: 3, directionDesc: false, sort: gitalypb.WikiGetAllPagesRequest_TITLE, expectedCount: 3, }, { desc: "Sorting by title with limit of 3 and reversed direction", limit: 3, directionDesc: true, sort: gitalypb.WikiGetAllPagesRequest_TITLE, expectedCount: 3, }, { desc: "Sorting by created_at with no limit", limit: 0, directionDesc: false, sort: gitalypb.WikiGetAllPagesRequest_CREATED_AT, expectedCount: 3, }, { desc: "Sorting by created_at with limit of 1", limit: 1, directionDesc: false, sort: gitalypb.WikiGetAllPagesRequest_CREATED_AT, expectedCount: 1, }, { desc: "Sorting by created_at with limit of 3", limit: 3, directionDesc: false, sort: gitalypb.WikiGetAllPagesRequest_CREATED_AT, expectedCount: 3, }, { desc: "Sorting by created_at with limit of 3 and reversed direction", limit: 3, directionDesc: true, sort: gitalypb.WikiGetAllPagesRequest_CREATED_AT, expectedCount: 3, }, } expectedSortedByCreatedAtPages := []*gitalypb.WikiPage{expectedPages[1], expectedPages[0], expectedPages[2]} for _, tc := range testcasesWithSorting { t.Run(tc.desc, func(t *testing.T) { rpcRequest := gitalypb.WikiGetAllPagesRequest{Repository: wikiRepo, Limit: tc.limit, 
DirectionDesc: tc.directionDesc, Sort: tc.sort} c, err := client.WikiGetAllPages(ctx, &rpcRequest) require.NoError(t, err) receivedPages := readWikiPagesFromWikiGetAllPagesClient(t, c) require.Len(t, receivedPages, tc.expectedCount) if tc.sort == gitalypb.WikiGetAllPagesRequest_CREATED_AT { expectedPages = expectedSortedByCreatedAtPages } for i := 0; i < tc.expectedCount; i++ { var index int if tc.directionDesc { index = tc.expectedCount - i - 1 } else { index = i } requireWikiPagesEqual(t, expectedPages[index], receivedPages[i]) } }) } }
explode_data.jsonl/48805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1343 }
[ 2830, 3393, 53896, 1949, 2403, 17713, 71681, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 1273, 18764, 9328, 741, 16867, 9121, 2822, 62644, 11, 3538, 10286, 1820, 1669, 1598, 53896, 1860, 5475, 1155, 340, 16867, 2936, 2822, 25291,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCatalogConversion(t *testing.T) { catalog := &osb.CatalogResponse{} err := json.Unmarshal([]byte(testCatalog), &catalog) if err != nil { t.Fatalf("Failed to unmarshal test catalog: %v", err) } serviceClasses, servicePlans, err := convertAndFilterCatalog(catalog, nil) if err != nil { t.Fatalf("Failed to convertAndFilterCatalog: %v", err) } if len(serviceClasses) != 1 { t.Fatalf("Expected 1 serviceclasses for testCatalog, but got: %d", len(serviceClasses)) } if len(servicePlans) != 2 { t.Fatalf("Expected 2 plans for testCatalog, but got: %d", len(servicePlans)) } checkPlan(servicePlans[0], "d3031751-XXXX-XXXX-XXXX-a42377d3320e", "fake-plan-1", "Shared fake Server, 5tb persistent disk, 40 max concurrent connections", t) checkPlan(servicePlans[1], "0f4008b5-XXXX-XXXX-XXXX-dace631cd648", "fake-plan-2", "Shared fake Server, 5tb persistent disk, 40 max concurrent connections. 100 async", t) }
explode_data.jsonl/40486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 41606, 48237, 1155, 353, 8840, 836, 8, 341, 1444, 7750, 1669, 609, 436, 65, 727, 7750, 2582, 16094, 9859, 1669, 2951, 38097, 10556, 3782, 8623, 41606, 701, 609, 26539, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCLI_NewBroker(t *testing.T) { t.Run("Valid", func(t *testing.T) { logStub := mocks.NewLoggerStub() conf := config.Config{ MQTTTCPAddress: ":1883", MetricsEnabled: true, MetricsAddress: ":8888", MetricsPath: "/metrics", MetricsProfiling: true, } b, err := newBroker(conf, logStub.Logger()) require.Nil(t, err) require.NotNil(t, b) }) t.Run("InvalidMetrics", func(t *testing.T) { logStub := mocks.NewLoggerStub() conf := config.Config{ MQTTTCPAddress: ":1883", MetricsEnabled: true, MetricsAddress: "", MetricsPath: "", } b, err := newBroker(conf, logStub.Logger()) require.NotNil(t, err) require.Nil(t, b) }) }
explode_data.jsonl/34032
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 63959, 39582, 65545, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 4088, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 6725, 33838, 1669, 68909, 7121, 7395, 33838, 741, 197, 67850, 1669, 2193, 10753, 515, 298, 9209, 48, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiUpdateOnSameTable(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(x int, y int)") tk.MustExec("insert into t values()") tk.MustExec("update t t1, t t2 set t2.y=1, t1.x=2") tk.MustQuery("select * from t").Check(testkit.Rows("2 1")) tk.MustExec("update t t1, t t2 set t1.x=t2.y, t2.y=t1.x") tk.MustQuery("select * from t").Check(testkit.Rows("1 2")) // Update generated columns tk.MustExec("drop table if exists t") tk.MustExec("create table t(x int, y int, z int as (x+10) stored, w int as (y-10) virtual)") tk.MustExec("insert into t(x, y) values(1, 2), (3, 4)") tk.MustExec("update t t1, t t2 set t2.y=1, t1.x=2 where t1.x=1") tk.MustQuery("select * from t").Check(testkit.Rows("2 1 12 -9", "3 1 13 -9")) tk.MustExec("update t t1, t t2 set t1.x=5, t2.y=t1.x where t1.x=3") tk.MustQuery("select * from t").Check(testkit.Rows("2 3 12 -7", "5 3 15 -7")) tk.MustExec("drop table if exists t") tk.MustExec("create table t(a int, b int, c int as (a+b) stored)") tk.MustExec("insert into t(a, b) values (1, 2)") tk.MustExec("update t t1, t t2 set t2.a=3") tk.MustQuery("select * from t").Check(testkit.Rows("3 2 5")) tk.MustExec("update t t1, t t2 set t1.a=4, t2.b=5") tk.MustQuery("select * from t").Check(testkit.Rows("4 5 9")) // Update primary keys tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t (a int primary key)") tk.MustExec("insert into t values (1), (2)") tk.MustExec("update t set a=a+2") tk.MustQuery("select * from t").Check(testkit.Rows("3", "4")) tk.MustExec("update t m, t n set m.a = n.a+10 where m.a=n.a") tk.MustQuery("select * from t").Check(testkit.Rows("13", "14")) tk.MustExec("drop table if exists t") tk.MustExec("create table t (a int primary key, b int)") tk.MustExec("insert into t values (1,3), (2,4)") tk.MustGetErrMsg("update t m, t n set m.a = n.a+10, n.b = m.b+1 
where m.a=n.a", `[planner:1706]Primary key/partition key update is not allowed since the table is updated both as 'm' and 'n'.`) tk.MustExec("drop table if exists t") tk.MustExec("create table t (a int, b int, c int, primary key(a, b))") tk.MustExec("insert into t values (1,3,5), (2,4,6)") tk.MustExec("update t m, t n set m.a = n.a+10, m.b = n.b+10 where m.a=n.a") tk.MustQuery("select * from t").Check(testkit.Rows("11 13 5", "12 14 6")) tk.MustExec("update t m, t n, t q set q.c=m.a+n.b, n.c = m.a+1, m.c = n.b+1 where m.b=n.b AND m.a=q.a") tk.MustQuery("select * from t").Check(testkit.Rows("11 13 24", "12 14 26")) tk.MustGetErrMsg("update t m, t n, t q set m.a = m.a+1, n.c = n.c-1, q.c = q.a+q.b where m.b=n.b and n.b=q.b", `[planner:1706]Primary key/partition key update is not allowed since the table is updated both as 'm' and 'n'.`) }
explode_data.jsonl/76264
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1247 }
[ 2830, 3393, 20358, 4289, 1925, 19198, 2556, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 741, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncryptionData(t *testing.T) { auth := "secret" //Generates k1 := GenerateRandomKey() //Encrypts the key json blob ek, err := EncryptKey(k1, auth, "") f := util.TempFilePath() assert.NoError(t, ek.Save(f)) assert.NoError(t, err) //Decrypts Json Object k2, err := ek.Decrypt(auth) assert.NoError(t, err) assert.Equal(t, k1, k2) // wrong password: should fails k3, err := ek.Decrypt("Secret") assert.Error(t, err) assert.Nil(t, k3) ek.Crypto.Cipher = "invalid" /// manipulated key data k4, err := ek.Decrypt(auth) assert.Error(t, err) assert.Nil(t, k4) ek.Crypto.CipherParams.IV = "invalid" /// manipulated key data k4, err = ek.Decrypt(auth) assert.Error(t, err) assert.Nil(t, k4) ek.Crypto.CipherText = "invalid" /// manipulated key data k4, err = ek.Decrypt(auth) assert.Error(t, err) assert.Nil(t, k4) ek.Crypto.KDF = "invalid" /// manipulated key data k4, err = ek.Decrypt(auth) assert.Error(t, err) assert.Nil(t, k4) ek.Crypto.MAC = "invalid" /// manipulated key data k4, err = ek.Decrypt(auth) assert.Error(t, err) assert.Nil(t, k4) ek.Crypto.KDFParams = nil /// manipulated key data k4, err = ek.Decrypt(auth) assert.Error(t, err) assert.Nil(t, k4) }
explode_data.jsonl/79082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 530 }
[ 2830, 3393, 79239, 1043, 1155, 353, 8840, 836, 8, 341, 78011, 1669, 330, 20474, 698, 197, 322, 5531, 973, 198, 16463, 16, 1669, 19813, 13999, 1592, 741, 197, 322, 61520, 82, 279, 1376, 2951, 23404, 198, 197, 1225, 11, 1848, 1669, 5545...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnescapePattern(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) testCases := []struct { pattern string expected string escapeToken string }{ {``, ``, `\`}, {``, ``, `\\`}, {`ABC\\`, `ABC\`, `\`}, {`ABC\\\\`, `ABC\\`, `\\`}, {`A\\\\BC`, `A\\BC`, `\`}, {`A\\\\\\\\C`, `A\\\\C`, `\\`}, {`A\\B\\C`, `A\B\C`, `\`}, {`A\\\\B\\\\C`, `A\\B\\C`, `\\`}, {`ABC`, `ABC`, `\`}, {`ABC`, `ABC`, `\\`}, {`A\BC`, `ABC`, `\`}, {`A\BC`, `A\BC`, `\\`}, {`A\\\BC`, `A\BC`, `\`}, {`A\\\\\\BC`, `A\\BC`, `\\`}, {`\漢\字`, `漢字`, `\`}, {`\ \\A\B`, ` \AB`, `\`}, } for _, tc := range testCases { t.Run(fmt.Sprintf("%s-->%s Escape=%s", tc.pattern, tc.expected, tc.escapeToken), func(t *testing.T) { actual, err := unescapePattern(tc.pattern, tc.escapeToken, true /* emitEscapeCharacterLastError */) if err != nil { t.Fatal(err) } if tc.expected != actual { t.Errorf("expected unescaped pattern: %s, got %s\n", tc.expected, actual) } }) } }
explode_data.jsonl/24151
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 528 }
[ 2830, 3393, 1806, 12998, 15760, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 18185, 37302, 1669, 3056, 1235, 341, 197, 3223, 3227, 257, 914, 198, 197, 42400...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWebhookLoopback(t *testing.T) { stopCh := make(chan struct{}) defer close(stopCh) webhookPath := "/webhook-test" called := int32(0) client, _ := startTestServer(t, stopCh, TestServerSetup{ ModifyServerRunOptions: func(opts *options.ServerRunOptions) { }, ModifyServerConfig: func(config *master.Config) { // Avoid resolveable kubernetes service config.ExtraConfig.EndpointReconcilerType = reconcilers.NoneEndpointReconcilerType // Hook into audit to watch requests config.GenericConfig.AuditBackend = auditSinkFunc(func(events ...*auditinternal.Event) {}) config.GenericConfig.AuditPolicyChecker = auditChecker(func(attrs authorizer.Attributes) (auditinternal.Level, []auditinternal.Stage) { if attrs.GetPath() == webhookPath { if attrs.GetUser().GetName() != "system:apiserver" { t.Errorf("expected user %q, got %q", "system:apiserver", attrs.GetUser().GetName()) } atomic.AddInt32(&called, 1) } return auditinternal.LevelNone, nil }) }, }) fail := admissionv1beta1.Fail _, err := client.AdmissionregistrationV1beta1().MutatingWebhookConfigurations().Create(&admissionv1beta1.MutatingWebhookConfiguration{ ObjectMeta: metav1.ObjectMeta{Name: "webhooktest.example.com"}, Webhooks: []admissionv1beta1.Webhook{{ Name: "webhooktest.example.com", ClientConfig: admissionv1beta1.WebhookClientConfig{ Service: &admissionv1beta1.ServiceReference{Namespace: "default", Name: "kubernetes", Path: &webhookPath}, }, Rules: []admissionv1beta1.RuleWithOperations{{ Operations: []admissionv1beta1.OperationType{admissionv1beta1.OperationAll}, Rule: admissionv1beta1.Rule{APIGroups: []string{""}, APIVersions: []string{"v1"}, Resources: []string{"configmaps"}}, }}, FailurePolicy: &fail, }}, }) if err != nil { t.Fatal(err) } err = wait.PollImmediate(100*time.Millisecond, 30*time.Second, func() (done bool, err error) { _, err = client.CoreV1().ConfigMaps("default").Create(&v1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{Name: "webhook-test"}, Data: map[string]string{"invalid key": "value"}, }) if err == 
nil { t.Fatal("Unexpected success") } if called > 0 { return true, nil } t.Logf("%v", err) t.Logf("webhook not called yet, continuing...") return false, nil }) if err != nil { t.Fatal(err) } }
explode_data.jsonl/11938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 905 }
[ 2830, 3393, 5981, 20873, 14620, 1419, 1155, 353, 8840, 836, 8, 341, 62644, 1143, 1669, 1281, 35190, 2036, 37790, 16867, 3265, 60170, 1143, 692, 97250, 20873, 1820, 1669, 3521, 2911, 20873, 16839, 1837, 1444, 4736, 1669, 526, 18, 17, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDPAReconciler_getGCPRegistryEnvVars(t *testing.T) { tests := []struct { name string bsl *velerov1.BackupStorageLocation wantRegistryContainerEnvVar []corev1.EnvVar secret *corev1.Secret wantErr bool }{ { name: "given gcp bsl, appropriate env var for the container are returned", bsl: &velerov1.BackupStorageLocation{ ObjectMeta: metav1.ObjectMeta{ Name: "test-bsl", Namespace: "test-ns", }, Spec: velerov1.BackupStorageLocationSpec{ Provider: GCPProvider, StorageType: velerov1.StorageType{ ObjectStorage: &velerov1.ObjectStorageLocation{ Bucket: "gcp-bucket", }, }, Config: map[string]string{}, }, }, secret: &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "cloud-credentials-gcp", Namespace: "test-ns", }, Data: secretData, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { fakeClient, err := getFakeClientFromObjectsForRegistry(tt.secret, tt.bsl) if err != nil { t.Errorf("error in creating fake client, likely programmer error") } r := &DPAReconciler{ Client: fakeClient, Scheme: fakeClient.Scheme(), Log: logr.Discard(), Context: newContextForTest(tt.name), NamespacedName: types.NamespacedName{ Namespace: tt.bsl.Namespace, Name: tt.bsl.Name, }, EventRecorder: record.NewFakeRecorder(10), } tt.wantRegistryContainerEnvVar = []corev1.EnvVar{ { Name: RegistryStorageEnvVarKey, Value: GCS, }, { Name: RegistryStorageGCSBucket, Value: "gcp-bucket", }, { Name: RegistryStorageGCSKeyfile, Value: "/credentials-gcp/cloud", }, } gotRegistryContainerEnvVar, gotErr := r.getGCPRegistryEnvVars(tt.bsl, testGCPEnvVar) if (gotErr != nil) != tt.wantErr { t.Errorf("ValidateBackupStorageLocations() gotErr = %v, wantErr %v", gotErr, tt.wantErr) return } if !reflect.DeepEqual(tt.wantRegistryContainerEnvVar, gotRegistryContainerEnvVar) { t.Errorf("expected registry container env var to be %#v, got %#v", tt.wantRegistryContainerEnvVar, gotRegistryContainerEnvVar) } }) } }
explode_data.jsonl/45545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1105 }
[ 2830, 3393, 35, 8041, 693, 40446, 5769, 3062, 38, 7123, 15603, 14359, 28305, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 664, 914, 198, 197, 93801, 75, 5108, 353, 889, 261, 859, 16, 8864, 454, 5793, 4707, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSubscribeTBT(t *testing.T) { r := prework() var res bool var err error // 添加你的方法 r.AddDepthHook(func(ts time.Time, data DepthData) error { //fmt.Println("这是自定义AddBookMsgHook") fmt.Println("当前数据是:", data) return nil }) param := map[string]string{} param["channel"] = "books-l2-tbt" //param["channel"] = "books" param["instId"] = "BTC-USD-SWAP" res, _, err = r.Subscribe(param) if res { fmt.Println("Successfully subscribed!") } else { fmt.Println("Subscription failed!", err) t.Fatal("Subscription failed!", err) //return } time.Sleep(60 * time.Second) }
explode_data.jsonl/61329
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 28573, 51, 17602, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 855, 1778, 741, 2405, 592, 1807, 198, 2405, 1848, 1465, 271, 197, 322, 82339, 103929, 39907, 198, 7000, 1904, 19776, 31679, 18552, 35864, 882, 16299, 11, 821, 43920...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceList(t *testing.T) { t.Run("success", func(t *testing.T) { // GIVEN mockRepository := &automock.Repository{} defer mockRepository.AssertExpectations(t) ctx := context.TODO() givenDefs := []model.LabelDefinition{ { Tenant: "tenant", Key: "key1", }, { Tenant: "tenant", Key: "key2", }, } mockRepository.On("List", ctx, "tenant").Return(givenDefs, nil) sut := labeldef.NewService(mockRepository, nil) // WHEN actual, err := sut.List(ctx, "tenant") // THEN require.NoError(t, err) assert.Equal(t, givenDefs, actual) }) t.Run("on error from repository", func(t *testing.T) { // GIVEN mockRepository := &automock.Repository{} defer mockRepository.AssertExpectations(t) ctx := context.TODO() mockRepository.On("List", ctx, "tenant").Return(nil, errors.New("some error")) sut := labeldef.NewService(mockRepository, nil) // WHEN _, err := sut.List(ctx, "tenant") // THEN require.EqualError(t, err, "while fetching Label Definitions: some error") }) }
explode_data.jsonl/76924
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 456 }
[ 2830, 3393, 1860, 852, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5630, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 89836, 198, 197, 77333, 4624, 1669, 609, 27073, 1176, 25170, 16094, 197, 16867, 7860, 4624, 11711, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCopyFromImageLock(t *testing.T) { env := helpers.BuildEnv(t) imgpkg := helpers.Imgpkg{T: t, L: helpers.Logger{}, ImgpkgPath: env.ImgpkgPath} logger := helpers.Logger{} defer env.Cleanup() randomImageDigest := "" randomImageDigestRef := "" logger.Section("create random image for tests", func() { randomImageDigest = env.ImageFactory.PushSimpleAppImageWithRandomFile(imgpkg, env.Image) randomImageDigestRef = env.Image + randomImageDigest }) t.Run("when copying to repo, it is successful and generates an ImageLock file", func(t *testing.T) { env.UpdateT(t) imageLockYAML := fmt.Sprintf(`--- apiVersion: imgpkg.carvel.dev/v1alpha1 kind: ImagesLock images: - image: %s annotations: some-annotation: some-value `, randomImageDigestRef) testDir := env.Assets.CreateTempFolder("copy-image-to-repo-with-lock-file") lockFile := filepath.Join(testDir, "images.lock.yml") err := ioutil.WriteFile(lockFile, []byte(imageLockYAML), 0700) require.NoError(t, err) logger.Section("copy from lock file", func() { lockOutputPath := filepath.Join(testDir, "image-relocate-lock.yml") imgpkg.Run([]string{"copy", "--lock", lockFile, "--to-repo", env.RelocationRepo, "--lock-output", lockOutputPath}) imageRefs := []lockconfig.ImageRef{{ Image: fmt.Sprintf("%s%s", env.RelocationRepo, randomImageDigest), Annotations: map[string]string{"some-annotation": "some-value"}, }} env.Assert.AssertImagesLock(lockOutputPath, imageRefs) refs := []string{env.RelocationRepo + randomImageDigest} require.NoError(t, env.Assert.ValidateImagesPresenceInRegistry(refs)) }) }) t.Run("when Copying images to Tar file and after importing to a new Repo, it keeps the tags and generates a ImageLock file", func(t *testing.T) { env.UpdateT(t) imageLockYAML := fmt.Sprintf(`--- apiVersion: imgpkg.carvel.dev/v1alpha1 kind: ImagesLock images: - image: %s `, randomImageDigestRef) testDir := env.Assets.CreateTempFolder("copy--image-lock-via-tar-keep-tag") lockFile := filepath.Join(testDir, "images.lock.yml") err := 
ioutil.WriteFile(lockFile, []byte(imageLockYAML), 0700) require.NoError(t, err) tarFilePath := filepath.Join(testDir, "image.tar") logger.Section("copy image to tar file", func() { imgpkg.Run([]string{"copy", "--lock", lockFile, "--to-tar", tarFilePath}) env.Assert.ImagesDigestIsOnTar(tarFilePath, randomImageDigestRef) }) lockOutputPath := filepath.Join(testDir, "relocate-from-tar-lock.yml") logger.Section("import tar to new repository", func() { imgpkg.Run([]string{"copy", "--tar", tarFilePath, "--to-repo", env.RelocationRepo, "--lock-output", lockOutputPath}) expectedRef := fmt.Sprintf("%s%s", env.RelocationRepo, randomImageDigest) env.Assert.AssertImagesLock(lockOutputPath, []lockconfig.ImageRef{{Image: expectedRef}}) refs := []string{env.RelocationRepo + randomImageDigest} require.NoError(t, env.Assert.ValidateImagesPresenceInRegistry(refs)) }) }) }
explode_data.jsonl/45463
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1115 }
[ 2830, 3393, 12106, 3830, 1906, 11989, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 30187, 25212, 14359, 1155, 340, 39162, 30069, 1669, 30187, 13, 13033, 30069, 76025, 25, 259, 11, 444, 25, 30187, 12750, 22655, 2362, 21888, 7351, 1820, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRejectModelSpecMissing(t *testing.T) { g := gomega.NewGomegaWithT(t) kfsvc := TFExampleKFService.DeepCopy() kfsvc.Spec.Default.Tensorflow = nil g.Expect(kfsvc.ValidateCreate()).Should(gomega.MatchError("Exactly one of [Custom, Tensorflow, ScikitLearn, XGBoost] may be specified in ModelSpec")) }
explode_data.jsonl/50832
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 78413, 1712, 8327, 25080, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 16463, 69, 58094, 1669, 29145, 13314, 65008, 1860, 55602, 12106, 741, 16463, 69, 58094, 36473, 13275, 44203,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBigIntToUintBytes_128(t *testing.T) { b := big.NewInt(0) b.SetBytes([]byte{0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x12, 0x34}) res, err := BigIntToUintBytes(b, 16) assert.NoError(t, err) assert.Equal(t, MustHexDecodeString("0x00000000000000000000000000001234"), res) }
explode_data.jsonl/18433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 87474, 1249, 21570, 7078, 62, 16, 17, 23, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 2409, 7121, 1072, 7, 15, 340, 2233, 4202, 7078, 10556, 3782, 90, 15, 87, 15, 11, 220, 15, 87, 15, 11, 220, 15, 87, 15, 11, 220, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWaitWithCancelledContext(t *testing.T) { t.Parallel() ctx, cancel := context.WithCancel(context.Background()) go func() { time.Sleep(100 * time.Millisecond) cancel() }() err := wait(ctx, 1*time.Hour) assert.True(t, errors.Is(err, context.Canceled)) }
explode_data.jsonl/82067
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 14190, 2354, 39473, 1972, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 30680, 2915, 368, 341, 197, 21957, 31586, 7, 16, 15, 15, 353, 882, 71482, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGeneratePKIXPublicKeyIdFromPrivateKey(t *testing.T) { tcs := []struct { name string privateKey []byte expectedError bool }{ { name: "generate rsa private key id successful", privateKey: []byte(rsa2048PrivateKey), expectedError: false, }, { name: "generate ecdsa private key id successful", privateKey: []byte(ec256PrivateKey), expectedError: false, }, } for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { key, err := parsePkixPrivateKeyPem(tc.privateKey) if err != nil { t.Fatalf("error parsing key %v", err) } _, err = generatePkixPublicKeyId(key) if tc.expectedError { if err == nil { t.Errorf("generatePkixPublicKeyId(...) = nil, expected non nil") } } else { if err != nil { t.Errorf("generatePkixPublicKeyId(..) = %v, expected nil", err) } } }) } }
explode_data.jsonl/59157
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 31115, 22242, 5396, 61822, 764, 3830, 75981, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 2455, 1592, 262, 3056, 3782, 198, 197, 42400, 1454, 1807, 198, 197, 59403, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSokuon(t *testing.T) { const want = "nn" for _, v := range []string{"っん", "ッン"} { got, err := KanaToRomaji(v) assert.Equal(t, want, got) assert.Nil(t, err) } }
explode_data.jsonl/11342
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 50, 16493, 263, 1155, 353, 8840, 836, 8, 341, 4777, 1366, 284, 330, 7370, 1837, 2023, 8358, 348, 1669, 2088, 3056, 917, 4913, 41791, 24791, 497, 330, 25204, 15698, 9207, 341, 197, 3174, 354, 11, 1848, 1669, 730, 3362, 1249...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFileStoreGet(t *testing.T) { path, tearDown := setupConfigFile(t, testConfig) defer tearDown() fs, err := config.NewFileStore(path, false) require.NoError(t, err) configStore, err := config.NewStoreFromBacking(fs, nil, false) require.NoError(t, err) defer configStore.Close() cfg := configStore.Get() assert.Equal(t, "http://TestStoreNew", *cfg.ServiceSettings.SiteURL) cfg2 := configStore.Get() assert.Equal(t, "http://TestStoreNew", *cfg.ServiceSettings.SiteURL) assert.True(t, cfg == cfg2, "Get() returned different configuration instances") newCfg := &model.Config{} _, err = configStore.Set(newCfg) require.NoError(t, err) assert.False(t, newCfg == cfg, "returned config should have been different from original") }
explode_data.jsonl/32379
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 262 }
[ 2830, 3393, 1703, 6093, 1949, 1155, 353, 8840, 836, 8, 341, 26781, 11, 32825, 1669, 6505, 2648, 1703, 1155, 11, 1273, 2648, 340, 16867, 32825, 2822, 53584, 11, 1848, 1669, 2193, 7121, 1703, 6093, 5581, 11, 895, 340, 17957, 35699, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEncodeAsKeyEdgeCases exercises EncodeAsKey error paths: nil values,
// oversized key lengths, unknown types, and type/length mismatches for each
// supported SQL type.
func TestEncodeAsKeyEdgeCases(t *testing.T) {
	// nil is never a valid key value.
	_, err := EncodeAsKey(nil, IntegerType, 0)
	require.ErrorIs(t, err, ErrInvalidValue)

	// Requested key length above the global maximum is rejected up front.
	_, err = EncodeAsKey("a", VarcharType, maxKeyLen+1)
	require.ErrorIs(t, err, ErrMaxKeyLengthExceeded)

	// An unrecognized SQL type name is reported as an invalid value.
	_, err = EncodeAsKey("a", "NOTATYPE", maxKeyLen)
	require.ErrorIs(t, err, ErrInvalidValue)

	t.Run("varchar cases", func(t *testing.T) {
		// Non-string value for VarcharType.
		_, err = EncodeAsKey(true, VarcharType, 10)
		require.ErrorIs(t, err, ErrInvalidValue)

		// String longer than the declared key length.
		_, err = EncodeAsKey("abc", VarcharType, 1)
		require.ErrorIs(t, err, ErrMaxLengthExceeded)
	})

	t.Run("integer cases", func(t *testing.T) {
		// Non-integer value for IntegerType.
		_, err = EncodeAsKey(true, IntegerType, 8)
		require.ErrorIs(t, err, ErrInvalidValue)

		// int64 with a 4-byte key length yields ErrCorruptedData.
		_, err = EncodeAsKey(int64(10), IntegerType, 4)
		require.ErrorIs(t, err, ErrCorruptedData)
	})

	t.Run("boolean cases", func(t *testing.T) {
		// Non-bool value for BooleanType.
		_, err = EncodeAsKey("abc", BooleanType, 1)
		require.ErrorIs(t, err, ErrInvalidValue)

		// Booleans with a 2-byte key length yield ErrCorruptedData.
		_, err = EncodeAsKey(true, BooleanType, 2)
		require.ErrorIs(t, err, ErrCorruptedData)
	})

	t.Run("blob cases", func(t *testing.T) {
		// Non-[]byte value for BLOBType.
		_, err = EncodeAsKey("abc", BLOBType, 3)
		require.ErrorIs(t, err, ErrInvalidValue)

		// Blob longer than the declared key length.
		_, err = EncodeAsKey([]byte{1, 2, 3}, BLOBType, 2)
		require.ErrorIs(t, err, ErrMaxLengthExceeded)
	})
}
explode_data.jsonl/64097
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 525 }
[ 2830, 3393, 32535, 2121, 1592, 11656, 37302, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 56562, 2121, 1592, 27907, 11, 4440, 929, 11, 220, 15, 340, 17957, 6141, 3872, 1155, 11, 1848, 11, 15495, 7928, 1130, 692, 197, 6878, 184...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOverwrite verifies that setting the same key twice overwrites the
// existing value rather than adding a second entry, for each supported key
// type.
func TestOverwrite(t *testing.T) {
	tests := []struct {
		name string
		key  func(int) interface{} // builds a key of the type under test
	}{
		{name: "uintptr", key: uKey},
		{name: "int", key: iKey},
		{name: "string", key: sKey},
		{name: "[]byte", key: bKey},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			m := &HashMap{}
			elephant := "elephant"
			monkey := "monkey"
			// Second Set with the same key must replace the first value.
			m.Set(tt.key(1), elephant)
			m.Set(tt.key(1), monkey)
			if m.Len() != 1 {
				t.Errorf("map should contain exactly one element but has %v items.", m.Len())
			}
			item, ok := m.Get(tt.key(1)) // Retrieve inserted element.
			if !ok {
				t.Error("ok should be true for item stored within the map.")
			}
			if item != monkey {
				t.Error("wrong item returned.")
			}
		})
	}
}
explode_data.jsonl/24422
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 1918, 4934, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 23634, 220, 2915, 1548, 8, 3749, 16094, 197, 59403, 197, 197, 47006, 25, 330, 51380, 497, 1376, 25, 575, 1592, 1583, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLargeDefs packs an object file for a package whose export data contains
// a very large type definition (a 1000-field struct with long tags), then
// compiles, links, and runs a program importing it to ensure the pack tool
// handles oversized archive entries.
func TestLargeDefs(t *testing.T) {
	testenv.MustHaveGoBuild(t)

	dir := tmpDir(t)
	defer os.RemoveAll(dir)

	// Generate the large package source on the fly.
	large := filepath.Join(dir, "large.go")
	f, err := os.Create(large)
	if err != nil {
		t.Fatal(err)
	}
	b := bufio.NewWriter(f)
	// printf writes to the buffered file, failing the test on any I/O error.
	printf := func(format string, args ...interface{}) {
		_, err := fmt.Fprintf(b, format, args...)
		if err != nil {
			t.Fatalf("Writing to %s: %v", large, err)
		}
	}
	printf("package large\n\ntype T struct {\n")
	for i := 0; i < 1000; i++ {
		printf("f%d int `tag:\"", i)
		for j := 0; j < 100; j++ {
			printf("t%d=%d,", j, j)
		}
		printf("\"`\n")
	}
	printf("}\n")
	if err = b.Flush(); err != nil {
		t.Fatal(err)
	}
	if err = f.Close(); err != nil {
		t.Fatal(err)
	}

	// A tiny main program that imports the large package.
	main := filepath.Join(dir, "main.go")
	prog := `
	package main
	import "large"
	var V large.T
	func main() {
		println("ok")
	}
	`
	err = ioutil.WriteFile(main, []byte(prog), 0666)
	if err != nil {
		t.Fatal(err)
	}

	run := func(args ...string) string {
		return doRun(t, dir, args...)
	}

	// Build pack, compile the large package, archive it, then compile/link
	// the main program against the archive and run it.
	goBin := testenv.GoToolPath(t)
	run(goBin, "build", "cmd/pack") // writes pack binary to dir
	run(goBin, "tool", "compile", "large.go")
	run("./pack", "grc", "large.a", "large.o")
	run(goBin, "tool", "compile", "-I", ".", "main.go")
	run(goBin, "tool", "link", "-L", ".", "-o", "a.out", "main.o")
	out := run("./a.out")
	if out != "ok\n" {
		t.Fatalf("incorrect output: %q, want %q", out, "ok\n")
	}
}
explode_data.jsonl/67507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 654 }
[ 2830, 3393, 34253, 56031, 1155, 353, 8840, 836, 8, 341, 18185, 3160, 50463, 12116, 10850, 11066, 1155, 692, 48532, 1669, 4174, 6184, 1155, 340, 16867, 2643, 84427, 14161, 340, 8810, 2744, 1669, 26054, 22363, 14161, 11, 330, 16767, 18002, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestKubeadmConfigReconciler_Reconcile_DoesNotFailIfCASecretsAlreadyExist
// ensures reconciliation succeeds when a CA secret for the cluster already
// exists, instead of erroring on the pre-existing object.
func TestKubeadmConfigReconciler_Reconcile_DoesNotFailIfCASecretsAlreadyExist(t *testing.T) {
	g := NewWithT(t)

	// Cluster with infrastructure ready but control plane not yet initialized.
	cluster := newCluster("my-cluster")
	cluster.Status.InfrastructureReady = true
	cluster.Status.ControlPlaneInitialized = false
	m := newControlPlaneMachine(cluster, "control-plane-machine")
	configName := "my-config"
	c := newControlPlaneInitKubeadmConfig(m, configName)

	// Pre-create the etcd CA secret that reconciliation would otherwise
	// generate itself.
	scrt := &corev1.Secret{
		ObjectMeta: metav1.ObjectMeta{
			Name:      fmt.Sprintf("%s-%s", cluster.Name, secret.EtcdCA),
			Namespace: "default",
		},
		Data: map[string][]byte{
			"tls.crt": []byte("hello world"),
			"tls.key": []byte("hello world"),
		},
	}
	fakec := helpers.NewFakeClientWithScheme(setupScheme(), []client.Object{cluster, m, c, scrt}...)
	reconciler := &KubeadmConfigReconciler{
		Client:          fakec,
		KubeadmInitLock: &myInitLocker{},
	}
	req := ctrl.Request{
		NamespacedName: client.ObjectKey{Namespace: "default", Name: configName},
	}
	_, err := reconciler.Reconcile(ctx, req)
	g.Expect(err).NotTo(HaveOccurred())
}
explode_data.jsonl/44330
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 419 }
[ 2830, 3393, 42, 392, 3149, 76, 2648, 693, 40446, 5769, 50693, 40446, 457, 1557, 7072, 2623, 19524, 2679, 87516, 50856, 82, 38370, 25613, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 197, 18855, 1669, 501, 28678, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_New checks that the generator for a "widget" resource produces exactly
// the expected model files and runs no external commands.
func Test_New(t *testing.T) {
	r := require.New(t)

	g, err := New(&Options{
		Name: "widget",
	})
	r.NoError(err)

	run := gentest.NewRunner()
	run.With(g)
	r.NoError(run.Run())

	res := run.Results()
	// The generator only writes files; no shell commands are expected.
	r.Len(res.Commands, 0)
	r.Len(res.Files, 2)
	r.NoError(gentest.CompareFiles([]string{"models/widget.go", "models/widget_test.go"}, res.Files))
}
explode_data.jsonl/6874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 39582, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 692, 3174, 11, 1848, 1669, 1532, 2099, 3798, 515, 197, 21297, 25, 330, 9797, 756, 197, 3518, 7000, 35699, 3964, 692, 56742, 1669, 15886, 477, 7121, 19486, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSyncToUnsignedCommit verifies that syncing an app whose target commit
// is not GPG-signed fails the sync operation and leaves the app out of sync
// and missing. Skipped unless GPG verification is enabled in the environment.
func TestSyncToUnsignedCommit(t *testing.T) {
	SkipOnEnv(t, "GPG")
	Given(t).
		Project("gpg").
		Path(guestbookPath).
		When().
		IgnoreErrors().
		CreateApp().
		Sync().
		Then().
		// The sync must fail because the commit is unsigned.
		Expect(OperationPhaseIs(OperationError)).
		Expect(SyncStatusIs(SyncStatusCodeOutOfSync)).
		Expect(HealthIs(health.HealthStatusMissing))
}
explode_data.jsonl/35615
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 12154, 1249, 56421, 33441, 1155, 353, 8840, 836, 8, 341, 7568, 13389, 1925, 14359, 1155, 11, 330, 38, 11383, 1138, 9600, 2071, 1155, 4292, 197, 197, 7849, 445, 70, 3517, 38609, 197, 69640, 3268, 3045, 2190, 1820, 4292, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOrphanPartitionCleanup checks that an orphan partition directory left
// on disk is removed when the indexer restarts.
func TestOrphanPartitionCleanup(t *testing.T) {
	// Explicitly create orphan index folder.
	// Restart the indexer
	// Check if orphan index folder gets deleted.
	if clusterconfig.IndexUsing == "forestdb" {
		fmt.Println("Not running TestOrphanPartitionCleanup for forestdb")
		return
	}
	absIndexStorageDir := getIndexerStorageDir(t)
	if absIndexStorageDir == "" {
		return
	}
	var err error
	err = secondaryindex.CreateSecondaryIndex2("idx3_age_regular", "default", indexManagementAddress, "", []string{"age"}, []bool{false}, false, nil, c.KEY, []string{"age"}, false, 60, nil)
	FailTestIfError(err, "Error in creating the index", t)
	// Let the persistent snapshot complete.
	time.Sleep(10 * time.Second)
	// Verify the index is queryable
	_, err = secondaryindex.Range("idx3_age_regular", "default", indexScanAddress, []interface{}{35}, []interface{}{40}, 1, false, defaultlimit, c.SessionConsistency, nil)
	FailTestIfError(err, "Error in range scan", t)
	log.Printf("Query on idx3_age_regular is successful\n")
	// Build a bogus sibling partition path (partition id 404) next to the
	// real slice directory and create it on disk as the orphan.
	slicePath, err := tc.GetIndexSlicePath("idx3_age_regular", "default", absIndexStorageDir, c.PartitionId(1))
	FailTestIfError(err, "Error in GetIndexSlicePath", t)
	comps := strings.Split(slicePath, "_")
	dummyPartnPath := strings.Join(comps[:len(comps)-1], "_")
	dummyPartnPath += fmt.Sprintf("_%d.index", c.PartitionId(404))
	err = os.MkdirAll(dummyPartnPath, 0755)
	FailTestIfError(err, "Error creating dummy orphan partition", t)
	// restart the indexer
	forceKillIndexer()
	// Verify that the indexer has come up - and query on non-orphan index succeeds.
	_, err = secondaryindex.Range("idx3_age_regular", "default", indexScanAddress, []interface{}{35}, []interface{}{40}, 1, false, defaultlimit, c.SessionConsistency, nil)
	FailTestIfError(err, "Error in range scan after indexer restart", t)
	log.Printf("Query on idx3_age_regular is successful - after indexer restart.\n")
	// The orphan directory must have been deleted during restart.
	err = verifyDeletedPath(dummyPartnPath)
	FailTestIfError(err, "Cleanup of orphan partition did not happen", t)
}
explode_data.jsonl/59153
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 672 }
[ 2830, 3393, 2195, 9943, 49978, 67335, 1155, 353, 8840, 836, 8, 341, 197, 322, 31330, 398, 1855, 12746, 1922, 8527, 624, 197, 322, 56801, 279, 87216, 198, 197, 322, 4248, 421, 12746, 1922, 8527, 5221, 11062, 382, 743, 10652, 1676, 18338,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInlineBreakpoint(t *testing.T) { // We should be able to set a breakpoint on the call site of an inlined function. if ver, _ := goversion.Parse(runtime.Version()); ver.Major >= 0 && !ver.AfterOrEqual(goversion.GoVersion{Major: 1, Minor: 10, Rev: -1}) { // Versions of go before 1.10 do not have DWARF information for inlined calls t.Skip("inlining not supported") } withTestProcessArgs("testinline", t, ".", []string{}, protest.EnableInlining|protest.EnableOptimization, func(p *proc.Target, fixture protest.Fixture) { pcs, err := proc.FindFileLocation(p, fixture.Source, 17) t.Logf("%#v\n", pcs) if len(pcs) != 1 { t.Fatalf("unable to get PC for inlined function call: %v", pcs) } fn := p.BinInfo().PCToFunc(pcs[0]) expectedFn := "main.main" if fn.Name != expectedFn { t.Fatalf("incorrect function returned, expected %s, got %s", expectedFn, fn.Name) } _, err = p.SetBreakpoint(pcs[0], proc.UserBreakpoint, nil) if err != nil { t.Fatalf("unable to set breakpoint: %v", err) } }) }
explode_data.jsonl/56314
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 396 }
[ 2830, 3393, 25324, 22524, 2768, 1155, 353, 8840, 836, 8, 341, 197, 322, 1205, 1265, 387, 2952, 311, 738, 264, 52745, 389, 279, 1618, 2747, 315, 458, 304, 15121, 729, 624, 743, 2739, 11, 716, 1669, 728, 4366, 8937, 89467, 35842, 13426,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPullRequestReview_String(t *testing.T) { v := PullRequestReview{ ID: Int64(0), NodeID: String(""), User: &User{}, Body: String(""), CommitID: String(""), HTMLURL: String(""), PullRequestURL: String(""), State: String(""), } want := `github.PullRequestReview{ID:0, NodeID:"", User:github.User{}, Body:"", CommitID:"", HTMLURL:"", PullRequestURL:"", State:""}` if got := v.String(); got != want { t.Errorf("PullRequestReview.String = %v, want %v", got, want) } }
explode_data.jsonl/33271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 36068, 1900, 19432, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 31828, 1900, 19432, 515, 197, 29580, 25, 1797, 1333, 21, 19, 7, 15, 1326, 197, 30217, 915, 25, 260, 923, 445, 4461, 197, 31672, 25, 1843, 609, 1474, 38...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestBcrypt checks how the configured bcrypt cost interacts with dev mode:
// with dev disabled the effective cost is 12 even though 4 was configured
// (presumably a hardening minimum — confirm in the config package), while
// dev mode uses the configured cost as-is.
func TestBcrypt(t *testing.T) {
	p := config.MustNew(t, logrusx.New("", ""), configx.SkipValidation())

	require.NoError(t, p.Set(config.ViperKeyHasherBcryptCost, 4))
	require.NoError(t, p.Set("dev", false))
	// Outside dev mode the low configured cost of 4 is not honoured.
	assert.EqualValues(t, uint32(12), p.HasherBcrypt().Cost)

	require.NoError(t, p.Set("dev", true))
	// In dev mode the configured cost is used directly.
	assert.EqualValues(t, uint32(4), p.HasherBcrypt().Cost)
}
explode_data.jsonl/52908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 33, 48125, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 2193, 50463, 3564, 1155, 11, 1487, 20341, 87, 7121, 19814, 77130, 2193, 87, 57776, 13799, 12367, 17957, 35699, 1155, 11, 281, 4202, 8754, 5058, 12858, 1592, 6370, 261, 33,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRegenerateOAuthAppSecret covers OAuth app secret regeneration:
// admins can regenerate any app's secret, regular users only their own apps
// (and only while granted MANAGE_OAUTH), plus logged-out, bad-id, unknown-id,
// and provider-disabled paths.
func TestRegenerateOAuthAppSecret(t *testing.T) {
	th := Setup().InitBasic()
	defer th.TearDown()
	Client := th.Client
	AdminClient := th.SystemAdminClient

	// Snapshot role permissions and config so they can be restored.
	defaultRolePermissions := th.SaveDefaultRolePermissions()
	enableOAuthServiceProvider := th.App.Config().ServiceSettings.EnableOAuthServiceProvider
	defer func() {
		th.RestoreDefaultRolePermissions(defaultRolePermissions)
		th.App.UpdateConfig(func(cfg *model.Config) {
			cfg.ServiceSettings.EnableOAuthServiceProvider = enableOAuthServiceProvider
		})
	}()

	// Grant permission to regular users.
	th.AddPermissionToRole(model.PERMISSION_MANAGE_OAUTH.Id, model.SYSTEM_USER_ROLE_ID)
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableOAuthServiceProvider = true })

	oapp := &model.OAuthApp{Name: GenerateTestAppName(), Homepage: "https://nowhere.com", Description: "test", CallbackUrls: []string{"https://nowhere.com"}}
	// One app owned by the admin, one owned by the regular user.
	rapp, resp := AdminClient.CreateOAuthApp(oapp)
	CheckNoError(t, resp)
	oapp.Name = GenerateTestAppName()
	rapp2, resp := Client.CreateOAuthApp(oapp)
	CheckNoError(t, resp)

	// Regeneration keeps the app id but must change the secret.
	rrapp, resp := AdminClient.RegenerateOAuthAppSecret(rapp.Id)
	CheckNoError(t, resp)
	if rrapp.Id != rapp.Id {
		t.Fatal("wrong app")
	}
	if rrapp.ClientSecret == rapp.ClientSecret {
		t.Fatal("secret didn't change")
	}
	_, resp = AdminClient.RegenerateOAuthAppSecret(rapp2.Id)
	CheckNoError(t, resp)

	// Fresh pair of apps for the per-user permission checks below.
	rapp, resp = AdminClient.CreateOAuthApp(oapp)
	CheckNoError(t, resp)
	oapp.Name = GenerateTestAppName()
	rapp2, resp = Client.CreateOAuthApp(oapp)
	CheckNoError(t, resp)

	// A regular user may only regenerate secrets of apps they own.
	_, resp = Client.RegenerateOAuthAppSecret(rapp.Id)
	CheckForbiddenStatus(t, resp)
	_, resp = Client.RegenerateOAuthAppSecret(rapp2.Id)
	CheckNoError(t, resp)

	// Revoke permission from regular users.
	th.RemovePermissionFromRole(model.PERMISSION_MANAGE_OAUTH.Id, model.SYSTEM_USER_ROLE_ID)
	_, resp = Client.RegenerateOAuthAppSecret(rapp.Id)
	CheckForbiddenStatus(t, resp)

	// Logged-out clients are unauthorized.
	Client.Logout()
	_, resp = Client.RegenerateOAuthAppSecret(rapp.Id)
	CheckUnauthorizedStatus(t, resp)

	// Malformed and unknown app ids.
	_, resp = AdminClient.RegenerateOAuthAppSecret("junk")
	CheckBadRequestStatus(t, resp)
	_, resp = AdminClient.RegenerateOAuthAppSecret(model.NewId())
	CheckNotFoundStatus(t, resp)

	// With the OAuth provider disabled the endpoint is not implemented.
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableOAuthServiceProvider = false })
	_, resp = AdminClient.RegenerateOAuthAppSecret(rapp.Id)
	CheckNotImplementedStatus(t, resp)
}
explode_data.jsonl/30130
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 863 }
[ 2830, 3393, 3477, 13220, 57850, 2164, 19773, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 197, 7210, 2959, 1669, 270, 16620, 7210, 2959, 271, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Sku_Status_WhenSerializedToJson_DeserializesAsEqual property-tests
// that Sku_Status values round-trip through JSON without loss.
func Test_Sku_Status_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) {
	t.Parallel()
	parameters := gopter.DefaultTestParameters()
	parameters.MaxSize = 10 // keep generated values small for speed
	properties := gopter.NewProperties(parameters)
	properties.Property(
		"Round trip of Sku_Status via JSON returns original",
		prop.ForAll(RunJSONSerializationTestForSkuStatus, SkuStatusGenerator()))
	properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout))
}
explode_data.jsonl/44563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 1098, 12133, 36449, 62, 4498, 77521, 78967, 98054, 2848, 4756, 2121, 2993, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535, 1695, 284, 220, 16, 15, 198, 869...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetDepositAddress(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() { t.Skip("API keys required but not set, skipping test") } _, err := f.GetDepositAddress(context.Background(), currency.NewCode("FTT"), "") if err != nil { t.Error(err) } }
explode_data.jsonl/15230
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 1949, 78982, 4286, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 341, 197, 3244, 57776, 445, 7082, 6894, 2567, 714, 537, 738, 11, 42659, 1273, 1138, 197, 532, 197, 6878, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFormatWithdrawPermissions(t *testing.T) { // Arrange c.SetDefaults() expectedResult := exchange.WithdrawCryptoViaWebsiteOnlyText + " & " + exchange.WithdrawFiatViaWebsiteOnlyText // Act withdrawPermissions := c.FormatWithdrawPermissions() // Assert if withdrawPermissions != expectedResult { t.Errorf("Expected: %s, Recieved: %s", expectedResult, withdrawPermissions) } }
explode_data.jsonl/21361
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 4061, 92261, 23851, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 1444, 4202, 16273, 741, 42400, 2077, 1669, 9289, 26124, 7633, 58288, 54428, 30051, 7308, 1178, 488, 330, 609, 330, 488, 9289, 26124, 7633, 37, 10358, 54...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestNodes verifies Nodes() returns the full reachable set: just itself for
// an isolated node, and self + peers + peers-of-peers for a populated graph.
func TestNodes(t *testing.T) {
	// single node
	single := &node{
		id:      testNodeId,
		address: testNodeAddress,
		peers:   make(map[string]*node),
		network: newNetwork(Name(testNodeNetName)),
	}
	// get all the nodes including yourself
	nodes := single.Nodes()
	nodeCount := 1
	if len(nodes) != nodeCount {
		t.Errorf("Expected to find %d nodes, found: %d", nodeCount, len(nodes))
	}
	// complicated node graph
	node := testSetup()
	// get all the nodes including yourself
	nodes = node.Nodes()
	// compile a list of ids of all nodes in the network into map for easy indexing
	nodeIds := make(map[string]bool)
	// add yourself
	nodeIds[node.id] = true
	// add peer Ids
	for _, id := range testNodePeerIds {
		nodeIds[id] = true
	}
	// add peer1 peers i.e. peers of peer
	for _, id := range testPeerOfPeerIds {
		nodeIds[id] = true
	}
	// we should return the correct number of nodes
	if len(nodes) != len(nodeIds) {
		t.Errorf("Expected %d nodes, found: %d", len(nodeIds), len(nodes))
	}
	// iterate through the list of nodes and makes sure all have been returned
	for _, node := range nodes {
		if _, ok := nodeIds[node.Id()]; !ok {
			t.Errorf("Expected to find %s node", node.Id())
		}
	}
	// this is a leaf node
	id := "peer11"
	if nodePeer := node.GetPeerNode(id); nodePeer == nil {
		t.Errorf("Expected to find %s node", id)
	}
}
explode_data.jsonl/23544
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 512 }
[ 2830, 3393, 12288, 1155, 353, 8840, 836, 8, 341, 197, 322, 3175, 2436, 198, 1903, 2173, 1669, 609, 3509, 515, 197, 15710, 25, 414, 1273, 79206, 345, 197, 63202, 25, 1273, 1955, 4286, 345, 197, 197, 375, 388, 25, 256, 1281, 9147, 140...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestWhichTimezoneIsMidnightAt checks that, for a given UTC hour/minute,
// the function returns the UTC offset in seconds of the timezone where it is
// currently midnight.
func TestWhichTimezoneIsMidnightAt(t *testing.T) {
	s := is.New(t)
	// let it be midnight in Greenwich first
	utcHour := 0
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, 0), 0)
	// Rio (-3)
	utcHour = 3
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, 0), -3*60*60)
	// San Francisco (-7)
	utcHour = 7
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, 0), -7*60*60)
	// Honolulu (-10)
	utcHour = 10
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, 0), -10*60*60)
	// Auckland (+13) — fixed comment: the original said "Oakland", but the
	// expected offset 13*60*60 is UTC+13 (Auckland during DST).
	utcHour = 11
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, 0), 13*60*60)
	// Sydney (+10)
	utcHour = 14
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, 0), 10*60*60)
	// Vienna (+2)
	utcHour = 22
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, 0), 2*60*60)
	// Mumbai (+5:30) — exercises the half-hour offset via the minute argument
	utcHour = 18
	utcMinute := 30
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, utcMinute), 19800)
	// second half-hour-offset case at 21:30 UTC
	utcHour = 21
	utcMinute = 30
	s.Equal(WhichTimezoneIsMidnightAt(utcHour, utcMinute), 10800+30*60)
}
explode_data.jsonl/25486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 23085, 1462, 8684, 3872, 33648, 9287, 1655, 1155, 353, 8840, 836, 8, 1476, 1903, 1669, 374, 7121, 1155, 340, 197, 322, 1077, 432, 387, 32333, 304, 85838, 1156, 198, 197, 28355, 30254, 1669, 220, 15, 198, 1903, 12808, 7, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetDependencies covers bazel dependency resolution: with a WORKSPACE in
// the workspace root, with the WORKSPACE in an ancestor directory, and the
// error case when no WORKSPACE exists.
func TestGetDependencies(t *testing.T) {
	tests := []struct {
		description   string
		workspace     string
		target        string
		files         map[string]string // files created in the temp dir
		expectedQuery string            // bazel query command the code must run
		output        string            // canned bazel output fed to the fake runner
		expected      []string          // resolved deps, relative to the workspace
		shouldErr     bool
	}{
		{
			description: "with WORKSPACE",
			workspace:   ".",
			target:      "target",
			files: map[string]string{
				"WORKSPACE": "",
				"BUILD":     "",
				"dep1":      "",
				"dep2":      "",
			},
			expectedQuery: "bazel query kind('source file', deps('target')) union buildfiles('target') --noimplicit_deps --order_output=no",
			// external and @-prefixed entries are expected to be ignored
			output:   "@ignored\n//:BUILD\n//external/ignored\n\n//:dep1\n//:dep2\n",
			expected: []string{"BUILD", "dep1", "dep2", "WORKSPACE"},
		},
		{
			description: "with parent WORKSPACE",
			workspace:   "./sub/folder",
			target:      "target2",
			files: map[string]string{
				"WORKSPACE":           "",
				"BUILD":               "",
				"sub/folder/BUILD":    "",
				"sub/folder/dep1":     "",
				"sub/folder/dep2":     "",
				"sub/folder/baz/dep3": "",
			},
			expectedQuery: "bazel query kind('source file', deps('target2')) union buildfiles('target2') --noimplicit_deps --order_output=no",
			output:        "@ignored\n//:BUILD\n//sub/folder:BUILD\n//external/ignored\n\n//sub/folder:dep1\n//sub/folder:dep2\n//sub/folder/baz:dep3\n",
			// paths are returned relative to the nested workspace directory
			expected: []string{filepath.Join("..", "..", "BUILD"), "BUILD", "dep1", "dep2", filepath.Join("baz", "dep3"), filepath.Join("..", "..", "WORKSPACE")},
		},
		{
			description: "without WORKSPACE",
			workspace:   ".",
			target:      "target",
			shouldErr:   true,
		},
	}
	for _, test := range tests {
		testutil.Run(t, test.description, func(t *testutil.T) {
			// Replace the real command runner with one returning canned output.
			t.Override(&util.DefaultExecCommand, t.FakeRunOut(test.expectedQuery, test.output))
			t.NewTempDir().WriteFiles(test.files).Chdir()
			deps, err := GetDependencies(context.Background(), test.workspace, &latest.BazelArtifact{
				BuildTarget: test.target,
			})
			t.CheckErrorAndDeepEqual(test.shouldErr, err, test.expected, deps)
		})
	}
}
explode_data.jsonl/61216
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 951 }
[ 2830, 3393, 1949, 48303, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 256, 914, 198, 197, 197, 42909, 257, 914, 198, 197, 28861, 286, 914, 198, 197, 74075, 260, 2415, 14032, 30953, 198, 197, 42400, 2859, 914...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIDAllocatorNegativeValue checks that the ID allocator still hands out
// ids starting from its configured minimum when the underlying counter key
// holds a negative value.
func TestIDAllocatorNegativeValue(t *testing.T) {
	defer leaktest.AfterTest(t)
	store, _, stopper := createTestStore(t)
	defer stopper.Stop()

	// Increment our key to a negative value.
	newValue, err := engine.MVCCIncrement(store.Engine(), nil, keys.RaftIDGenerator, store.ctx.Clock.Now(), nil, -1024)
	if err != nil {
		t.Fatal(err)
	}
	if newValue != -1024 {
		t.Errorf("expected new value to be -1024; got %d", newValue)
	}
	// Allocator configured with minimum id 2 and block size 10.
	idAlloc, err := newIDAllocator(keys.RaftIDGenerator, store.ctx.DB, 2, 10, stopper)
	if err != nil {
		t.Errorf("failed to create IDAllocator: %v", err)
	}
	value, err := idAlloc.Allocate()
	if err != nil {
		t.Fatal(err)
	}
	// First allocation should be 2 despite the negative counter.
	if value != 2 {
		t.Errorf("expected id allocation to have value 2; got %d", value)
	}
}
explode_data.jsonl/45458
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 915, 42730, 38489, 1130, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 340, 57279, 11, 8358, 2936, 712, 1669, 1855, 2271, 6093, 1155, 340, 16867, 2936, 712, 30213, 2822, 197, 322, 52835, 1039, 1376, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestGenerateMetricTasksInterval verifies interval gating: no new
// measurement is scheduled while the last one is younger than the metric's
// interval, and one is scheduled once the interval has elapsed.
func TestGenerateMetricTasksInterval(t *testing.T) {
	// Run with a 60s interval whose last measurement finished 50s ago.
	run := &v1alpha1.AnalysisRun{
		Spec: v1alpha1.AnalysisRunSpec{
			Metrics: []v1alpha1.Metric{{
				Name:     "success-rate",
				Interval: "60s",
			}},
		},
		Status: v1alpha1.AnalysisRunStatus{
			Phase: v1alpha1.AnalysisPhaseRunning,
			MetricResults: []v1alpha1.MetricResult{{
				Name:  "success-rate",
				Phase: v1alpha1.AnalysisPhaseRunning,
				Measurements: []v1alpha1.Measurement{{
					Value:      "99",
					Phase:      v1alpha1.AnalysisPhaseSuccessful,
					StartedAt:  timePtr(metav1.NewTime(time.Now().Add(-50 * time.Second))),
					FinishedAt: timePtr(metav1.NewTime(time.Now().Add(-50 * time.Second))),
				}},
			}},
		},
	}
	{
		// ensure we don't take measurements when within the interval
		tasks := generateMetricTasks(run, run.Spec.Metrics)
		assert.Equal(t, 0, len(tasks))
	}
	{
		// ensure we do take measurements when outside interval
		successRate := run.Status.MetricResults[0]
		successRate.Measurements[0].StartedAt = timePtr(metav1.NewTime(time.Now().Add(-61 * time.Second)))
		successRate.Measurements[0].FinishedAt = timePtr(metav1.NewTime(time.Now().Add(-61 * time.Second)))
		run.Status.MetricResults[0] = successRate
		tasks := generateMetricTasks(run, run.Spec.Metrics)
		assert.Equal(t, 1, len(tasks))
	}
}
explode_data.jsonl/75807
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 540 }
[ 2830, 3393, 31115, 54310, 25449, 10256, 1155, 353, 8840, 836, 8, 341, 56742, 1669, 609, 85, 16, 7141, 16, 8624, 9092, 6727, 515, 197, 7568, 992, 25, 348, 16, 7141, 16, 8624, 9092, 6727, 8327, 515, 298, 9209, 13468, 25, 3056, 85, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPercentiles(t *testing.T) { N := 100 backend := memory.New(N*2, N*2) driver := MemoryCache(N, N, backend) test.StorageDriverTestPercentiles(driver, t) }
explode_data.jsonl/16178
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 32010, 3658, 1155, 353, 8840, 836, 8, 341, 18317, 1669, 220, 16, 15, 15, 198, 197, 20942, 1669, 4938, 7121, 8204, 9, 17, 11, 451, 9, 17, 340, 33652, 1669, 13850, 8233, 8204, 11, 451, 11, 19163, 340, 18185, 43771, 11349, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestCaveGenerate(t *testing.T) { c := &cave{} c.Generate(80, 40) w, h := c.Size() if w != 81 || h != 41 { t.Error("Could not create grid") } // c.dump() // view level. }
explode_data.jsonl/23297
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 34, 523, 31115, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 609, 66, 523, 16094, 1444, 57582, 7, 23, 15, 11, 220, 19, 15, 340, 6692, 11, 305, 1669, 272, 2465, 741, 743, 289, 961, 220, 23, 16, 1369, 305, 961, 220, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAbsCollection_Push(t *testing.T) { intColl := NewIntCollection([]int{1, 2, 3, 4, 5, 6}) intColl.Push(7) if intColl.Count() != 7 { t.Fatal("Push 后本体错误") } }
explode_data.jsonl/66446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 27778, 6482, 1088, 1116, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, 220, 21, 3518, 2084, 15265, 34981, 7, 22, 340, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPatternMonths(t *testing.T) { f := newTestLister(t) entries, err := months(context.Background(), f, "potato/", []string{"", "2020"}) require.NoError(t, err) assert.Equal(t, 12, len(entries)) for i, entry := range entries { assert.Equal(t, fmt.Sprintf("potato/2020-%02d", i+1), entry.Remote()) } }
explode_data.jsonl/24369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 15760, 59184, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 2271, 852, 261, 1155, 340, 197, 12940, 11, 1848, 1669, 3951, 5378, 19047, 1507, 282, 11, 330, 19099, 4330, 28105, 3056, 917, 4913, 497, 330, 17, 15, 17, 15, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDescribeServiceAccount(t *testing.T) { fake := fake.NewSimpleClientset(&api.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: "bar", Namespace: "foo", }, Secrets: []api.ObjectReference{ { Name: "test-objectref", }, }, ImagePullSecrets: []api.LocalObjectReference{ { Name: "test-local-ref", }, }, }) c := &describeClient{T: t, Namespace: "foo", Interface: fake} d := ServiceAccountDescriber{c} out, err := d.Describe("foo", "bar", printers.DescriberSettings{ShowEvents: true}) if err != nil { t.Errorf("unexpected error: %v", err) } expectedOut := `Name: bar Namespace: foo Labels: <none> Annotations: <none> Image pull secrets: test-local-ref (not found) Mountable secrets: test-objectref (not found) Tokens: <none> Events: <none>` + "\n" if out != expectedOut { t.Errorf("expected : %q\n but got output:\n %q", expectedOut, out) } }
explode_data.jsonl/34950
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 74785, 1860, 7365, 1155, 353, 8840, 836, 8, 341, 1166, 726, 1669, 12418, 7121, 16374, 2959, 746, 2099, 2068, 13860, 7365, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 414, 330, 2257, 756, 298, 90823, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInvalidateAllEmailInvites(t *testing.T) { th := Setup(t) defer th.TearDown() t.Run("Forbidden when request performed by system user", func(t *testing.T) { ok, res := th.Client.InvalidateEmailInvites() require.Equal(t, false, ok) CheckForbiddenStatus(t, res) }) t.Run("OK when request performed by system user with requisite system permission", func(t *testing.T) { th.AddPermissionToRole(model.PERMISSION_SYSCONSOLE_WRITE_AUTHENTICATION.Id, model.SYSTEM_USER_ROLE_ID) defer th.RemovePermissionFromRole(model.PERMISSION_SYSCONSOLE_WRITE_AUTHENTICATION.Id, model.SYSTEM_USER_ROLE_ID) ok, res := th.Client.InvalidateEmailInvites() require.Equal(t, true, ok) CheckOKStatus(t, res) }) t.Run("OK when request performed by system admin", func(t *testing.T) { ok, res := th.SystemAdminClient.InvalidateEmailInvites() require.Equal(t, true, ok) CheckOKStatus(t, res) }) }
explode_data.jsonl/70746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 342 }
[ 2830, 3393, 641, 7067, 2403, 4781, 15174, 3611, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 340, 16867, 270, 836, 682, 4454, 2822, 3244, 16708, 445, 69115, 979, 1681, 10660, 553, 1849, 1196, 497, 2915, 1155, 353, 8840, 836, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_BeginAuth(t *testing.T) { t.Parallel() a := assert.New(t) p := provider() session, err := p.BeginAuth("test_state") s := session.(*salesforce.Session) a.NoError(err) a.Contains(s.AuthURL, "login.salesforce.com/services/oauth2/authorize") }
explode_data.jsonl/30759
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 93447, 5087, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 11323, 1669, 2060, 7121, 1155, 340, 3223, 1669, 9109, 741, 25054, 11, 1848, 1669, 281, 28467, 5087, 445, 1944, 4387, 1138, 1903, 1669, 3797, 41399, 29041, 88...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChildDeletedEvent(t *testing.T) { cluster := newCluster(testPod, testRS, testDeploy) err := cluster.ensureSynced() assert.Nil(t, err) err = cluster.processEvent(watch.Deleted, testPod) assert.Nil(t, err) rsChildren := cluster.getChildren(testRS) assert.Equal(t, []appv1.ResourceNode{}, rsChildren) }
explode_data.jsonl/75182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 3652, 26039, 1556, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 501, 28678, 8623, 23527, 11, 1273, 11451, 11, 1273, 69464, 340, 9859, 1669, 10652, 63065, 12154, 291, 741, 6948, 59678, 1155, 11, 1848, 692, 9859, 284, 10652...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPublicKeyHex(t *testing.T) { t.Parallel() want := "3132333435363738393031323334353637383930313233343536373839303132" if id1.PublicKeyHex() != want { t.Errorf("PublicKeyHex() = %s, want %s", id1.PublicKeyHex(), want) } }
explode_data.jsonl/7129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 61822, 20335, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 50780, 1669, 330, 18, 16, 18, 17, 18, 18, 18, 19, 18, 20, 18, 21, 18, 22, 18, 23, 18, 24, 18, 15, 18, 16, 18, 17, 18, 18, 18, 19, 18, 20, 18,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNonEmptyLogFile(t *testing.T) { is := is.New(t) m, err := NewMemtable(10000, "/tmp/memtable.log") is.NoErr(err) val, found := m.Get("key1") is.True(found) is.Equal(val, "value1") _, found = m.Get("key2") is.True(!found) val, found = m.Get("key3") is.True(found) is.Equal(val, "new_value") m.Clear() }
explode_data.jsonl/56389
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 8121, 3522, 98857, 1155, 353, 8840, 836, 8, 341, 19907, 1669, 374, 7121, 1155, 340, 2109, 11, 1848, 1669, 1532, 18816, 2005, 7, 16, 15, 15, 15, 15, 11, 3521, 5173, 3183, 336, 2005, 1665, 1138, 19907, 16766, 7747, 3964, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEntry_TappedSecondary(t *testing.T) { entry, window := setupImageTest(false) defer teardownImageTest(window) c := window.Canvas() test.AssertImageMatches(t, "entry/tapped_secondary_initial.png", c.Capture()) tapPos := fyne.NewPos(20, 10) test.TapSecondaryAt(entry, tapPos) test.AssertImageMatches(t, "entry/tapped_secondary_full_menu.png", c.Capture()) assert.Equal(t, 1, len(c.Overlays().List())) c.Overlays().Remove(c.Overlays().Top()) entry.Disable() test.TapSecondaryAt(entry, tapPos) test.AssertImageMatches(t, "entry/tapped_secondary_read_menu.png", c.Capture()) assert.Equal(t, 1, len(c.Overlays().List())) c.Overlays().Remove(c.Overlays().Top()) entry.Password = true entry.Refresh() test.TapSecondaryAt(entry, tapPos) test.AssertImageMatches(t, "entry/tapped_secondary_no_password_menu.png", c.Capture()) assert.Nil(t, c.Overlays().Top(), "No popup for disabled password") entry.Enable() test.TapSecondaryAt(entry, tapPos) test.AssertImageMatches(t, "entry/tapped_secondary_password_menu.png", c.Capture()) assert.Equal(t, 1, len(c.Overlays().List())) }
explode_data.jsonl/57332
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 5874, 1139, 5677, 48963, 1155, 353, 8840, 836, 8, 341, 48344, 11, 3241, 1669, 6505, 1906, 2271, 3576, 340, 16867, 49304, 1906, 2271, 15906, 340, 1444, 1669, 3241, 54121, 2822, 18185, 11711, 1906, 42470, 1155, 11, 330, 4085, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCycles(t *testing.T) { sc, c, cleanup := createContexts(t) defer cleanup() root, _, _, stopper := createNamespace(t, sc) defer stopper() ns := v23.GetNamespace(c) ns.SetRoots(root.name) c1 := runMT(t, c, "c1") c2 := runMT(t, c, "c2") c3 := runMT(t, c, "c3") defer c1.stop() defer c2.stop() defer c3.stop() m := "c1/c2" if err := ns.Mount(c, m, c1.name, ttl, naming.ServesMountTable(true)); err != nil { boom(t, "Failed to Mount %s: %s", "c1/c2", err) } m = "c1/c2/c3" if err := ns.Mount(c, m, c3.name, ttl, naming.ServesMountTable(true)); err != nil { boom(t, "Failed to Mount %s: %s", m, err) } m = "c1/c3/c4" if err := ns.Mount(c, m, c1.name, ttl, naming.ServesMountTable(true)); err != nil { boom(t, "Failed to Mount %s: %s", m, err) } // Since c1 was mounted with the Serve call, it will have both the tcp and ws endpoints. testResolve(t, c, ns, "c1", c1.name) testResolve(t, c, ns, "c1/c2", c1.name) testResolve(t, c, ns, "c1/c3", c3.name) testResolve(t, c, ns, "c1/c3/c4", c1.name) testResolve(t, c, ns, "c1/c3/c4/c3/c4", c1.name) cycle := "c3/c4" for i := 0; i < 40; i++ { cycle += "/c3/c4" } if _, err := ns.Resolve(c, "c1/"+cycle); verror.ErrorID(err) != naming.ErrResolutionDepthExceeded.ID { boom(t, "Failed to detect cycle") } // Perform the glob with a response length limit and dup suppression. The dup supression // should win. r := doGlob(t, c, ns, "c1/...", 1000) if len(r) != 6 { t.Fatalf("expected 6 replies, got %v", r) } }
explode_data.jsonl/8230
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 718 }
[ 2830, 3393, 34, 15805, 1155, 353, 8840, 836, 8, 341, 29928, 11, 272, 11, 21290, 1669, 83674, 82, 1155, 340, 16867, 21290, 2822, 33698, 11, 8358, 8358, 2936, 712, 1669, 1855, 22699, 1155, 11, 1136, 340, 16867, 2936, 712, 741, 84041, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestReadRootValidation(t *testing.T) { cases := []struct { note string files [][2]string err string }{ { note: "default full extent", files: [][2]string{ {"/.manifest", `{"revision": "abcd"}`}, {"/data.json", `{"a": 1}`}, {"/x.rego", `package foo`}, }, err: "", }, { note: "explicit full extent", files: [][2]string{ {"/.manifest", `{"revision": "abcd", "roots": [""]}`}, {"/data.json", `{"a": 1}`}, {"/x.rego", `package foo`}, }, err: "", }, { note: "implicit prefixed", files: [][2]string{ {"/.manifest", `{"revision": "abcd", "roots": ["a/b", "foo"]}`}, {"/data.json", `{"a": {"b": 1}}`}, {"/x.rego", `package foo.bar`}, }, err: "", }, { note: "err empty", files: [][2]string{ {"/.manifest", `{"revision": "abcd", "roots": []}`}, {"/x.rego", `package foo`}, }, err: "manifest roots [] do not permit 'package foo' in module '/x.rego'", }, { note: "err overlapped", files: [][2]string{ {"/.manifest", `{"revision": "abcd", "roots": ["a/b", "a"]}`}, }, err: "manifest has overlapped roots: 'a/b' and 'a'", }, { note: "edge overlapped partial segment", files: [][2]string{ {"/.manifest", `{"revision": "abcd", "roots": ["a", "another_root"]}`}, }, err: "", }, { note: "err package outside scope", files: [][2]string{ {"/.manifest", `{"revision": "abcd", "roots": ["a", "b", "c/d"]}`}, {"/a.rego", `package b.c`}, {"/x.rego", `package c.e`}, }, err: "manifest roots [a b c/d] do not permit 'package c.e' in module '/x.rego'", }, { note: "err data outside scope", files: [][2]string{ {"/.manifest", `{"revision": "abcd", "roots": ["a", "b", "c/d"]}`}, {"/data.json", `{"a": 1}`}, {"/c/e/data.json", `"bad bad bad"`}, }, err: "manifest roots [a b c/d] do not permit data at path '/c/e'", }, } for _, tc := range cases { t.Run(tc.note, func(t *testing.T) { buf := archive.MustWriteTarGz(tc.files) _, err := NewReader(buf).IncludeManifestInData(true).Read() if tc.err == "" && err != nil { t.Fatal("Unexpected error occurred:", err) } else if tc.err != "" && err == nil { t.Fatal("Expected error but 
got success") } else if tc.err != "" && err != nil { if !strings.Contains(err.Error(), tc.err) { t.Fatalf("Expected error to contain %q but got: %v", tc.err, err) } } }) } }
explode_data.jsonl/55375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1171 }
[ 2830, 3393, 4418, 8439, 13799, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 9038, 1272, 220, 914, 198, 197, 74075, 508, 1457, 17, 30953, 198, 197, 9859, 256, 914, 198, 197, 59403, 197, 197, 515, 298, 9038, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestSubmitStartupFail(t *testing.T) { // Tests the case where we submit a job that fails to start up we want to immediately roll back, even though // the Flink API unhelpfully returns a 500 in that case httpmock.Activate() defer httpmock.DeactivateAndReset() ctx := context.Background() responder := httpmock.NewStringResponder(500, wrongEntryClassResponse) httpmock.RegisterResponder("POST", fakeSubmitURL, responder) client := getTestJobManagerClient() resp, err := client.SubmitJob(ctx, testURL, "1", SubmitJobRequest{ Parallelism: 10, }) assert.Nil(t, resp) flinkAppError, _ := err.(*v1beta1.FlinkApplicationError) assert.True(t, flinkAppError.IsFailFast) assert.EqualError(t, err, "SubmitJob call failed with status 500 and message '"+ wrongEntryClassResponse+"'") }
explode_data.jsonl/32367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 8890, 39076, 19524, 1155, 353, 8840, 836, 8, 341, 197, 322, 20150, 279, 1142, 1380, 582, 9318, 264, 2618, 429, 14525, 311, 1191, 705, 582, 1366, 311, 7069, 6502, 1182, 11, 1496, 3498, 198, 197, 322, 279, 434, 2080, 5333, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBadConfigs(t *testing.T) { for _, ee := range expectedErrors { _, err := LoadFile("testdata/" + ee.filename) testutil.NotOk(t, err, "%s", ee.filename) testutil.Assert(t, strings.Contains(err.Error(), ee.errMsg), "Expected error for %s to contain %q but got: %s", ee.filename, ee.errMsg, err) } }
explode_data.jsonl/74674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 17082, 84905, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 36343, 1669, 2088, 3601, 13877, 341, 197, 197, 6878, 1848, 1669, 8893, 1703, 445, 92425, 11225, 488, 36343, 30882, 340, 197, 18185, 1314, 15000, 11578, 1155, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSpanProcessor_SeparatorMultipleKeys(t *testing.T) { factory := NewFactory() cfg := factory.CreateDefaultConfig() oCfg := cfg.(*Config) oCfg.Rename.FromAttributes = []string{"key1", "key2", "key3", "key4"} oCfg.Rename.Separator = "::" tp, err := factory.CreateTracesProcessor(context.Background(), componenttest.NewNopProcessorCreateSettings(), oCfg, consumertest.NewNop()) require.Nil(t, err) require.NotNil(t, tp) traceData := generateTraceData( "", "rename with separators and multiple keys", map[string]interface{}{ "key1": "bob", "key2": 123, "key3": 234.129312, "key4": true, }) assert.NoError(t, tp.ConsumeTraces(context.Background(), traceData)) assert.Equal(t, generateTraceData( "", "bob::123::234.129312::true", map[string]interface{}{ "key1": "bob", "key2": 123, "key3": 234.129312, "key4": true, }), traceData) }
explode_data.jsonl/51041
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 12485, 22946, 1098, 91640, 32089, 8850, 1155, 353, 8840, 836, 8, 1476, 1166, 2919, 1669, 1532, 4153, 741, 50286, 1669, 8633, 7251, 3675, 2648, 741, 22229, 42467, 1669, 13286, 41399, 2648, 340, 22229, 42467, 2013, 1840, 11439, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWasAnyOperationErroneous(t *testing.T) { emptySyncher := setupTest(0, nil) if emptySyncher.WasAnyOperationErroneous() { t.Errorf("An empty Syncher object shall not think that any CNI operations were erroneous!") } failSyncher := setupTest(len(failingTestConsts), failingTestConsts) if !failSyncher.WasAnyOperationErroneous() { t.Errorf("A Syncher object with failing operations shall not think that all CNI operations were successful!") } successSyncher := setupTest(len(totalSuccessTestConsts), totalSuccessTestConsts) if successSyncher.WasAnyOperationErroneous() { t.Errorf("A Syncher object with only successful operations shall not think that any CNI operations failed!") } }
explode_data.jsonl/69740
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 26034, 8610, 8432, 7747, 603, 782, 1155, 353, 8840, 836, 8, 341, 220, 4287, 37134, 9034, 1669, 6505, 2271, 7, 15, 11, 2092, 340, 220, 421, 4287, 37134, 9034, 1175, 300, 8610, 8432, 7747, 603, 782, 368, 341, 257, 259, 130...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeleteStreamPropagate(t *testing.T) { defer cleanupStorage(t) // Use a central NATS server. ns := natsdTest.RunDefaultServer() defer ns.Shutdown() // Configure first server. s1Config := getTestConfig("a", true, 0) s1 := runServerWithConfig(t, s1Config) defer s1.Stop() // Configure second server. s2Config := getTestConfig("b", false, 5050) s2 := runServerWithConfig(t, s2Config) defer s2.Stop() // Connect and send the request to the follower. client, err := lift.Connect([]string{"localhost:5050"}) require.NoError(t, err) defer client.Close() ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) defer cancel() err = client.CreateStream(ctx, "foo", "foo") require.NoError(t, err) err = client.DeleteStream(context.Background(), "foo") require.NoError(t, err) }
explode_data.jsonl/34466
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 6435, 3027, 2008, 46836, 1155, 353, 8840, 836, 8, 341, 16867, 21290, 5793, 1155, 692, 197, 322, 5443, 264, 8622, 18248, 50, 3538, 624, 84041, 1669, 308, 1862, 67, 2271, 16708, 3675, 5475, 741, 16867, 12268, 10849, 18452, 282...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_signVerify(t *testing.T) { priv := "ED77803C1C04AD646C3F0245B6D506EE6DF7A022187921F4D2ABCAF22012F72B" privBytes, _ := hex.DecodeString(priv) msg := "hello,world" signature, _ := crypto.Sign(privBytes, []byte(msg)) pub, _ := GetPublicKeyFromPrivKey(priv) err := crypto.Verify(*pub, []byte(msg), signature) if err != nil { println("Verify failed") } }
explode_data.jsonl/48161
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 11172, 32627, 1155, 353, 8840, 836, 8, 341, 71170, 1669, 330, 1479, 22, 22, 23, 15, 18, 34, 16, 34, 15, 19, 1808, 21, 19, 21, 34, 18, 37, 15, 17, 19, 20, 33, 21, 35, 20, 15, 21, 7099, 21, 5262, 22, 32, 15, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWatchCompactRevision(t *testing.T) { integration2.BeforeTest(t) clus := integration2.NewClusterV3(t, &integration2.ClusterConfig{Size: 1}) defer clus.Terminate(t) // set some keys kv := clus.RandClient() for i := 0; i < 5; i++ { if _, err := kv.Put(context.TODO(), "foo", "bar"); err != nil { t.Fatal(err) } } w := clus.RandClient() if _, err := kv.Compact(context.TODO(), 4); err != nil { t.Fatal(err) } wch := w.Watch(context.Background(), "foo", clientv3.WithRev(2)) // get compacted error message wresp, ok := <-wch if !ok { t.Fatalf("expected wresp, but got closed channel") } if wresp.Err() != rpctypes.ErrCompacted { t.Fatalf("wresp.Err() expected %v, but got %v", rpctypes.ErrCompacted, wresp.Err()) } if !wresp.Canceled { t.Fatalf("wresp.Canceled expected true, got %+v", wresp) } // ensure the channel is closed if wresp, ok = <-wch; ok { t.Fatalf("expected closed channel, but got %v", wresp) } }
explode_data.jsonl/28930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 14247, 98335, 33602, 1155, 353, 8840, 836, 8, 341, 2084, 17376, 17, 31153, 2271, 1155, 692, 197, 4163, 1669, 17590, 17, 7121, 28678, 53, 18, 1155, 11, 609, 60168, 17, 72883, 2648, 90, 1695, 25, 220, 16, 3518, 16867, 1185, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestApplicationContext_NestedAutowireBean(t *testing.T) { c, ch := container() c.Provide(func() int { return 3 }) c.Object(new(NestedAutowireBean)) c.Object(&PtrNestedAutowireBean{ SubNestedAutowireBean: new(SubNestedAutowireBean), }) err := c.Refresh() assert.Nil(t, err) p := <-ch var b *NestedAutowireBean err = p.BeanRegistry().Get(&b) assert.Nil(t, err) assert.Equal(t, *b.Int, 3) var b0 *PtrNestedAutowireBean err = p.BeanRegistry().Get(&b0) assert.Nil(t, err) assert.Equal(t, b0.Int, (*int)(nil)) }
explode_data.jsonl/17422
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 241 }
[ 2830, 3393, 19736, 1604, 9980, 32, 9616, 554, 10437, 1155, 353, 8840, 836, 8, 1476, 1444, 11, 521, 1669, 5476, 741, 1444, 7763, 19448, 18552, 368, 526, 314, 470, 220, 18, 2751, 1444, 8348, 1755, 8204, 9980, 32, 9616, 554, 10437, 1171,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEvaluators(t *testing.T) { t.Run("greater then", func(t *testing.T) { require.True(t, evaluatorScenario(t, `{"type": "gt", "params": [1] }`, 3)) require.False(t, evaluatorScenario(t, `{"type": "gt", "params": [3] }`, 1)) }) t.Run("less then", func(t *testing.T) { require.False(t, evaluatorScenario(t, `{"type": "lt", "params": [1] }`, 3)) require.True(t, evaluatorScenario(t, `{"type": "lt", "params": [3] }`, 1)) }) t.Run("within_range", func(t *testing.T) { require.True(t, evaluatorScenario(t, `{"type": "within_range", "params": [1, 100] }`, 3)) require.False(t, evaluatorScenario(t, `{"type": "within_range", "params": [1, 100] }`, 300)) require.True(t, evaluatorScenario(t, `{"type": "within_range", "params": [100, 1] }`, 3)) require.False(t, evaluatorScenario(t, `{"type": "within_range", "params": [100, 1] }`, 300)) }) t.Run("outside_range", func(t *testing.T) { require.True(t, evaluatorScenario(t, `{"type": "outside_range", "params": [1, 100] }`, 1000)) require.False(t, evaluatorScenario(t, `{"type": "outside_range", "params": [1, 100] }`, 50)) require.True(t, evaluatorScenario(t, `{"type": "outside_range", "params": [100, 1] }`, 1000)) require.False(t, evaluatorScenario(t, `{"type": "outside_range", "params": [100, 1] }`, 50)) }) t.Run("no_value", func(t *testing.T) { t.Run("should be false if series have values", func(t *testing.T) { require.False(t, evaluatorScenario(t, `{"type": "no_value", "params": [] }`, 50)) }) t.Run("should be true when the series have no value", func(t *testing.T) { jsonModel, err := simplejson.NewJson([]byte(`{"type": "no_value", "params": [] }`)) require.NoError(t, err) evaluator, err := NewAlertEvaluator(jsonModel) require.NoError(t, err) require.True(t, evaluator.Eval(null.FloatFromPtr(nil))) }) }) }
explode_data.jsonl/3490
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 764 }
[ 2830, 3393, 36, 25510, 2973, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 65235, 1221, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 17957, 32443, 1155, 11, 69810, 54031, 1155, 11, 1565, 4913, 1313, 788, 330, 5178, 497, 330, 3519, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEqEmptyToSql(t *testing.T) { sql, args, err := Eq{}.ToSql() assert.NoError(t, err) expectedSql := "(1=1)" assert.Equal(t, expectedSql, sql) assert.Empty(t, args) }
explode_data.jsonl/44159
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 27312, 3522, 1249, 8269, 1155, 353, 8840, 836, 8, 341, 30633, 11, 2827, 11, 1848, 1669, 33122, 46391, 1249, 8269, 741, 6948, 35699, 1155, 11, 1848, 692, 42400, 8269, 1669, 11993, 16, 28, 16, 12954, 6948, 12808, 1155, 11, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestStorageRemoveLeastRecent(t *testing.T) { storage := Init(driver.NewMemory()) storage.Log = t.Logf // Make sure that specifying this at the outset doesn't cause any bugs. storage.MaxHistory = 10 const name = "angry-bird" // setup storage with test releases setup := func() { // release records rls0 := ReleaseTestData{Name: name, Version: 1, Status: rspb.Status_SUPERSEDED}.ToRelease() rls1 := ReleaseTestData{Name: name, Version: 2, Status: rspb.Status_SUPERSEDED}.ToRelease() rls2 := ReleaseTestData{Name: name, Version: 3, Status: rspb.Status_SUPERSEDED}.ToRelease() rls3 := ReleaseTestData{Name: name, Version: 4, Status: rspb.Status_DEPLOYED}.ToRelease() // create the release records in the storage assertErrNil(t.Fatal, storage.Create(rls0), "Storing release 'angry-bird' (v1)") assertErrNil(t.Fatal, storage.Create(rls1), "Storing release 'angry-bird' (v2)") assertErrNil(t.Fatal, storage.Create(rls2), "Storing release 'angry-bird' (v3)") assertErrNil(t.Fatal, storage.Create(rls3), "Storing release 'angry-bird' (v4)") } setup() // Because we have not set a limit, we expect 4. expect := 4 if hist, err := storage.History(name); err != nil { t.Fatal(err) } else if len(hist) != expect { t.Fatalf("expected %d items in history, got %d", expect, len(hist)) } storage.MaxHistory = 3 rls5 := ReleaseTestData{Name: name, Version: 5, Status: rspb.Status_DEPLOYED}.ToRelease() assertErrNil(t.Fatal, storage.Create(rls5), "Storing release 'angry-bird' (v5)") // On inserting the 5th record, we expect two records to be pruned from history. hist, err := storage.History(name) if err != nil { t.Fatal(err) } else if len(hist) != storage.MaxHistory { for _, item := range hist { t.Logf("%s %v", item.Name, item.Version) } t.Fatalf("expected %d items in history, got %d", storage.MaxHistory, len(hist)) } // We expect the existing records to be 3, 4, and 5. 
for i, item := range hist { v := int(item.Version) if expect := i + 3; v != expect { t.Errorf("Expected release %d, got %d", expect, v) } } }
explode_data.jsonl/35129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 782 }
[ 2830, 3393, 5793, 13021, 81816, 25140, 1155, 353, 8840, 836, 8, 341, 197, 16172, 1669, 15690, 24032, 7121, 10642, 2398, 197, 16172, 5247, 284, 259, 98954, 271, 197, 322, 7405, 2704, 429, 37838, 419, 518, 279, 67764, 3171, 944, 5240, 894...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnescape(t *testing.T) { unescape(string([]byte{'\\', '\\', 0})) unescape(string([]byte{'\\', '/', '\\', 'b', '\\', 'f'})) }
explode_data.jsonl/43434
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 1806, 12998, 1155, 353, 8840, 836, 8, 341, 197, 8531, 5330, 3609, 10556, 3782, 13608, 3422, 516, 28078, 516, 220, 15, 44194, 197, 8531, 5330, 3609, 10556, 3782, 13608, 3422, 516, 56427, 28078, 516, 364, 65, 516, 28078, 516, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBytesOffset(t *testing.T) { tests := []struct { text string pos protocol.Position want int }{ {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 0}, want: 0}, {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 1}, want: 1}, {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 2}, want: 1}, {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 3}, want: 5}, {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 4}, want: 6}, {text: `a𐐀b`, pos: protocol.Position{Line: 0, Character: 5}, want: -1}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 3}, want: 3}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 0, Character: 4}, want: 3}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 0}, want: 4}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 3}, want: 7}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 1, Character: 4}, want: 7}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8}, {text: "aaa\nbbb\n", pos: protocol.Position{Line: 2, Character: 1}, want: -1}, {text: "aaa\nbbb\n\n", pos: protocol.Position{Line: 2, Character: 0}, want: 8}, } for i, test := range tests { fname := fmt.Sprintf("test %d", i) fset := token.NewFileSet() f := fset.AddFile(fname, -1, len(test.text)) f.SetLinesForContent([]byte(test.text)) uri := span.URIFromPath(fname) converter := span.NewContentConverter(fname, []byte(test.text)) mapper := &protocol.ColumnMapper{ URI: uri, Converter: converter, Content: []byte(test.text), } got, err := mapper.Point(test.pos) if err != nil && test.want != -1 { t.Errorf("unexpected error: %v", err) } if err == nil && got.Offset() != test.want { t.Errorf("want %d for %q(Line:%d,Character:%d), but got %d", test.want, test.text, int(test.pos.Line), int(test.pos.Character), got.Offset()) } } }
explode_data.jsonl/72543
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 818 }
[ 2830, 3393, 7078, 6446, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 15425, 914, 198, 197, 28164, 220, 11507, 21954, 198, 197, 50780, 526, 198, 197, 59403, 197, 197, 83021, 25, 1565, 64, 123934, 238, 222, 65, 7808,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestHeartbeatAnnounce(t *testing.T) { t.Parallel() tests := []struct { mode HeartbeatMode kind string }{ {mode: HeartbeatModeProxy, kind: services.KindProxy}, {mode: HeartbeatModeAuth, kind: services.KindAuthServer}, {mode: HeartbeatModeKube, kind: services.KindKubeService}, } for _, tt := range tests { t.Run(tt.mode.String(), func(t *testing.T) { ctx, cancel := context.WithCancel(context.TODO()) defer cancel() clock := clockwork.NewFakeClock() announcer := newFakeAnnouncer(ctx) hb, err := NewHeartbeat(HeartbeatConfig{ Context: ctx, Mode: tt.mode, Component: "test", Announcer: announcer, CheckPeriod: time.Second, AnnouncePeriod: 60 * time.Second, KeepAlivePeriod: 10 * time.Second, ServerTTL: 600 * time.Second, Clock: clock, GetServerInfo: func() (services.Server, error) { srv := &services.ServerV2{ Kind: tt.kind, Version: services.V2, Metadata: services.Metadata{ Namespace: defaults.Namespace, Name: "1", }, Spec: services.ServerSpecV2{ Addr: "127.0.0.1:1234", Hostname: "2", }, } srv.SetTTL(clock, defaults.ServerAnnounceTTL) return srv, nil }, }) require.NoError(t, err) require.Equal(t, hb.state, HeartbeatStateInit) // on the first run, heartbeat will move to announce state, // will call announce right away err = hb.fetch() require.NoError(t, err) require.Equal(t, hb.state, HeartbeatStateAnnounce) err = hb.announce() require.NoError(t, err) require.Equal(t, announcer.upsertCalls[hb.Mode], 1) require.Equal(t, hb.state, HeartbeatStateAnnounceWait) require.Equal(t, hb.nextAnnounce, clock.Now().UTC().Add(hb.AnnouncePeriod)) // next call will not move to announce, because time is not up yet err = hb.fetchAndAnnounce() require.NoError(t, err) require.Equal(t, hb.state, HeartbeatStateAnnounceWait) // advance time, and heartbeat will move to announce clock.Advance(hb.AnnouncePeriod * time.Second) err = hb.fetch() require.NoError(t, err) require.Equal(t, hb.state, HeartbeatStateAnnounce) err = hb.announce() require.NoError(t, err) require.Equal(t, 
announcer.upsertCalls[hb.Mode], 2) require.Equal(t, hb.state, HeartbeatStateAnnounceWait) require.Equal(t, hb.nextAnnounce, clock.Now().UTC().Add(hb.AnnouncePeriod)) // in case of error, system will move to announce wait state, // with next attempt scheduled on the next keep alive period announcer.err = trace.ConnectionProblem(nil, "boom") clock.Advance(hb.AnnouncePeriod + time.Second) err = hb.fetchAndAnnounce() require.Error(t, err) require.True(t, trace.IsConnectionProblem(err)) require.Equal(t, announcer.upsertCalls[hb.Mode], 3) require.Equal(t, hb.state, HeartbeatStateAnnounceWait) require.Equal(t, hb.nextAnnounce, clock.Now().UTC().Add(hb.KeepAlivePeriod)) // once announce is successful, next announce is set on schedule announcer.err = nil clock.Advance(hb.KeepAlivePeriod + time.Second) err = hb.fetchAndAnnounce() require.NoError(t, err) require.Equal(t, announcer.upsertCalls[hb.Mode], 4) require.Equal(t, hb.state, HeartbeatStateAnnounceWait) require.Equal(t, hb.nextAnnounce, clock.Now().UTC().Add(hb.AnnouncePeriod)) }) } }
explode_data.jsonl/30089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1461 }
[ 2830, 3393, 45384, 22227, 27093, 9734, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 1235, 341, 197, 60247, 17965, 22227, 3636, 198, 197, 197, 15314, 914, 198, 197, 59403, 197, 197, 90, 8516, 25, 17965, 22227,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBQInvariantToEntryOrder(t *testing.T) { lower, upper := -5.0, 5.0 bq1 := getNoiselessBQ(t, lower, upper) bq2 := getNoiselessBQ(t, lower, upper) entries := createEntries() // The list of entries contains 1001 elements. However, we only add the first 997. The reason // is that 997 is a prime number, which allows us to shuffle the entires easily using modular // arithmetic. for i := 0; i < 997; i++ { bq1.Add(entries[i]) // Adding entries with an arbitrary step length of 643. Because the two values are coprime, // all entries between 0 and 997 will be added. bq2.Add(entries[i*643%997]) } for _, rank := range getRanks() { got, err := bq1.Result(rank) if err != nil { t.Fatalf("Couldn't compute dp result for rank=%f: %v", rank, err) } want, err := bq2.Result(rank) if err != nil { t.Fatalf("Couldn't compute dp result for rank=%f: %v", rank, err) } if !cmp.Equal(got, want) { t.Errorf("Add: Wanted the same result for same list of entries with a different order for rank %f got %f, want %f", rank, got, want) } } }
explode_data.jsonl/80638
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 33, 48, 76621, 1249, 5874, 4431, 1155, 353, 8840, 836, 8, 341, 8810, 1202, 11, 8416, 1669, 481, 20, 13, 15, 11, 220, 20, 13, 15, 198, 2233, 80, 16, 1669, 633, 61819, 1717, 33, 48, 1155, 11, 4722, 11, 8416, 340, 2233,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGossipDiscoverySkipConnectingToLocalhostBootstrap(t *testing.T) { inst := createDiscoveryInstance(11611, "d1", []string{"localhost:11611", "127.0.0.1:11611"}) inst.comm.lock.Lock() inst.comm.mock = &mock.Mock{} inst.comm.mock.On("SendToPeer", mock.Anything, mock.Anything).Run(func(mock.Arguments) { t.Fatal("Should not have connected to any peer") }) inst.comm.mock.On("Ping", mock.Anything).Run(func(mock.Arguments) { t.Fatal("Should not have connected to any peer") }) inst.comm.lock.Unlock() time.Sleep(time.Second * 3) waitUntilOrFailBlocking(t, inst.Stop) }
explode_data.jsonl/62267
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 227 }
[ 2830, 3393, 38, 41473, 67400, 35134, 62924, 96856, 3790, 45511, 1155, 353, 8840, 836, 8, 341, 88656, 1669, 1855, 67400, 2523, 7, 16, 16, 21, 16, 16, 11, 330, 67, 16, 497, 3056, 917, 4913, 8301, 25, 16, 16, 21, 16, 16, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSecretProofTransactionSerialization(t *testing.T) { s := "b778a39a3663719dfc5e48c9d78431b1e45c2af9df538782bf199c189dabeac7680ada57dcec8eee91c4e3bf3bfa9af6ffde90cd1d249d1c6121d7b759a001b1" ss := "9a493664" tx, err := NewSecretProofTransaction(fakeDeadline, SHA3_512, s, ss, MijinTest) assert.Nilf(t, err, "NewSecretProofTransaction returned error: %s", err) b, err := tx.generateBytes() assert.Nilf(t, err, "generateBytes returned error: %s", err) assert.Equal(t, secretProofTransactionSerializationCorr, b) }
explode_data.jsonl/69204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 19773, 31076, 8070, 35865, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 330, 65, 22, 22, 23, 64, 18, 24, 64, 18, 21, 21, 18, 22, 16, 24, 2940, 66, 20, 68, 19, 23, 66, 24, 67, 22, 23, 19, 18, 16, 65, 16, 68, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRetractRationale(t *testing.T) { for _, tt := range retractRationaleTests { t.Run(tt.desc, func(t *testing.T) { f, err := Parse("in", []byte(tt.in), nil) if err != nil { t.Fatal(err) } if len(f.Retract) != 1 { t.Fatalf("got %d retract directives; want 1", len(f.Retract)) } if got := f.Retract[0].Rationale; got != tt.want { t.Errorf("got %q; want %q", got, tt.want) } }) } }
explode_data.jsonl/74340
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 12020, 81, 531, 49, 37035, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 72200, 49, 37035, 18200, 341, 197, 3244, 16708, 47152, 30514, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 1166, 11, 1848, 1669, 14775...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBroadcastTx(t *testing.T) { require := require.New(t) res, err := rpctest.GetGRPCClient().BroadcastTx(context.Background(), &core_grpc.RequestBroadcastTx{[]byte("this is a tx")}) require.Nil(err, "%+v", err) require.EqualValues(0, res.CheckTx.Code) require.EqualValues(0, res.DeliverTx.Code) }
explode_data.jsonl/73405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 43362, 31584, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 10202, 11, 1848, 1669, 33109, 67880, 2234, 8626, 4872, 2959, 1005, 43362, 31584, 5378, 19047, 1507, 609, 2153, 15682, 3992, 9659, 43362, 31584, 90...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Domain_WhenConvertedToHub_RoundTripsWithoutLoss(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip from Domain to hub returns original", prop.ForAll(RunResourceConversionTestForDomain, DomainGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(false, 240, os.Stdout)) }
explode_data.jsonl/39605
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 1557, 3121, 62, 4498, 61941, 1249, 19316, 2568, 795, 21884, 1690, 26040, 39838, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535, 1695, 284, 220, 16, 15, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceResolver_ServiceQuery(t *testing.T) { name := "name" namespace := "namespace" assert := assert.New(t) t.Run("Success", func(t *testing.T) { expected := &gqlschema.Service{ Name: "Test", } resource := &v1.Service{} resourceGetter := automock.NewServiceSvc() resourceGetter.On("Find", name, namespace).Return(resource, nil).Once() defer resourceGetter.AssertExpectations(t) converter := automock.NewGqlServiceConverter() converter.On("ToGQL", resource).Return(expected, nil).Once() defer converter.AssertExpectations(t) resolver := k8s.NewServiceResolver(resourceGetter) resolver.SetInstanceConverter(converter) result, err := resolver.ServiceQuery(nil, name, namespace) require.NoError(t, err) assert.Equal(expected, result) }) t.Run("NotFound", func(t *testing.T) { resourceGetter := automock.NewServiceSvc() resourceGetter.On("Find", name, namespace).Return(nil, nil).Once() defer resourceGetter.AssertExpectations(t) resolver := k8s.NewServiceResolver(resourceGetter) result, err := resolver.ServiceQuery(nil, name, namespace) require.NoError(t, err) assert.Nil(result) }) t.Run("ErrorGetting", func(t *testing.T) { expected := errors.New("test") resource := &v1.Service{} resourceGetter := automock.NewServiceSvc() resourceGetter.On("Find", name, namespace).Return(resource, expected).Once() defer resourceGetter.AssertExpectations(t) resolver := k8s.NewServiceResolver(resourceGetter) result, err := resolver.ServiceQuery(nil, name, namespace) assert.Error(err) assert.True(gqlerror.IsInternal(err)) assert.Nil(result) }) }
explode_data.jsonl/51131
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 601 }
[ 2830, 3393, 1860, 18190, 52548, 2859, 1155, 353, 8840, 836, 8, 341, 11609, 1669, 330, 606, 698, 56623, 1669, 330, 2231, 1837, 6948, 1669, 2060, 7121, 1155, 692, 3244, 16708, 445, 7188, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 424...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Params_Parse_DefaultValueTag(t *testing.T) { type T struct { Name string `d:"john"` Score float32 `d:"60"` } p, _ := ports.PopRand() s := g.Server(p) s.BindHandler("/parse", func(r *ghttp.Request) { var t *T if err := r.Parse(&t); err != nil { r.Response.WriteExit(err) } r.Response.WriteExit(t) }) s.SetPort(p) s.SetDumpRouterMap(false) s.Start() defer s.Shutdown() time.Sleep(100 * time.Millisecond) gtest.C(t, func(t *gtest.T) { prefix := fmt.Sprintf("http://127.0.0.1:%d", p) client := g.Client() client.SetPrefix(prefix) t.Assert(client.PostContent("/parse"), `{"Name":"john","Score":60}`) t.Assert(client.PostContent("/parse", `{"name":"smith"}`), `{"Name":"smith","Score":60}`) t.Assert(client.PostContent("/parse", `{"name":"smith", "score":100}`), `{"Name":"smith","Score":100}`) }) }
explode_data.jsonl/25344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 376 }
[ 2830, 3393, 44656, 77337, 60336, 1130, 5668, 1155, 353, 8840, 836, 8, 341, 13158, 350, 2036, 341, 197, 21297, 220, 914, 220, 1565, 67, 2974, 47817, 8805, 197, 7568, 2153, 2224, 18, 17, 1565, 67, 2974, 21, 15, 8805, 197, 532, 3223, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDaoAddReportLog(t *testing.T) { var ( c = context.TODO() l = &model.ReportLog{} ) convey.Convey("AddReportLog", t, func(ctx convey.C) { id, err := d.AddReportLog(c, l) ctx.Convey("Then err should be nil.id should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(id, convey.ShouldNotBeNil) }) }) }
explode_data.jsonl/51298
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 12197, 2212, 10361, 2201, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 284, 2266, 90988, 741, 197, 8810, 284, 609, 2528, 25702, 2201, 16094, 197, 340, 37203, 5617, 4801, 5617, 445, 2212, 10361, 2201, 497, 259, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetVersion(t *testing.T) { env := NewTestVDBEnv(t) env.Cleanup("testgetversion_") env.Cleanup("testgetversion_ns") env.Cleanup("testgetversion_ns2") defer env.Cleanup("testgetversion_") defer env.Cleanup("testgetversion_ns") defer env.Cleanup("testgetversion_ns2") commontests.TestGetVersion(t, env.DBProvider) }
explode_data.jsonl/596
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 1949, 5637, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 1532, 2271, 53, 3506, 14359, 1155, 340, 57538, 727, 60639, 445, 1944, 455, 4366, 62, 1138, 57538, 727, 60639, 445, 1944, 455, 4366, 34728, 1138, 57538, 727, 60639, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSlackReporterValidation(t *testing.T) { testCases := []struct { name string config func() Config successExpected bool }{ { name: "Valid config w/ wildcard slack_reporter_configs - no error", config: func() Config { slackCfg := map[string]SlackReporter{ "*": { SlackReporterConfig: prowjobv1.SlackReporterConfig{ Channel: "my-channel", }, }, } return Config{ ProwConfig: ProwConfig{ SlackReporterConfigs: slackCfg, }, } }, successExpected: true, }, { name: "Valid config w/ org/repo slack_reporter_configs - no error", config: func() Config { slackCfg := map[string]SlackReporter{ "istio/proxy": { SlackReporterConfig: prowjobv1.SlackReporterConfig{ Channel: "my-channel", }, }, } return Config{ ProwConfig: ProwConfig{ SlackReporterConfigs: slackCfg, }, } }, successExpected: true, }, { name: "Valid config w/ repo slack_reporter_configs - no error", config: func() Config { slackCfg := map[string]SlackReporter{ "proxy": { SlackReporterConfig: prowjobv1.SlackReporterConfig{ Channel: "my-channel", }, }, } return Config{ ProwConfig: ProwConfig{ SlackReporterConfigs: slackCfg, }, } }, successExpected: true, }, { name: "No channel w/ slack_reporter_configs - error", config: func() Config { slackCfg := map[string]SlackReporter{ "*": { JobTypesToReport: []prowapi.ProwJobType{"presubmit"}, }, } return Config{ ProwConfig: ProwConfig{ SlackReporterConfigs: slackCfg, }, } }, successExpected: false, }, { name: "Empty config - no error", config: func() Config { slackCfg := map[string]SlackReporter{} return Config{ ProwConfig: ProwConfig{ SlackReporterConfigs: slackCfg, }, } }, successExpected: true, }, { name: "Invalid template - error", config: func() Config { slackCfg := map[string]SlackReporter{ "*": { SlackReporterConfig: prowjobv1.SlackReporterConfig{ Channel: "my-channel", ReportTemplate: "{{ if .Spec.Name}}", }, }, } return Config{ ProwConfig: ProwConfig{ SlackReporterConfigs: slackCfg, }, } }, successExpected: false, }, { name: "Template accessed invalid property - error", 
config: func() Config { slackCfg := map[string]SlackReporter{ "*": { SlackReporterConfig: prowjobv1.SlackReporterConfig{ Channel: "my-channel", ReportTemplate: "{{ .Undef}}", }, }, } return Config{ ProwConfig: ProwConfig{ SlackReporterConfigs: slackCfg, }, } }, successExpected: false, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { cfg := tc.config() if err := cfg.validateComponentConfig(); (err == nil) != tc.successExpected { t.Errorf("Expected success=%t but got err=%v", tc.successExpected, err) } if tc.successExpected { for _, config := range cfg.SlackReporterConfigs { if config.ReportTemplate == "" { t.Errorf("expected default ReportTemplate to be set") } if config.Channel == "" { t.Errorf("expected Channel to be required") } } } }) } }
explode_data.jsonl/41031
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1629 }
[ 2830, 3393, 7442, 473, 52766, 13799, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 310, 914, 198, 197, 25873, 688, 2915, 368, 5532, 198, 197, 30553, 18896, 1807, 198, 197, 59403, 197, 197, 515, 298, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewProvider(t *testing.T) { c, err := lighthttp.New("chain-test", "192.168.0.1:26657") require.NoError(t, err) require.Equal(t, fmt.Sprintf("%s", c), "http{http://192.168.0.1:26657}") c, err = lighthttp.New("chain-test", "http://153.200.0.1:26657") require.NoError(t, err) require.Equal(t, fmt.Sprintf("%s", c), "http{http://153.200.0.1:26657}") c, err = lighthttp.New("chain-test", "153.200.0.1") require.NoError(t, err) require.Equal(t, fmt.Sprintf("%s", c), "http{http://153.200.0.1}") }
explode_data.jsonl/47996
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 3564, 5179, 1155, 353, 8840, 836, 8, 341, 1444, 11, 1848, 1669, 326, 21877, 1209, 7121, 445, 8819, 16839, 497, 330, 16, 24, 17, 13, 16, 21, 23, 13, 15, 13, 16, 25, 17, 21, 21, 20, 22, 1138, 17957, 35699, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRewriteWithPDFLink(t *testing.T) { description := "test" output := Rewriter("https://example.org/document.pdf", description, ``) expected := `<a href="https://example.org/document.pdf">PDF</a><br>test` if expected != output { t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) } }
explode_data.jsonl/21478
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 58465, 1247, 2354, 23424, 3939, 1155, 353, 8840, 836, 8, 341, 42407, 1669, 330, 1944, 698, 21170, 1669, 31691, 2542, 445, 2428, 1110, 8687, 2659, 46388, 15995, 497, 4008, 11, 9902, 340, 42400, 1669, 30586, 64, 1801, 428, 242...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_compactionPlanHandler_completeCompaction(t *testing.T) { type fields struct { plans map[int64]*compactionTask sessions *SessionManager meta *meta flushCh chan UniqueID } type args struct { result *datapb.CompactionResult } tests := []struct { name string fields fields args args wantErr bool want *compactionTask }{ { "test complete non existed compaction task", fields{ plans: map[int64]*compactionTask{1: {}}, }, args{ result: &datapb.CompactionResult{PlanID: 2}, }, true, nil, }, { "test complete completed task", fields{ plans: map[int64]*compactionTask{1: {state: completed}}, }, args{ result: &datapb.CompactionResult{PlanID: 1}, }, true, nil, }, { "test complete inner compaction", fields{ map[int64]*compactionTask{ 1: { triggerInfo: &compactionSignal{id: 1}, state: executing, plan: &datapb.CompactionPlan{ PlanID: 1, SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ {SegmentID: 1, FieldBinlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log1")}}, }, Type: datapb.CompactionType_InnerCompaction, }, }, }, nil, &meta{ client: memkv.NewMemoryKV(), segments: &SegmentsInfo{ map[int64]*SegmentInfo{ 1: {SegmentInfo: &datapb.SegmentInfo{ID: 1, Binlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log1")}}}, }, }, }, make(chan UniqueID, 1), }, args{ result: &datapb.CompactionResult{ PlanID: 1, SegmentID: 1, InsertLogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log2")}, }, }, false, &compactionTask{ triggerInfo: &compactionSignal{id: 1}, state: completed, plan: &datapb.CompactionPlan{ PlanID: 1, SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ {SegmentID: 1, FieldBinlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log1")}}, }, Type: datapb.CompactionType_InnerCompaction, }, }, }, { "test complete merge compaction", fields{ map[int64]*compactionTask{ 1: { triggerInfo: &compactionSignal{id: 1}, state: executing, plan: &datapb.CompactionPlan{ PlanID: 1, SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ {SegmentID: 1, FieldBinlogs: 
[]*datapb.FieldBinlog{getFieldBinlogPaths(1, "log1")}}, {SegmentID: 2, FieldBinlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log2")}}, }, Type: datapb.CompactionType_MergeCompaction, }, }, }, nil, &meta{ client: memkv.NewMemoryKV(), segments: &SegmentsInfo{ map[int64]*SegmentInfo{ 1: {SegmentInfo: &datapb.SegmentInfo{ID: 1, Binlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log1")}}}, 2: {SegmentInfo: &datapb.SegmentInfo{ID: 2, Binlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log2")}}}, }, }, }, make(chan UniqueID, 1), }, args{ result: &datapb.CompactionResult{ PlanID: 1, SegmentID: 3, InsertLogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log3")}, }, }, false, &compactionTask{ triggerInfo: &compactionSignal{id: 1}, state: completed, plan: &datapb.CompactionPlan{ PlanID: 1, SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ {SegmentID: 1, FieldBinlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log1")}}, {SegmentID: 2, FieldBinlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, "log2")}}, }, Type: datapb.CompactionType_MergeCompaction, }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := &compactionPlanHandler{ plans: tt.fields.plans, sessions: tt.fields.sessions, meta: tt.fields.meta, flushCh: tt.fields.flushCh, } err := c.completeCompaction(tt.args.result) assert.Equal(t, tt.wantErr, err != nil) }) } }
explode_data.jsonl/9511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2058 }
[ 2830, 3393, 18177, 1311, 20485, 3050, 27675, 13552, 1311, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 72213, 596, 262, 2415, 18640, 21, 19, 8465, 5689, 1311, 6262, 198, 197, 1903, 8551, 353, 5283, 2043, 198, 197, 84004, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSend(t *testing.T) { t.Parallel() utils.InitLoggerForTests(testing.Verbose()) modtime := testNow atime := testNow.Add(1 * time.Second) dirModtime := testNow.Add(2 * time.Second) dirAtime := testNow.Add(3 * time.Second) logger := logrus.WithField(trace.Component, "t:send") var testCases = []struct { desc string config Config fs testFS args []string }{ { desc: "regular file preserving the attributes", config: newSourceConfig("file", Flags{PreserveAttrs: true}), args: args("-v", "-t", "-p"), fs: newTestFS(logger, newFile("file", modtime, atime, "file contents")), }, { desc: "directory preserving the attributes", config: newSourceConfig("dir", Flags{PreserveAttrs: true, Recursive: true}), args: args("-v", "-t", "-r", "-p"), fs: newTestFS( logger, // Use timestamps extending backwards to test time application newDir("dir", dirModtime.Add(1*time.Second), dirAtime.Add(2*time.Second), newFile("dir/file", modtime.Add(1*time.Minute), atime.Add(2*time.Minute), "file contents"), newDir("dir/dir2", dirModtime, dirAtime, newFile("dir/dir2/file2", modtime, atime, "file2 contents")), ), ), }, } for _, tt := range testCases { tt := tt t.Run(tt.desc, func(t *testing.T) { t.Parallel() cmd, err := CreateCommand(tt.config) require.NoError(t, err) targetDir := tempDir(t) target := filepath.Join(targetDir, tt.config.Flags.Target[0]) args := append(tt.args, target) // Source is missing, expect an error. err = runSCP(cmd, args...) require.Regexp(t, "could not access local path.*no such file or directory", err) tt.config.FileSystem = tt.fs cmd, err = CreateCommand(tt.config) require.NoError(t, err) // Resend the data err = runSCP(cmd, args...) require.NoError(t, err) fs := newEmptyTestFS(logger) fromOS(t, targetDir, &fs) validateSCP(t, fs, tt.fs) validateSCPContents(t, fs, tt.fs) }) } }
explode_data.jsonl/74623
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 830 }
[ 2830, 3393, 11505, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 80206, 26849, 7395, 2461, 18200, 8623, 287, 42505, 8297, 2398, 42228, 1678, 1669, 1273, 7039, 198, 35447, 545, 1669, 1273, 7039, 1904, 7, 16, 353, 882, 32435, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTooSmall(t *testing.T) { // load test jpeg f, err := os.Open("testdata/video-001.jpeg") if err != nil { t.Fatal(err) } defer f.Close() img, err := jpeg.Decode(f) if err != nil { t.Fatal(err) } // attempt hide data in img var buf bytes.Buffer data := make([]byte, 10e6) err = Hide(&buf, img, data, nil) if err != ErrTooSmall { t.Fatal("expected ErrTooSmall, got", err) } }
explode_data.jsonl/74661
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 31246, 25307, 1155, 353, 8840, 836, 8, 341, 197, 322, 2795, 1273, 54566, 198, 1166, 11, 1848, 1669, 2643, 12953, 445, 92425, 41303, 12, 15, 15, 16, 30675, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestConsumeRefill(t *testing.T) { // Given rates := NewRateSet() require.NoError(t, rates.Add(10*time.Second, 10, 20)) require.NoError(t, rates.Add(100*time.Second, 20, 50)) clock := testutils.GetClock() tbs := NewTokenBucketSet(rates, clock) _, err := tbs.Consume(15) require.NoError(t, err) assert.Equal(t, "{10s: 5}, {1m40s: 35}", tbs.debugState()) // When clock.Sleep(10 * time.Second) delay, err := tbs.Consume(0) // Consumes nothing but forces an internal state update. require.NoError(t, err) // Then assert.Equal(t, time.Duration(0), delay) assert.Equal(t, "{10s: 15}, {1m40s: 37}", tbs.debugState()) }
explode_data.jsonl/25390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 1109, 31323, 3945, 483, 1155, 353, 8840, 836, 8, 341, 197, 322, 16246, 198, 7000, 973, 1669, 1532, 11564, 1649, 741, 17957, 35699, 1155, 11, 7813, 1904, 7, 16, 15, 77053, 32435, 11, 220, 16, 15, 11, 220, 17, 15, 1171, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_issueClose(t *testing.T) { t.Parallel() oldUpdateIssue := api.UpdateIssue timer, _ := time.Parse(time.RFC3339, "2014-11-12T11:45:26.371Z") api.UpdateIssue = func(client *gitlab.Client, projectID interface{}, issueID int, opts *gitlab.UpdateIssueOptions) (*gitlab.Issue, error) { if projectID == "" || projectID == "WRONG_REPO" || projectID == "expected_err" || issueID == 0 { return nil, fmt.Errorf("error expected") } return &gitlab.Issue{ ID: issueID, IID: issueID, State: "closed", Description: "Dummy description for issue " + string(rune(issueID)), Author: &gitlab.IssueAuthor{ ID: 1, Name: "John Dev Wick", Username: "jdwick", }, CreatedAt: &timer, }, nil } testCases := []struct { Name string Issue string ExpectedMsg []string wantErr bool }{ { Name: "Issue Exists", Issue: "1", ExpectedMsg: []string{"Closing Issue...", "Issue #1 closed"}, }, { Name: "Issue Does Not Exist", Issue: "0", ExpectedMsg: []string{"Closing Issue", "404 Not found"}, wantErr: true, }, } for _, tc := range testCases { t.Run(tc.Name, func(t *testing.T) { var stderr bytes.Buffer var stdout bytes.Buffer cmd := NewCmdClose(cmdtest.StubFactory("https://gitlab.com/glab-cli/test")) cmd.SetArgs([]string{tc.Issue}) cmd.SetOut(&stdout) cmd.SetErr(&stderr) _, err := cmd.ExecuteC() if tc.wantErr { require.Error(t, err) return } else { require.NoError(t, err) } out := stripansi.Strip(stdout.String()) //outErr := stripansi.Strip(stderr.String()) for _, msg := range tc.ExpectedMsg { assert.Contains(t, out, msg) } }) } api.UpdateIssue = oldUpdateIssue }
explode_data.jsonl/32271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 812 }
[ 2830, 3393, 53340, 7925, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 61828, 4289, 42006, 1669, 6330, 16689, 42006, 198, 51534, 11, 716, 1669, 882, 8937, 9730, 2013, 6754, 18, 18, 18, 24, 11, 330, 17, 15, 16, 19, 12, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDeleteRole(t *testing.T) { ts := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if r.Method != http.MethodDelete { t.Errorf("unexpected method: want %s, got %s", http.MethodDelete, r.Method) } if r.URL.Path != "/api/v0/services/awesome-service/roles/application" { t.Errorf("unexpected path: want %s, got %s", "/api/v0/services/awesome-service/roles", r.URL.Path) } w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusOK) fmt.Fprint(w, `{"name":"application","memo":"the application of awesome service"}`) })) defer ts.Close() u, err := url.Parse(ts.URL) if err != nil { t.Fatal(err) } c := &Client{ BaseURL: u, APIKey: "DUMMY-API-KEY", HTTPClient: ts.Client(), } got, err := c.DeleteRole(context.Background(), "awesome-service", "application") if err != nil { t.Error(err) } want := &Role{ Name: "application", Memo: "the application of awesome service", } if diff := cmp.Diff(got, want); diff != "" { t.Errorf("role differs: (-got +want)\n%s", diff) } }
explode_data.jsonl/38078
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 444 }
[ 2830, 3393, 6435, 9030, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 13470, 1220, 2836, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 435, 20798, 961, 1758, 20798, 6435, 341, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestForNameShouldReturnService(t *testing.T) { svc, err := ForName("github") assert.Nil(t, err) assert.Equal(t, "*service.Github", reflect.TypeOf(svc).String()) }
explode_data.jsonl/18570
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 2461, 675, 14996, 5598, 1860, 1155, 353, 8840, 836, 8, 341, 1903, 7362, 11, 1848, 1669, 1752, 675, 445, 5204, 1138, 6948, 59678, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, 15630, 7936, 1224, 3827, 497, 8708, 73921, 1141, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDataSourceOnePasswordVaultRead(t *testing.T) { expectedVault := onepassword.Vault{ ID: "vault-uuid", Name: "Name of the vault", Description: "This vault will be retrieve", } var connectErr = errors.New("some request error") cases := map[string]struct { input map[string]string getVaultRes onepassword.Vault getVaultErr error getVaultByTitleRes []onepassword.Vault getVaultByTitleErr error expected onepassword.Vault expectedErr error }{ "by name": { input: map[string]string{ "name": expectedVault.Name, }, getVaultByTitleRes: []onepassword.Vault{ expectedVault, }, expected: expectedVault, }, "by error": { input: map[string]string{ "name": expectedVault.Name, }, getVaultByTitleErr: connectErr, expectedErr: connectErr, }, "not_found_by_name": { input: map[string]string{ "name": expectedVault.Name, }, getVaultByTitleRes: []onepassword.Vault{}, expectedErr: fmt.Errorf("no vault found with name '%s'", expectedVault.Name), }, "multiple_found_by_name": { input: map[string]string{ "name": expectedVault.Name, }, getVaultByTitleRes: []onepassword.Vault{ expectedVault, expectedVault, }, expectedErr: fmt.Errorf("multiple vaults found with name '%s'", expectedVault.Name), }, "by uuid": { input: map[string]string{ "uuid": expectedVault.ID, }, getVaultRes: expectedVault, expected: expectedVault, }, "by uuid error": { input: map[string]string{ "uuid": expectedVault.ID, }, getVaultErr: connectErr, expectedErr: connectErr, }, } for name, tc := range cases { t.Run(name, func(t *testing.T) { client := &testClient{ GetVaultsByTitleFunc: func(title string) ([]onepassword.Vault, error) { return tc.getVaultByTitleRes, tc.getVaultByTitleErr }, GetVaultFunc: func(uuid string) (*onepassword.Vault, error) { return &tc.getVaultRes, tc.getVaultErr }, } dataSourceData := schema.TestResourceDataRaw(t, dataSourceOnepasswordVault().Schema, nil) for key, value := range tc.input { dataSourceData.Set(key, value) } err := dataSourceOnepasswordVaultRead(dataSourceData, client) if tc.expectedErr != 
nil { if err == nil || err.Error() != tc.expectedErr.Error() { t.Errorf("Unexpected error occured. Expected %v, got %v", tc.expectedErr, err) } } else { if err != nil { t.Errorf("Got unexpected error: %s", err) } assertResourceValue(t, dataSourceData, "uuid", tc.expected.ID) assertResourceValue(t, dataSourceData, "name", tc.expected.Name) assertResourceValue(t, dataSourceData, "description", tc.expected.Description) } }) } }
explode_data.jsonl/78289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1209 }
[ 2830, 3393, 17173, 3966, 4876, 79177, 4418, 1155, 353, 8840, 836, 8, 341, 42400, 79177, 1669, 389, 747, 1938, 5058, 945, 515, 197, 29580, 25, 688, 330, 82983, 12, 17128, 756, 197, 21297, 25, 286, 330, 675, 315, 279, 34584, 756, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMetric_CopyTo(t *testing.T) { ms := NewMetric() NewMetric().CopyTo(ms) assert.True(t, ms.IsNil()) generateTestMetric().CopyTo(ms) assert.EqualValues(t, generateTestMetric(), ms) }
explode_data.jsonl/19510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 54310, 77637, 1249, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 54310, 741, 197, 3564, 54310, 1005, 12106, 1249, 35680, 340, 6948, 32443, 1155, 11, 9829, 4506, 19064, 2398, 3174, 13220, 2271, 54310, 1005, 12106, 1249, 356...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestOcSameProcessAsParentSpanToInternal(t *testing.T) { span := pdata.NewSpan() ocSameProcessAsParentSpanToInternal(nil, span) assert.Equal(t, 0, span.Attributes().Len()) ocSameProcessAsParentSpanToInternal(wrapperspb.Bool(false), span) assert.Equal(t, 1, span.Attributes().Len()) v, ok := span.Attributes().Get(occonventions.AttributeSameProcessAsParentSpan) assert.True(t, ok) assert.EqualValues(t, pdata.ValueTypeBool, v.Type()) assert.False(t, v.BoolVal()) ocSameProcessAsParentSpanToInternal(wrapperspb.Bool(true), span) assert.Equal(t, 1, span.Attributes().Len()) v, ok = span.Attributes().Get(occonventions.AttributeSameProcessAsParentSpan) assert.True(t, ok) assert.EqualValues(t, pdata.ValueTypeBool, v.Type()) assert.True(t, v.BoolVal()) }
explode_data.jsonl/53774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 46, 66, 19198, 7423, 2121, 8387, 12485, 1249, 11569, 1155, 353, 8840, 836, 8, 341, 197, 1480, 1669, 70311, 7121, 12485, 741, 197, 509, 19198, 7423, 2121, 8387, 12485, 1249, 11569, 27907, 11, 9390, 340, 6948, 12808, 1155, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSourceDiffURL(t *testing.T) { var ctl control for _, tt := range []struct { p config.Project from, to revision.Revision want string }{ { p: config.Project{ Name: "test project", Repo: config.Repo{ RepoOwner: "foo", RepoName: "test", }, }, from: "abc123", to: "abc123", want: "", }, { p: config.Project{ Name: "test project", Repo: config.Repo{ RepoOwner: "foo", RepoName: "test", }, }, from: "abc123", to: "abc456", want: "https://github.com/foo/test/compare/abc123...abc456", }, } { if got := ctl.SourceDiffURL(tt.p, tt.from, tt.to); got != tt.want { t.Errorf("ctl.SourceDiffURL(%#v, %q, %q) = %q; want %q", tt.p, tt.from, tt.to, got, tt.want) } } }
explode_data.jsonl/27452
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 3608, 21751, 3144, 1155, 353, 8840, 836, 8, 341, 2405, 64935, 2524, 198, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 3223, 286, 2193, 30944, 198, 197, 42727, 11, 311, 23578, 2817, 13013, 198, 197, 50780, 257, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetFillUpJSON(t *testing.T) { var u []User resp := rb.Get("/user") if resp.StatusCode != http.StatusOK { t.Fatal("Status != OK (200)") } err := resp.FillUp(&u) if err != nil { t.Fatal("Json fill up failed. Error: " + err.Error()) } for _, v := range users { if v.Name == "Hernan" { return } } t.Fatal("Couldn't found Hernan") }
explode_data.jsonl/49702
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 1949, 14449, 2324, 5370, 1155, 353, 8840, 836, 8, 1476, 2405, 575, 3056, 1474, 271, 34653, 1669, 18717, 2234, 4283, 872, 5130, 743, 9039, 37828, 961, 1758, 52989, 341, 197, 3244, 26133, 445, 2522, 961, 10402, 320, 17, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFlaggingDecommissionedHosts(t *testing.T) { Convey("When flagging decommissioned hosts", t, func() { Convey("only hosts in the database who are marked decommissioned"+ " should be returned", func() { // reset the db require.NoError(t, db.ClearCollections(host.Collection), "error clearing hosts collection") // insert hosts with different statuses host1 := &host.Host{ Provider: evergreen.ProviderNameMock, Id: "h1", Status: evergreen.HostRunning, } require.NoError(t, host1.Insert(), "error inserting host") host2 := &host.Host{ Provider: evergreen.ProviderNameMock, Id: "h2", Status: evergreen.HostTerminated, } require.NoError(t, host2.Insert(), "error inserting host") host3 := &host.Host{ Provider: evergreen.ProviderNameMock, Id: "h3", Status: evergreen.HostDecommissioned, } require.NoError(t, host3.Insert(), "error inserting host") host4 := &host.Host{ Provider: evergreen.ProviderNameMock, Id: "h4", Status: evergreen.HostDecommissioned, } require.NoError(t, host4.Insert(), "error inserting host") host5 := &host.Host{ Provider: evergreen.ProviderNameMock, Id: "h5", Status: evergreen.HostQuarantined, } require.NoError(t, host5.Insert(), "error inserting host") // flag the decommissioned hosts - there should be 2 of them decommissioned, err := host.FindHostsToTerminate() So(err, ShouldBeNil) So(len(decommissioned), ShouldEqual, 2) var ids []string for _, h := range decommissioned { ids = append(ids, h.Id) } So(util.StringSliceContains(ids, host3.Id), ShouldBeTrue) So(util.StringSliceContains(ids, host4.Id), ShouldBeTrue) }) }) }
explode_data.jsonl/31496
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 719 }
[ 2830, 3393, 12135, 3173, 4900, 316, 2728, 291, 9296, 82, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 4498, 5181, 3173, 28502, 2728, 291, 18432, 497, 259, 11, 2915, 368, 1476, 197, 93070, 5617, 445, 3243, 18432, 304, 279, 4625, 879...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEngineDeleteRange(t *testing.T) { defer leaktest.AfterTest(t)() testEngineDeleteRange(t, func(engine Engine, start, end MVCCKey) error { return engine.ClearRange(start, end) }) }
explode_data.jsonl/38113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 4571, 6435, 6046, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 18185, 4571, 6435, 6046, 1155, 11, 2915, 48974, 8200, 11, 1191, 11, 835, 42271, 3706, 1592, 8, 1465, 341, 197, 853, 4712, 13524...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestImageTransform(t *testing.T) { for _, tt := range updateImageSpecTests { t.Run(tt.name, func(t *testing.T) { runImageTransformTest(t, &tt) }) } }
explode_data.jsonl/17100
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 1906, 8963, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 2647, 1906, 8327, 18200, 341, 197, 3244, 16708, 47152, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 56742, 1906, 8963, 2271, 1155, 11, 609, 556...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestCall_Call(t *testing.T) { mockJSONRPC := &mocks.JSONRPC{} mockGraphQL := &mocks.GraphQL{} c := &Client{ c: mockJSONRPC, g: mockGraphQL, traceSemaphore: semaphore.NewWeighted(100), } ctx := context.Background() mockJSONRPC.On( "CallContext", ctx, mock.Anything, "eth_call", map[string]string{ "to": "0xB5E5D0F8C0cbA267CD3D7035d6AdC8eBA7Df7Cdd", "data": "0x70a08231000000000000000000000000b5e5d0f8c0cba267cd3d7035d6adc8eba7df7cdd", }, toBlockNumArg(big.NewInt(11408349)), ).Return( nil, ).Run( func(args mock.Arguments) { r := args.Get(1).(*string) var expected map[string]interface{} file, err := ioutil.ReadFile("testdata/call_balance_11408349.json") assert.NoError(t, err) err = json.Unmarshal(file, &expected) assert.NoError(t, err) *r = expected["data"].(string) }, ).Once() correctRaw, err := ioutil.ReadFile("testdata/call_balance_11408349.json") assert.NoError(t, err) var correct map[string]interface{} assert.NoError(t, json.Unmarshal(correctRaw, &correct)) resp, err := c.Call( ctx, &RosettaTypes.CallRequest{ Method: "eth_call", Parameters: map[string]interface{}{ "index": 11408349, "to": "0xB5E5D0F8C0cbA267CD3D7035d6AdC8eBA7Df7Cdd", "data": "0x70a08231000000000000000000000000b5e5d0f8c0cba267cd3d7035d6adc8eba7df7cdd", }, }, ) assert.Equal(t, &RosettaTypes.CallResponse{ Result: correct, Idempotent: false, }, resp) assert.NoError(t, err) mockJSONRPC.AssertExpectations(t) mockGraphQL.AssertExpectations(t) }
explode_data.jsonl/55498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 763 }
[ 2830, 3393, 7220, 76028, 1155, 353, 8840, 836, 8, 341, 77333, 5370, 29528, 1669, 609, 16712, 82, 18009, 29528, 16094, 77333, 88637, 1669, 609, 16712, 82, 40237, 3588, 31483, 1444, 1669, 609, 2959, 515, 197, 1444, 25, 1060, 7860, 5370, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDataURIValidation(t *testing.T) { tests := []struct { param string expected bool }{ {"data:image/png;base64,TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4=", true}, {"data:text/plain;base64,Vml2YW11cyBmZXJtZW50dW0gc2VtcGVyIHBvcnRhLg==", true}, {"image/gif;base64,U3VzcGVuZGlzc2UgbGVjdHVzIGxlbw==", false}, {"data:image/gif;base64,MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuMPNS1Ufof9EW/M98FNw" + "UAKrwflsqVxaxQjBQnHQmiI7Vac40t8x7pIb8gLGV6wL7sBTJiPovJ0V7y7oc0Ye" + "rhKh0Rm4skP2z/jHwwZICgGzBvA0rH8xlhUiTvcwDCJ0kc+fh35hNt8srZQM4619" + "FTgB66Xmp4EtVyhpQV+t02g6NzK72oZI0vnAvqhpkxLeLiMCyrI416wHm5Tkukhx" + "QmcL2a6hNOyu0ixX/x2kSFXApEnVrJ+/IxGyfyw8kf4N2IZpW5nEP847lpfj0SZZ" + "Fwrd1mnfnDbYohX2zRptLy2ZUn06Qo9pkG5ntvFEPo9bfZeULtjYzIl6K8gJ2uGZ" + "HQIDAQAB", true}, {"data:image/png;base64,12345", false}, {"", false}, {"data:text,:;base85,U3VzcGVuZGlzc2UgbGVjdHVzIGxlbw==", false}, } validate := New() for i, test := range tests { errs := validate.Var(test.param, "datauri") if test.expected { if !IsEqual(errs, nil) { t.Fatalf("Index: %d DataURI failed Error: %s", i, errs) } } else { if IsEqual(errs, nil) { t.Fatalf("Index: %d DataURI failed Error: %s", i, errs) } else { val := getError(errs, "", "") if val.Tag() != "datauri" { t.Fatalf("Index: %d DataURI failed Error: %s", i, errs) } } } } }
explode_data.jsonl/77263
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 851 }
[ 2830, 93200, 10301, 13799, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 36037, 262, 914, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 4913, 691, 37670, 35960, 81860, 21, 19, 19997, 38, 24, 88, 81756, 15, 6743,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestStatementQueryRowConcurrent(t *testing.T) { db := newTestDB(t, "people") defer closeDB(t, db) stmt, err := db.Prepare("SELECT|people|age|name=?") if err != nil { t.Fatalf("Prepare: %v", err) } defer stmt.Close() const n = 10 ch := make(chan error, n) for i := 0; i < n; i++ { go func() { var age int err := stmt.QueryRow("Alice").Scan(&age) if err == nil && age != 1 { err = fmt.Errorf("unexpected age %d", age) } ch <- err }() } for i := 0; i < n; i++ { if err := <-ch; err != nil { t.Error(err) } } }
explode_data.jsonl/15967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 8636, 2859, 3102, 1109, 3231, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 330, 16069, 1138, 16867, 3265, 3506, 1155, 11, 2927, 340, 55822, 11, 1848, 1669, 2927, 28770, 3380, 445, 4858, 91, 16069, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestJobSpecsController_Index_noSort(t *testing.T) { t.Parallel() rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplication(t, eth.NewClientWith(rpcClient, gethClient), ) defer cleanup() require.NoError(t, app.Start()) client := app.NewHTTPClient() j1, err := setupJobSpecsControllerIndex(app) assert.NoError(t, err) resp, cleanup := client.Get("/v2/specs?size=x") defer cleanup() cltest.AssertServerResponse(t, resp, http.StatusUnprocessableEntity) resp, cleanup = client.Get("/v2/specs?size=1") defer cleanup() cltest.AssertServerResponse(t, resp, http.StatusOK) body := cltest.ParseResponseBody(t, resp) metaCount, err := cltest.ParseJSONAPIResponseMetaCount(body) require.NoError(t, err) require.Equal(t, 2, metaCount) var links jsonapi.Links jobs := []models.JobSpec{} err = web.ParsePaginatedResponse(body, &jobs, &links) assert.NoError(t, err) assert.NotEmpty(t, links["next"].Href) assert.Empty(t, links["prev"].Href) assert.Len(t, jobs, 1) assert.Equal(t, j1.ID, jobs[0].ID) resp, cleanup = client.Get(links["next"].Href) defer cleanup() cltest.AssertServerResponse(t, resp, http.StatusOK) jobs = []models.JobSpec{} err = web.ParsePaginatedResponse(cltest.ParseResponseBody(t, resp), &jobs, &links) assert.NoError(t, err) assert.Empty(t, links["next"]) assert.NotEmpty(t, links["prev"]) require.Len(t, jobs, 1) assert.Equal(t, models.InitiatorWeb, jobs[0].Initiators[0].Type, "should have the same type") assert.NotEqual(t, true, jobs[0].Initiators[0].Ran, "should ignore fields for other initiators") }
explode_data.jsonl/31803
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 640 }
[ 2830, 3393, 12245, 8327, 82, 2051, 50361, 6536, 10231, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 7000, 3992, 2959, 11, 633, 71, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRetryPermanent(t *testing.T) { connected := make(chan *ConnReq) disconnected := make(chan *ConnReq) cmgr, err := New(&Config{ RetryDuration: time.Millisecond, TargetOutbound: 1, Dial: mockDialer, OnConnection: func(c *ConnReq, conn net.Conn) { connected <- c }, OnDisconnection: func(c *ConnReq) { disconnected <- c }, }) if err != nil { t.Fatalf("New error: %v", err) } cr := &ConnReq{ Addr: &net.TCPAddr{ IP: net.ParseIP("127.0.0.1"), Port: 18555, }, Permanent: true, } go cmgr.Connect(cr) cmgr.Start() gotConnReq := <-connected wantID := cr.ID() gotID := gotConnReq.ID() if gotID != wantID { t.Fatalf("retry: %v - want ID %v, got ID %v", cr.Addr, wantID, gotID) } gotState := cr.State() wantState := ConnEstablished if gotState != wantState { t.Fatalf("retry: %v - want state %v, got state %v", cr.Addr, wantState, gotState) } cmgr.Disconnect(cr.ID()) gotConnReq = <-disconnected wantID = cr.ID() gotID = gotConnReq.ID() if gotID != wantID { t.Fatalf("retry: %v - want ID %v, got ID %v", cr.Addr, wantID, gotID) } gotState = cr.State() wantState = ConnDisconnected if gotState != wantState { t.Fatalf("retry: %v - want state %v, got state %v", cr.Addr, wantState, gotState) } gotConnReq = <-connected wantID = cr.ID() gotID = gotConnReq.ID() if gotID != wantID { t.Fatalf("retry: %v - want ID %v, got ID %v", cr.Addr, wantID, gotID) } gotState = cr.State() wantState = ConnEstablished if gotState != wantState { t.Fatalf("retry: %v - want state %v, got state %v", cr.Addr, wantState, gotState) } cmgr.Remove(cr.ID()) gotConnReq = <-disconnected wantID = cr.ID() gotID = gotConnReq.ID() if gotID != wantID { t.Fatalf("retry: %v - want ID %v, got ID %v", cr.Addr, wantID, gotID) } gotState = cr.State() wantState = ConnDisconnected if gotState != wantState { t.Fatalf("retry: %v - want state %v, got state %v", cr.Addr, wantState, gotState) } cmgr.Stop() }
explode_data.jsonl/53950
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 864 }
[ 2830, 3393, 51560, 78793, 1155, 353, 8840, 836, 8, 341, 197, 15288, 1669, 1281, 35190, 353, 9701, 27234, 340, 34597, 15288, 1669, 1281, 35190, 353, 9701, 27234, 340, 1444, 48292, 11, 1848, 1669, 1532, 2099, 2648, 515, 197, 11143, 15149, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestActivate(t *testing.T) { user := &model.User{Id: model.NewId(), Roles: "system_user"} user.MfaSecret = model.NewRandomBase32String(MFA_SECRET_SIZE) token := dgoogauth.ComputeCode(user.MfaSecret, time.Now().UTC().Unix()/30) config := model.Config{} config.SetDefaults() config.ServiceSettings.EnableMultifactorAuthentication = model.NewBool(true) configService := testutils.StaticConfigService{Cfg: &config} t.Run("fail on disabled mfa", func(t *testing.T) { wrongConfig := model.Config{} wrongConfig.SetDefaults() wrongConfig.ServiceSettings.EnableMultifactorAuthentication = model.NewBool(false) wrongConfigService := testutils.StaticConfigService{Cfg: &wrongConfig} mfa := New(wrongConfigService, nil) err := mfa.Activate(user, "not-important") require.NotNil(t, err) require.Equal(t, "mfa.mfa_disabled.app_error", err.Id) }) t.Run("fail on wrongly formatted token", func(t *testing.T) { mfa := New(configService, nil) err := mfa.Activate(user, "invalid-token") require.NotNil(t, err) require.Equal(t, "mfa.activate.authenticate.app_error", err.Id) }) t.Run("fail on invalid token", func(t *testing.T) { mfa := New(configService, nil) err := mfa.Activate(user, "000000") require.NotNil(t, err) require.Equal(t, "mfa.activate.bad_token.app_error", err.Id) }) t.Run("fail on store action fail", func(t *testing.T) { storeMock := mocks.Store{} userStoreMock := mocks.UserStore{} userStoreMock.On("UpdateMfaActive", user.Id, true).Return(func(userId string, active bool) *model.AppError { return model.NewAppError("Activate", "mfa.activate.save_active.app_error", nil, "", http.StatusInternalServerError) }) storeMock.On("User").Return(&userStoreMock) mfa := New(configService, &storeMock) err := mfa.Activate(user, fmt.Sprintf("%06d", token)) require.NotNil(t, err) require.Equal(t, "mfa.activate.save_active.app_error", err.Id) }) t.Run("Successful activate", func(t *testing.T) { storeMock := mocks.Store{} userStoreMock := mocks.UserStore{} userStoreMock.On("UpdateMfaActive", user.Id, 
true).Return(func(userId string, active bool) *model.AppError { return nil }) storeMock.On("User").Return(&userStoreMock) mfa := New(configService, &storeMock) err := mfa.Activate(user, fmt.Sprintf("%06d", token)) require.Nil(t, err) }) }
explode_data.jsonl/64310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 900 }
[ 2830, 3393, 31242, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 609, 2528, 7344, 90, 764, 25, 1614, 7121, 764, 1507, 50907, 25, 330, 8948, 3317, 16707, 19060, 1321, 3632, 19773, 284, 1614, 7121, 13999, 3978, 18, 17, 703, 3189, 3627, 314...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPubmaticInvalidJson(t *testing.T) { server := httptest.NewServer( http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, "Blah") }), ) defer server.Close() conf := *adapters.DefaultHTTPAdapterConfig an := NewPubmaticLegacyAdapter(&conf, server.URL) ctx := context.Background() pbReq := pbs.PBSRequest{} pbBidder := pbs.PBSBidder{ BidderCode: "bannerCode", AdUnits: []pbs.PBSAdUnit{ { Code: "unitCode", MediaTypes: []pbs.MediaType{pbs.MEDIA_TYPE_BANNER}, Sizes: []openrtb2.Format{ { W: 120, H: 240, }, }, Params: json.RawMessage("{\"publisherId\": \"10\", \"adSlot\": \"slot@120x240\"}"), }, }, } _, err := an.Call(ctx, &pbReq, &pbBidder) if err == nil { t.Fatalf("No error received for invalid request") } }
explode_data.jsonl/77913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 382 }
[ 2830, 3393, 29162, 37244, 7928, 5014, 1155, 353, 8840, 836, 8, 1476, 41057, 1669, 54320, 70334, 7121, 5475, 1006, 197, 28080, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 298, 11009, 991, 1350, 3622, 11, 330, 4923,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1