text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestHandlePasswordExpiring(t *testing.T) { data, err := ioutil.ReadFile("example/password-expiring.html") require.Nil(t, err) doc, err := goquery.NewDocumentFromReader(bytes.NewReader(data)) require.Nil(t, err) ac := Client{} loginDetails := creds.LoginDetails{ Username: "fdsa", Password: "secret", URL: "https://example.com/foo", } ctx := context.WithValue(context.Background(), ctxKey("login"), &loginDetails) _, req, err := ac.handlePasswordExpiring(ctx, doc) require.Nil(t, err) b, err := ioutil.ReadAll(req.Body) require.Nil(t, err) s := string(b[:]) require.Contains(t, s, "pf.passwordExpiring=true") require.Contains(t, s, "pf.notificationCancel=clicked") require.Contains(t, s, "pf.pcvId=PDPCVOIDC") }
explode_data.jsonl/4336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 6999, 4876, 840, 78763, 1155, 353, 8840, 836, 8, 341, 8924, 11, 1848, 1669, 43144, 78976, 445, 8687, 59470, 18376, 6191, 2564, 1138, 17957, 59678, 1155, 11, 1848, 692, 59536, 11, 1848, 1669, 728, 1631, 7121, 7524, 3830, 5062...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCerts(t *testing.T) { dynatraceServer, _ := createTestDynatraceClient(t, http.NotFoundHandler()) defer dynatraceServer.Close() dtc := dynatraceClient{ url: dynatraceServer.URL, apiToken: apiToken, paasToken: paasToken, httpClient: dynatraceServer.Client(), hostCache: nil, logger: log.Log.WithName("dtc"), } transport := dtc.httpClient.Transport.(*http.Transport) certs := Certs(nil) assert.NotNil(t, certs) certs(&dtc) assert.Equal(t, [][]uint8{}, transport.TLSClientConfig.RootCAs.Subjects()) }
explode_data.jsonl/3776
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 34, 15546, 1155, 353, 8840, 836, 8, 341, 2698, 1872, 266, 41183, 5475, 11, 716, 1669, 1855, 2271, 95709, 266, 41183, 2959, 1155, 11, 1758, 67255, 3050, 2398, 16867, 31070, 266, 41183, 5475, 10421, 2822, 2698, 10413, 1669, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestH12_Head_ImplicitLen(t *testing.T) { h12Compare{ ReqFunc: (*Client).Head, Handler: func(w ResponseWriter, r *Request) { if r.Method != "HEAD" { t.Errorf("unexpected method %q", r.Method) } io.WriteString(w, "foo") }, }.run(t) }
explode_data.jsonl/4748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 39, 16, 17, 62, 12346, 7959, 1307, 6026, 11271, 1155, 353, 8840, 836, 8, 341, 9598, 16, 17, 27374, 515, 197, 197, 27234, 9626, 25, 4609, 2959, 568, 12346, 345, 197, 197, 3050, 25, 2915, 3622, 5949, 6492, 11, 435, 353, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSequenceKey(t *testing.T) { actual := SystemSQLCodec.SequenceKey(55) expected := []byte("\xbf\x89\x88\x88") if !bytes.Equal(actual, expected) { t.Errorf("expected %q (len %d), got %q (len %d)", expected, len(expected), actual, len(actual)) } }
explode_data.jsonl/56592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 14076, 1592, 1155, 353, 8840, 836, 8, 341, 88814, 1669, 739, 6688, 36913, 63537, 1592, 7, 20, 20, 340, 42400, 1669, 3056, 3782, 4921, 47659, 3462, 23, 24, 3462, 23, 23, 3462, 23, 23, 1138, 743, 753, 9651, 12808, 29721, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAccCirconusCheckStatsd_basic(t *testing.T) { checkName := fmt.Sprintf("statsd test check - %s", acctest.RandString(5)) resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckDestroyCirconusCheckBundle, Steps: []resource.TestStep{ { Config: fmt.Sprintf(testAccCirconusCheckStatsdConfigFmt, checkName), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("circonus_check.statsd_dump", "active", "true"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "collector.#", "1"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "collector.2084916526.id", "/broker/2110"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "statsd.#", "1"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "statsd.3733287963.source_ip", `127.0.0.2`), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "name", checkName), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "period", "60s"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "metric.#", "1"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "tags.#", "4"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "tags.3728194417", "app:consul"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "tags.2087084518", "author:terraform"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "tags.1401442048", "lifecycle:unittest"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "tags.2812916752", "source:statsd"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "target", "127.0.0.2"), resource.TestCheckResourceAttr("circonus_check.statsd_dump", "type", "statsd"), ), }, }, }) }
explode_data.jsonl/21351
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 756 }
[ 2830, 3393, 14603, 34, 88276, 355, 3973, 16635, 67, 34729, 1155, 353, 8840, 836, 8, 341, 25157, 675, 1669, 8879, 17305, 445, 16260, 67, 1273, 1779, 481, 1018, 82, 497, 1613, 67880, 2013, 437, 703, 7, 20, 4390, 50346, 8787, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconciler_StopOldCanaries(t *testing.T) { job := mock.Job() job.TaskGroups[0].Update = canaryUpdate // Create an old deployment that has placed some canaries d := structs.NewDeployment(job) s := &structs.DeploymentState{ Promoted: false, DesiredTotal: 10, DesiredCanaries: 2, PlacedAllocs: 2, } d.TaskGroups[job.TaskGroups[0].Name] = s // Update the job job.Version += 10 // Create 10 allocations from the old job var allocs []*structs.Allocation for i := 0; i < 10; i++ { alloc := mock.Alloc() alloc.Job = job alloc.JobID = job.ID alloc.NodeID = uuid.Generate() alloc.Name = structs.AllocName(job.ID, job.TaskGroups[0].Name, uint(i)) alloc.TaskGroup = job.TaskGroups[0].Name allocs = append(allocs, alloc) } // Create canaries for i := 0; i < 2; i++ { // Create one canary canary := mock.Alloc() canary.Job = job canary.JobID = job.ID canary.NodeID = uuid.Generate() canary.Name = structs.AllocName(job.ID, job.TaskGroups[0].Name, uint(i)) canary.TaskGroup = job.TaskGroups[0].Name s.PlacedCanaries = append(s.PlacedCanaries, canary.ID) canary.DeploymentID = d.ID allocs = append(allocs, canary) } reconciler := NewAllocReconciler(testLogger(), allocUpdateFnDestructive, false, job.ID, job, d, allocs, nil) r := reconciler.Compute() newD := structs.NewDeployment(job) newD.StatusDescription = structs.DeploymentStatusDescriptionRunningNeedsPromotion newD.TaskGroups[job.TaskGroups[0].Name] = &structs.DeploymentState{ DesiredCanaries: 2, DesiredTotal: 10, } // Assert the correct results assertResults(t, r, &resultExpectation{ createDeployment: newD, deploymentUpdates: []*structs.DeploymentStatusUpdate{ { DeploymentID: d.ID, Status: structs.DeploymentStatusCancelled, StatusDescription: structs.DeploymentStatusDescriptionNewerJob, }, }, place: 2, inplace: 0, stop: 2, desiredTGUpdates: map[string]*structs.DesiredUpdates{ job.TaskGroups[0].Name: { Canary: 2, Stop: 2, Ignore: 10, }, }, }) assertNamesHaveIndexes(t, intRange(0, 1), stopResultsToNames(r.stop)) assertNamesHaveIndexes(t, 
intRange(0, 1), placeResultsToNames(r.place)) }
explode_data.jsonl/67260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 922 }
[ 2830, 3393, 693, 40446, 5769, 80308, 18284, 6713, 5431, 1155, 353, 8840, 836, 8, 341, 68577, 1669, 7860, 45293, 741, 68577, 28258, 22173, 58, 15, 936, 4289, 284, 646, 658, 4289, 271, 197, 322, 4230, 458, 2310, 23172, 429, 702, 9099, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestChangesetCountsOverTime(t *testing.T) { if testing.Short() { t.Skip() } ctx := backend.WithAuthzBypass(context.Background()) dbtesting.SetupGlobalTestDB(t) rcache.SetupForTest(t) cf, save := httptestutil.NewGitHubRecorderFactory(t, *update, "test-changeset-counts-over-time") defer save() userID := insertTestUser(t, dbconn.Global, "changeset-counts-over-time", false) repoStore := repos.NewDBStore(dbconn.Global, sql.TxOptions{}) githubExtSvc := &repos.ExternalService{ Kind: extsvc.KindGitHub, DisplayName: "GitHub", Config: marshalJSON(t, &schema.GitHubConnection{ Url: "https://github.com", Token: os.Getenv("GITHUB_TOKEN"), Repos: []string{"sourcegraph/sourcegraph"}, }), } err := repoStore.UpsertExternalServices(ctx, githubExtSvc) if err != nil { t.Fatal(t) } githubSrc, err := repos.NewGithubSource(githubExtSvc, cf) if err != nil { t.Fatal(t) } githubRepo, err := githubSrc.GetRepo(ctx, "sourcegraph/sourcegraph") if err != nil { t.Fatal(err) } err = repoStore.UpsertRepos(ctx, githubRepo) if err != nil { t.Fatal(err) } store := ee.NewStore(dbconn.Global) campaign := &campaigns.Campaign{ Name: "Test campaign", Description: "Testing changeset counts", AuthorID: userID, NamespaceUserID: userID, } err = store.CreateCampaign(ctx, campaign) if err != nil { t.Fatal(err) } changesets := []*campaigns.Changeset{ { RepoID: githubRepo.ID, ExternalID: "5834", ExternalServiceType: githubRepo.ExternalRepo.ServiceType, CampaignIDs: []int64{campaign.ID}, }, { RepoID: githubRepo.ID, ExternalID: "5849", ExternalServiceType: githubRepo.ExternalRepo.ServiceType, CampaignIDs: []int64{campaign.ID}, }, } err = store.CreateChangesets(ctx, changesets...) if err != nil { t.Fatal(err) } mockState := ct.MockChangesetSyncState(&protocol.RepoInfo{ Name: api.RepoName(githubRepo.Name), VCS: protocol.VCSInfo{URL: githubRepo.URI}, }) defer mockState.Unmock() err = ee.SyncChangesets(ctx, repoStore, store, cf, changesets...) 
if err != nil { t.Fatal(err) } for _, c := range changesets { campaign.ChangesetIDs = append(campaign.ChangesetIDs, c.ID) } err = store.UpdateCampaign(ctx, campaign) if err != nil { t.Fatal(err) } // Date when PR #5834 was created: "2019-10-02T14:49:31Z" // We start exactly one day earlier // Date when PR #5849 was created: "2019-10-03T15:03:21Z" start := parseJSONTime(t, "2019-10-01T14:49:31Z") // Date when PR #5834 was merged: "2019-10-07T13:13:45Z" // Date when PR #5849 was merged: "2019-10-04T08:55:21Z" end := parseJSONTime(t, "2019-10-07T13:13:45Z") daysBeforeEnd := func(days int) time.Time { return end.AddDate(0, 0, -days) } r := &campaignResolver{store: store, Campaign: campaign} rs, err := r.ChangesetCountsOverTime(ctx, &graphqlbackend.ChangesetCountsArgs{ From: &graphqlbackend.DateTime{Time: start}, To: &graphqlbackend.DateTime{Time: end}, }) if err != nil { t.Fatalf("ChangsetCountsOverTime failed with error: %s", err) } have := make([]*ee.ChangesetCounts, 0, len(rs)) for _, cr := range rs { r := cr.(*changesetCountsResolver) have = append(have, r.counts) } want := []*ee.ChangesetCounts{ {Time: daysBeforeEnd(5), Total: 0, Open: 0}, {Time: daysBeforeEnd(4), Total: 1, Open: 1, OpenPending: 1}, {Time: daysBeforeEnd(3), Total: 2, Open: 1, OpenPending: 1, Merged: 1}, {Time: daysBeforeEnd(2), Total: 2, Open: 1, OpenPending: 1, Merged: 1}, {Time: daysBeforeEnd(1), Total: 2, Open: 1, OpenPending: 1, Merged: 1}, {Time: end, Total: 2, Merged: 2}, } if !reflect.DeepEqual(have, want) { t.Errorf("wrong counts listed. diff=%s", cmp.Diff(have, want)) } }
explode_data.jsonl/27596
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1612 }
[ 2830, 3393, 11317, 295, 63731, 1918, 1462, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 630, 20985, 1669, 19163, 26124, 5087, 89, 33, 49911, 5378, 19047, 2398, 20939, 8840, 39820, 11646, 2271, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTemplateTimeoutDuration(t *testing.T) { t.Run("Step Template Deadline", func(t *testing.T) { wf := unmarshalWF(stepTimeoutWf) cancel, controller := newController(wf) defer cancel() ctx := context.Background() woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowRunning, woc.wf.Status.Phase) time.Sleep(6 * time.Second) makePodsPhase(ctx, woc, apiv1.PodPending) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowFailed, woc.wf.Status.Phase) assert.Equal(t, wfv1.NodeFailed, woc.wf.Status.Nodes.FindByDisplayName("step1").Phase) }) t.Run("DAG Template Deadline", func(t *testing.T) { wf := unmarshalWF(dagTimeoutWf) cancel, controller := newController(wf) defer cancel() ctx := context.Background() woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowRunning, woc.wf.Status.Phase) time.Sleep(6 * time.Second) makePodsPhase(ctx, woc, apiv1.PodPending) woc = newWorkflowOperationCtx(woc.wf, controller) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowFailed, woc.wf.Status.Phase) assert.Equal(t, wfv1.NodeFailed, woc.wf.Status.Nodes.FindByDisplayName("hello-world-dag").Phase) }) t.Run("Invalid timeout format", func(t *testing.T) { wf := unmarshalWF(stepTimeoutWf) tmpl := wf.Spec.Templates[1] tmpl.Timeout = "23" wf.Spec.Templates[1] = tmpl cancel, controller := newController(wf) defer cancel() ctx := context.Background() woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) assert.Equal(t, wfv1.WorkflowFailed, woc.wf.Status.Phase) jsonByte, err := json.Marshal(woc.wf) assert.NoError(t, err) assert.Contains(t, string(jsonByte), "has invalid duration format in timeout") }) t.Run("Invalid timeout in step", func(t *testing.T) { wf := unmarshalWF(stepTimeoutWf) tmpl := wf.Spec.Templates[0] tmpl.Timeout = "23" wf.Spec.Templates[0] = tmpl cancel, controller := newController(wf) defer cancel() ctx := context.Background() woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) assert.Equal(t, 
wfv1.WorkflowFailed, woc.wf.Status.Phase) jsonByte, err := json.Marshal(woc.wf) assert.NoError(t, err) assert.Contains(t, string(jsonByte), "doesn't support timeout field") }) }
explode_data.jsonl/71029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 974 }
[ 2830, 3393, 7275, 7636, 12945, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 8304, 14355, 61956, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 6692, 69, 1669, 650, 27121, 32131, 38436, 7636, 54, 69, 340, 197, 84441, 11, 6461, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnitPostQueryHelperUsesToken(t *testing.T) { accessor := getSimpleTokenAccessor() token := "token123" accessor.SetTokens(token, "", 0) var err error postQueryTest := func(_ context.Context, _ *snowflakeRestful, _ *url.Values, headers map[string]string, _ []byte, _ time.Duration, _ uuid.UUID, _ *Config) (*execResponse, error) { if headers[headerAuthorizationKey] != fmt.Sprintf(headerSnowflakeToken, token) { t.Fatalf("authorization key doesn't match, %v vs %v", headers[headerAuthorizationKey], fmt.Sprintf(headerSnowflakeToken, token)) } dd := &execResponseData{} return &execResponse{ Data: *dd, Message: "", Code: "0", Success: true, }, nil } sr := &snowflakeRestful{ FuncPost: postTestRenew, FuncPostQuery: postQueryTest, FuncRenewSession: renewSessionTest, TokenAccessor: accessor, } _, err = postRestfulQueryHelper(context.Background(), sr, &url.Values{}, make(map[string]string), []byte{0x12, 0x34}, 0, uuid.New(), &Config{}) if err != nil { t.Fatalf("err: %v", err) } }
explode_data.jsonl/44744
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 408 }
[ 2830, 3393, 4562, 4133, 2859, 5511, 68965, 3323, 1155, 353, 8840, 836, 8, 341, 197, 5211, 269, 1669, 633, 16374, 3323, 29889, 741, 43947, 1669, 330, 5839, 16, 17, 18, 698, 197, 5211, 269, 4202, 29300, 13274, 11, 7342, 220, 15, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestQueryContextWait(t *testing.T) { db := newTestDB(t, "people") defer closeDB(t, db) prepares0 := numPrepares(t, db) // TODO(kardianos): convert this from using a timeout to using an explicit // cancel when the query signals that it is "executing" the query. ctx, cancel := context.WithTimeout(context.Background(), 300*time.Millisecond) defer cancel() // This will trigger the *fakeConn.Prepare method which will take time // performing the query. The ctxDriverPrepare func will check the context // after this and close the rows and return an error. _, err := db.QueryContext(ctx, "WAIT|1s|SELECT|people|age,name|") if err != context.DeadlineExceeded { t.Fatalf("expected QueryContext to error with context deadline exceeded but returned %v", err) } // Verify closed rows connection after error condition. waitForFree(t, db, 5*time.Second, 1) if prepares := numPrepares(t, db) - prepares0; prepares != 1 { // TODO(kardianos): if the context timeouts before the db.QueryContext // executes this check may fail. After adjusting how the context // is canceled above revert this back to a Fatal error. t.Logf("executed %d Prepare statements; want 1", prepares) } }
explode_data.jsonl/15952
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 2859, 1972, 14190, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 2271, 3506, 1155, 11, 330, 16069, 1138, 16867, 3265, 3506, 1155, 11, 2927, 340, 40346, 79, 5403, 15, 1669, 1629, 4703, 79, 5403, 1155, 11, 2927, 692, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRestoreSubscriptionMissingID(t *testing.T) { assert := assert.New(t) m := &mockSubMgr{err: fmt.Errorf("nope")} testInfo := testSubInfo("") testInfo.ID = "" _, err := restoreSubscription(m.stream, nil, testInfo) assert.EqualError(err, "No ID") }
explode_data.jsonl/10786
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 56284, 33402, 25080, 915, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 2109, 1669, 609, 16712, 3136, 25567, 90, 615, 25, 8879, 13080, 445, 2152, 375, 42132, 18185, 1731, 1669, 1273, 3136, 1731, 31764, 181...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPairSearch(t *testing.T) { // Limit runtime in case of deadlocks lim := test.TimeOut(time.Second * 10) defer lim.Stop() var config AgentConfig a, err := NewAgent(&config) if err != nil { t.Fatalf("Error constructing ice.Agent") } if len(a.checklist) != 0 { t.Fatalf("TestPairSearch is only a valid test if a.validPairs is empty on construction") } cp := a.getBestAvailableCandidatePair() if cp != nil { t.Fatalf("No Candidate pairs should exist") } assert.NoError(t, a.Close()) }
explode_data.jsonl/50560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 12443, 5890, 1155, 353, 8840, 836, 8, 341, 197, 322, 28008, 15592, 304, 1142, 315, 5593, 34936, 198, 197, 4659, 1669, 1273, 16299, 2662, 9730, 32435, 353, 220, 16, 15, 340, 16867, 4568, 30213, 2822, 2405, 2193, 20713, 2648, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDriver_DecodeCoinID(t *testing.T) { tests := []struct { name string coinID []byte want string wantErr bool }{ { "ok", []byte{ 0x16, 0x8f, 0x34, 0x3a, 0xdf, 0x17, 0xe0, 0xc3, 0xa2, 0xe8, 0x88, 0x79, 0x8, 0x87, 0x17, 0xb8, 0xac, 0x93, 0x47, 0xb9, 0x66, 0xd, 0xa7, 0x4b, 0xde, 0x3e, 0x1d, 0x1f, 0x47, 0x94, 0x9f, 0xdf, // 32 byte hash 0x0, 0x0, 0x0, 0x1, // 4 byte vout }, "df9f94471f1d3ede4ba70d66b94793acb81787087988e8a2c3e017df3a348f16:1", false, }, { "bad", []byte{ 0x16, 0x8f, 0x34, 0x3a, 0xdf, 0x17, 0xe0, 0xc3, 0xa2, 0xe8, 0x88, 0x79, 0x8, 0x87, 0x17, 0xb8, 0xac, 0x93, 0x47, 0xb9, 0x66, 0xd, 0xa7, 0x4b, 0xde, 0x3e, 0x1d, 0x1f, 0x47, 0x94, 0x9f, // 31 bytes 0x0, 0x0, 0x0, 0x1, }, "", true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { d := &Driver{} got, err := d.DecodeCoinID(tt.coinID) if (err != nil) != tt.wantErr { t.Errorf("Driver.DecodeCoinID() error = %v, wantErr %v", err, tt.wantErr) return } if got != tt.want { t.Errorf("Driver.DecodeCoinID() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/71978
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 724 }
[ 2830, 3393, 11349, 78668, 534, 41180, 915, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 197, 7160, 915, 220, 3056, 3782, 198, 197, 50780, 262, 914, 198, 197, 50780, 7747, 1807, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNew(t *testing.T) { lgr, err := newLogger() if err != nil { t.Fatal(err) } if err = lgr.Critical("test: %d", 99); err != nil { t.Fatal(err) } if err = lgr.Close(); err != nil { t.Fatal(err) } }
explode_data.jsonl/38614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 3564, 1155, 353, 8840, 836, 8, 341, 8810, 901, 11, 1848, 1669, 501, 7395, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 743, 1848, 284, 326, 901, 727, 14509, 445, 1944, 25, 1018, 67, 497, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestORCondition(t *testing.T) { logp.TestingSetup() configs := []ConditionConfig{ { OR: []ConditionConfig{ { Range: &ConditionFields{fields: map[string]interface{}{ "http.code.gte": 400, "http.code.lt": 500, }}, }, { Range: &ConditionFields{fields: map[string]interface{}{ "http.code.gte": 200, "http.code.lt": 300, }}, }, }, }, } conds := GetConditions(t, configs) for _, cond := range conds { logp.Debug("test", "%s", cond) } event := &beat.Event{ Timestamp: time.Now(), Fields: common.MapStr{ "bytes_in": 126, "bytes_out": 28033, "client_ip": "127.0.0.1", "client_port": 42840, "client_proc": "", "client_server": "mar.local", "http": common.MapStr{ "code": 404, "content_length": 76985, "phrase": "Not found", }, "ip": "127.0.0.1", "method": "GET", "params": "", "path": "/jszip.min.js", "port": 8000, "proc": "", "query": "GET /jszip.min.js", "responsetime": 30, "server": "mar.local", "status": "OK", "type": "http", }, } assert.True(t, conds[0].Check(event)) }
explode_data.jsonl/21717
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 672 }
[ 2830, 3393, 868, 10547, 1155, 353, 8840, 836, 8, 341, 6725, 79, 8787, 287, 21821, 2822, 25873, 82, 1669, 3056, 10547, 2648, 515, 197, 197, 515, 298, 197, 868, 25, 3056, 10547, 2648, 515, 571, 197, 515, 464, 197, 6046, 25, 609, 10547...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMultiRootDagTree(t *testing.T) { t.Run("GetNameAndDependents", func(t *testing.T) { treeNode1 := tree.NewTreeNode(models.JobSpec{ Name: "job1", }) treeNode2 := tree.NewTreeNode(models.JobSpec{ Name: "job2", }) multiRootTree := tree.NewMultiRootTree() treeNode1.AddDependent(treeNode2) treeNode2.AddDependent(treeNode1) multiRootTree.AddNodeIfNotExist(treeNode1) multiRootTree.AddNodeIfNotExist(treeNode2) err := multiRootTree.IsCyclic() assert.NotNil(t, err) assert.Contains(t, err.Error(), tree.ErrCyclicDependencyEncountered.Error()) }) t.Run("MarkRoot", func(t *testing.T) { treeNode1 := tree.NewTreeNode(models.JobSpec{ Name: "job1", }) multiRootTree := tree.NewMultiRootTree() multiRootTree.AddNode(treeNode1) multiRootTree.MarkRoot(treeNode1) rootNodes := multiRootTree.GetRootNodes() assert.Equal(t, 1, len(rootNodes)) assert.Equal(t, "job1", rootNodes[0].Data.GetName()) }) t.Run("IsCyclic", func(t *testing.T) { t.Run("should throw an error if cyclic", func(t *testing.T) { treeNode1 := tree.NewTreeNode(models.JobSpec{ Name: "job1", }) treeNode2 := tree.NewTreeNode(models.JobSpec{ Name: "job2", }) multiRootTree := tree.NewMultiRootTree() multiRootTree.AddNode(treeNode1) multiRootTree.AddNode(treeNode2) treeNode1.AddDependent(treeNode2) treeNode2.AddDependent(treeNode1) err := multiRootTree.IsCyclic() assert.NotNil(t, err) assert.Contains(t, err.Error(), "cycle dependency") }) t.Run("should not return error if not cyclic", func(t *testing.T) { treeNode1 := tree.NewTreeNode(models.JobSpec{ Name: "job1", }) treeNode2 := tree.NewTreeNode(models.JobSpec{ Name: "job2", }) multiRootTree := tree.NewMultiRootTree() multiRootTree.AddNode(treeNode1) multiRootTree.AddNode(treeNode2) treeNode1.AddDependent(treeNode2) err := multiRootTree.IsCyclic() assert.Nil(t, err) }) }) }
explode_data.jsonl/6310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 831 }
[ 2830, 3393, 20358, 8439, 35, 351, 6533, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 49403, 3036, 7839, 408, 805, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3244, 31583, 16, 1669, 4916, 7121, 26597, 20289, 45293, 8327, 515, 298,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindIssuesProjectIDErr(t *testing.T) { cucm, iucm, lucm, pucm, m := prepareMocksAndRUC() c, _ := prepareHTTP(echo.GET, "/api/issues/find?title=test&projectId=test&labels=test1,test2", nil) err := m.FindIssues(c) assert.NotNil(t, err) assert.Equal(t, fmt.Errorf("strconv.Atoi: parsing \"%s\": invalid syntax", "test").Error(), err.Error()) checkAssertions(t, cucm, iucm, lucm, pucm) }
explode_data.jsonl/60171
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 9885, 85828, 7849, 915, 7747, 1155, 353, 8840, 836, 8, 341, 1444, 1754, 76, 11, 600, 1754, 76, 11, 25927, 76, 11, 281, 1754, 76, 11, 296, 1669, 10549, 72577, 3036, 49, 5459, 2822, 1444, 11, 716, 1669, 10549, 9230, 2026, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTestJobs(t *testing.T) { testedConfigurations := make(map[string]struct{}) repos := map[string]struct{}{} for _, test := range tests { t.Run(test.path, func(t *testing.T) { opts := []jobsuite.Option{ jobsuite.Test(test.path, test.image), jobsuite.KymaRepo(), jobsuite.AllReleases(), } opts = append(opts, test.additionalOptions...) cfg := jobsuite.NewConfig(opts...) suite := test.suite if suite == nil { suite = tester.NewComponentSuite } ts := suite(cfg) if pathProvider, ok := ts.(jobsuite.JobConfigPathProvider); ok { testedConfigurations[path.Clean(pathProvider.JobConfigPath())] = struct{}{} } repos[cfg.Repository] = struct{}{} ts.Run(t) }) } t.Run("All Files covered by test", jobsuite.CheckFilesAreTested(repos, testedConfigurations, jobBasePath, "tests")) }
explode_data.jsonl/51386
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 2271, 40667, 1155, 353, 8840, 836, 8, 341, 18185, 291, 2648, 21449, 1669, 1281, 9147, 14032, 60, 1235, 37790, 17200, 966, 1669, 2415, 14032, 60, 1235, 6257, 16094, 2023, 8358, 1273, 1669, 2088, 7032, 341, 197, 3244, 16708, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMalformedBarExceptionCode(t *testing.T) { var counter int = 0 gateway, err := testGateway.CreateGateway(t, nil, &testGateway.Options{ KnownHTTPBackends: []string{"bar"}, TestBinary: util.DefaultMainFile("example-gateway"), ConfigFiles: util.DefaultConfigFiles("example-gateway"), }) if !assert.NoError(t, err, "got bootstrap err") { return } defer gateway.Close() gateway.HTTPBackends()["bar"].HandleFunc( "POST", "/bar-path", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(403) if _, err := w.Write([]byte("")); err != nil { t.Fatal("can't write fake response") } counter++ }, ) res, err := gateway.MakeRequest( "POST", "/bar/bar-path", nil, bytes.NewReader([]byte(`{ "request":{"stringField":"foo","boolField":true,"binaryField":"aGVsbG8=","timestamp":123,"enumField":0,"longField":123} }`)), ) if !assert.NoError(t, err, "got http error") { return } assert.Equal(t, "500 Internal Server Error", res.Status) assert.Equal(t, 1, counter) }
explode_data.jsonl/3149
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 29600, 10155, 3428, 1354, 2078, 1155, 353, 8840, 836, 8, 341, 2405, 5546, 526, 284, 220, 15, 271, 3174, 12043, 11, 1848, 1669, 1273, 40709, 7251, 40709, 1155, 11, 2092, 11, 609, 1944, 40709, 22179, 515, 197, 39340, 4169, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInsertNode(t *testing.T) { dir := testutils.InitTestEnv(ModuleName, t) tbl := makeTable(t, dir, 2, common.K*6) defer tbl.store.driver.Close() tbl.GetSchema().PrimaryKey = 1 bat := compute.MockBatch(tbl.GetSchema().Types(), common.K, int(tbl.GetSchema().PrimaryKey), nil) p, _ := ants.NewPool(5) var wg sync.WaitGroup var all uint64 worker := func(id uint64) func() { return func() { defer wg.Done() cnt := getNodes() nodes := make([]*insertNode, cnt) for i := 0; i < cnt; i++ { var cid common.ID cid.BlockID = id cid.Idx = uint16(i) n := NewInsertNode(tbl, tbl.store.nodesMgr, cid, tbl.store.driver) nodes[i] = n h := tbl.store.nodesMgr.Pin(n) var err error if err = n.Expand(common.K*1, func() error { n.Append(bat, 0) return nil }); err != nil { err = n.Expand(common.K*1, func() error { n.Append(bat, 0) return nil }) } if err != nil { assert.NotNil(t, err) } h.Close() } for _, n := range nodes { // n.ToTransient() n.Close() } atomic.AddUint64(&all, uint64(len(nodes))) } } idAlloc := common.NewIdAlloctor(1) for { id := idAlloc.Alloc() if id > 10 { break } wg.Add(1) p.Submit(worker(id)) } wg.Wait() t.Log(all) t.Log(tbl.store.nodesMgr.String()) t.Log(common.GPool.String()) }
explode_data.jsonl/14638
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 681 }
[ 2830, 3393, 13780, 1955, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 1273, 6031, 26849, 2271, 14359, 75295, 675, 11, 259, 340, 3244, 2024, 1669, 1281, 2556, 1155, 11, 5419, 11, 220, 17, 11, 4185, 11352, 9, 21, 340, 16867, 21173, 16114,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReverseScanWithSplitAndMerge(t *testing.T) { defer leaktest.AfterTest(t)() s, _ := startNoSplitMergeServer(t) defer s.Stopper().Stop(context.TODO()) db := initReverseScanTestEnv(s, t) // Case 1: An encounter with a range split. // Split the range ["b", "e") at "c". if err := db.AdminSplit(context.TODO(), "c", "c", hlc.MaxTimestamp /* expirationTime */); err != nil { t.Fatal(err) } // The ReverseScan will run into a stale descriptor. if rows, err := db.ReverseScan(context.TODO(), "a", "d", 0); err != nil { t.Fatalf("unexpected error on ReverseScan: %s", err) } else if l := len(rows); l != 3 { t.Errorf("expected 3 rows; got %d", l) } // Case 2: encounter with range merge . // Merge the range ["e", "g") and ["g", "\xff\xff") . if err := db.AdminMerge(context.TODO(), "e"); err != nil { t.Fatal(err) } if rows, err := db.ReverseScan(context.TODO(), "d", "g", 0); err != nil { t.Fatalf("unexpected error on ReverseScan: %s", err) } else if l := len(rows); l != 3 { t.Errorf("expected 3 rows; got %d", l) } }
explode_data.jsonl/36466
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 422 }
[ 2830, 3393, 45695, 26570, 2354, 20193, 3036, 52096, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 1903, 11, 716, 1669, 1191, 2753, 20193, 52096, 5475, 1155, 340, 16867, 274, 7758, 18487, 1005, 10674, 5378, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestV1SetKeyWithTTL(t *testing.T) { tests.RunServer(func(s *server.Server) { t0 := time.Now() v := url.Values{} v.Set("value", "XXX") v.Set("ttl", "20") resp, _ := tests.PutForm(fmt.Sprintf("%s%s", s.URL(), "/v1/keys/foo/bar"), v) assert.Equal(t, resp.StatusCode, http.StatusOK) body := tests.ReadBodyJSON(resp) assert.Equal(t, body["ttl"], 20, "") // Make sure the expiration date is correct. expiration, _ := time.Parse(time.RFC3339Nano, body["expiration"].(string)) assert.Equal(t, expiration.Sub(t0)/time.Second, 20, "") }) }
explode_data.jsonl/24835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 241 }
[ 2830, 3393, 53, 16, 1649, 1592, 2354, 51, 13470, 1155, 353, 8840, 836, 8, 341, 78216, 16708, 5475, 18552, 1141, 353, 4030, 22997, 8, 341, 197, 3244, 15, 1669, 882, 13244, 741, 197, 5195, 1669, 2515, 35145, 16094, 197, 5195, 4202, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddOrphanedIgnore(t *testing.T) { fixture.EnsureCleanState(t) projectName := "proj-" + strconv.FormatInt(time.Now().Unix(), 10) _, err := fixture.AppClientset.ArgoprojV1alpha1().AppProjects(fixture.ArgoCDNamespace).Create( context.Background(), &v1alpha1.AppProject{ObjectMeta: metav1.ObjectMeta{Name: projectName}}, metav1.CreateOptions{}) if err != nil { t.Fatalf("Unable to create project %v", err) } _, err = fixture.RunCli("proj", "add-orphaned-ignore", projectName, "group", "kind", "--name", "name", ) if err != nil { t.Fatalf("Unable to add resource to orphaned ignore %v", err) } _, err = fixture.RunCli("proj", "add-orphaned-ignore", projectName, "group", "kind", "--name", "name", ) assert.Error(t, err) assert.True(t, strings.Contains(err.Error(), "already defined")) proj, err := fixture.AppClientset.ArgoprojV1alpha1().AppProjects(fixture.ArgoCDNamespace).Get(context.Background(), projectName, metav1.GetOptions{}) assert.NoError(t, err) assert.Equal(t, projectName, proj.Name) assert.Equal(t, 1, len(proj.Spec.OrphanedResources.Ignore)) assert.Equal(t, "group", proj.Spec.OrphanedResources.Ignore[0].Group) assert.Equal(t, "kind", proj.Spec.OrphanedResources.Ignore[0].Kind) assert.Equal(t, "name", proj.Spec.OrphanedResources.Ignore[0].Name) assertProjHasEvent(t, proj, "update", argo.EventReasonResourceUpdated) }
explode_data.jsonl/58446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 549 }
[ 2830, 3393, 2212, 2195, 9943, 291, 12497, 1155, 353, 8840, 836, 8, 341, 1166, 12735, 22834, 19098, 27529, 1397, 1155, 692, 72470, 675, 1669, 330, 30386, 27651, 488, 33317, 9978, 1072, 9730, 13244, 1005, 55832, 1507, 220, 16, 15, 340, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestKeeperDB_EligibleUpkeeps_KeepersCycleAllUpkeeps(t *testing.T) { t.Parallel() store, orm, cleanup := setupKeeperDB(t) defer cleanup() db := store.DB ethKeyStore := cltest.NewKeyStore(t, store.DB).Eth() registry, _ := cltest.MustInsertKeeperRegistry(t, store, ethKeyStore) registry.NumKeepers = 5 registry.KeeperIndex = 3 require.NoError(t, store.DB.Save(&registry).Error) for i := 0; i < 1000; i++ { cltest.MustInsertUpkeepForRegistry(t, store, registry) } cltest.AssertCount(t, db, keeper.Registry{}, 1) cltest.AssertCount(t, db, &keeper.UpkeepRegistration{}, 1000) // in a full cycle, each node should be responsible for each upkeep exactly once list1, err := orm.EligibleUpkeepsForRegistry(context.Background(), registry.ContractAddress, 20, 0) // someone eligible require.NoError(t, err) list2, err := orm.EligibleUpkeepsForRegistry(context.Background(), registry.ContractAddress, 40, 0) // someone eligible require.NoError(t, err) list3, err := orm.EligibleUpkeepsForRegistry(context.Background(), registry.ContractAddress, 60, 0) // someone eligible require.NoError(t, err) list4, err := orm.EligibleUpkeepsForRegistry(context.Background(), registry.ContractAddress, 80, 0) // someone eligible require.NoError(t, err) list5, err := orm.EligibleUpkeepsForRegistry(context.Background(), registry.ContractAddress, 100, 0) // someone eligible require.NoError(t, err) totalEligible := len(list1) + len(list2) + len(list3) + len(list4) + len(list5) require.Equal(t, 1000, totalEligible) }
explode_data.jsonl/27012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 539 }
[ 2830, 3393, 77233, 3506, 2089, 7708, 1238, 2324, 440, 7124, 62, 19434, 388, 44820, 2403, 2324, 440, 7124, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 57279, 11, 67602, 11, 21290, 1669, 6505, 77233, 3506, 1155, 340, 16867, 2129...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInterpolateMSHDiverging(t *testing.T) { type test struct { start, end msh convergeM, scalar, convergePoint float64 result msh } tests := []test{ { start: msh{M: 80, S: 1.08, H: -1.1}, end: msh{M: 80, S: 1.08, H: 0.5}, convergeM: 88, convergePoint: 0.5, scalar: 0.125, result: msh{M: 82, S: 0.81, H: -1.2402896406131008}, }, { start: msh{M: 80, S: 1.08, H: -1.1}, end: msh{M: 80, S: 1.08, H: 0.5}, convergeM: 88, convergePoint: 0.5, scalar: 0.5, result: msh{M: 88, S: 0, H: 0}, }, { start: msh{M: 80, S: 1.08, H: -1.1}, end: msh{M: 80, S: 1.08, H: 0.5}, convergeM: 88, convergePoint: 0.5, scalar: 0.75, result: msh{M: 84, S: 0.54, H: 0.7805792812262012}, }, { start: msh{M: 80, S: 1.08, H: -1.1}, end: msh{M: 80, S: 1.08, H: 0.5}, convergeM: 88, convergePoint: 0.75, scalar: 0.7499999999999999, result: msh{M: 88, S: 1.1990408665951691e-16, H: -1.6611585624524023}, }, { start: msh{M: 80, S: 1.08, H: -1.1}, end: msh{M: 80, S: 1.08, H: 0.5}, convergeM: 88, convergePoint: 0.75, scalar: 0.75, result: msh{M: 88, S: 0, H: 0}, }, { start: msh{M: 80, S: 1.08, H: -1.1}, end: msh{M: 80, S: 1.08, H: 0.5}, convergeM: 88, convergePoint: 0.75, scalar: 0.7500000000000001, result: msh{M: 88, S: 2.3980817331903383e-16, H: 1.0611585624524023}, }, } for i, test := range tests { p := newSmoothDiverging(test.start, test.end, test.convergeM) p.SetMin(0) p.SetMax(1) result := p.(*smoothDiverging).interpolateMSHDiverging(test.scalar, test.convergePoint) if result != test.result { t.Errorf("test %d: expected %v; got %v", i, test.result, result) } } }
explode_data.jsonl/422
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1230 }
[ 2830, 3393, 3306, 45429, 4826, 19147, 1524, 3173, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 2036, 341, 197, 21375, 11, 835, 5180, 296, 927, 198, 197, 37203, 423, 709, 44, 11, 17274, 11, 79767, 2609, 2224, 21, 19, 198, 197, 9559, 59...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_printReport(t *testing.T) { type args struct { report Report reportType string reportFilePath string } tests := []struct { name string args args }{ // TODO: Add test cases. } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { }) } }
explode_data.jsonl/49960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 10064, 10361, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 69931, 260, 8259, 198, 197, 69931, 929, 257, 914, 198, 197, 69931, 19090, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultipleCompositeLiteralReturnStatement(t *testing.T) { p := createParser(`return assetType{name:"OrderBookgo", age:"Five"}, OrderBook{name:"Katter"}`) bvmUtils.Assert(t, isReturnStatement(p), "should detect return statement") parseReturnStatement(p) u := p.scope.Next() bvmUtils.AssertNow(t, u.Type() == ast.ReturnStatement, "wrong return type") r := u.(*ast.ReturnStatementNode) bvmUtils.AssertNow(t, len(r.Results) == 2, fmt.Sprintf("wrong result length: %d", len(r.Results))) bvmUtils.AssertNow(t, r.Results[0].Type() == ast.CompositeLiteral, "wrong result 0 type") bvmUtils.AssertNow(t, r.Results[1].Type() == ast.CompositeLiteral, "wrong result 1 type") }
explode_data.jsonl/49737
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 32089, 41685, 17350, 5598, 8636, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1855, 6570, 5809, 689, 9329, 929, 47006, 2974, 4431, 7134, 3346, 497, 4231, 2974, 37020, 14345, 7217, 7134, 47006, 2974, 42, 1650, 1, 27085, 2233, 73...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEndpointMalformed(t *testing.T) { _, buildErr := Builder(openrtb_ext.BidderAMX, config.Adapter{ Endpoint: " http://leading.space.is.invalid"}) assert.Error(t, buildErr) }
explode_data.jsonl/7170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 27380, 29600, 10155, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1936, 7747, 1669, 20626, 30981, 3342, 65, 9927, 1785, 307, 1107, 1402, 55, 11, 2193, 34190, 515, 197, 197, 27380, 25, 330, 1758, 1110, 20654, 35883, 2079, 55721, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestValidCycloneDX(t *testing.T) { imageFixture := func(t *testing.T) string { fixtureImageName := "image-pkg-coverage" imagetest.GetFixtureImage(t, "docker-archive", fixtureImageName) tarPath := imagetest.GetFixtureImageTarPath(t, fixtureImageName) return "docker-archive:" + tarPath } // TODO update image to exercise entire cyclonedx schema tests := []struct { name string subcommand string args []string fixture func(*testing.T) string assertions []traitAssertion }{ { name: "validate cyclonedx output", subcommand: "packages", args: []string{"-o", "cyclonedx-json"}, fixture: imageFixture, assertions: []traitAssertion{ assertSuccessfulReturnCode, assertValidCycloneDX, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { fixtureRef := test.fixture(t) args := []string{ test.subcommand, fixtureRef, "-q", } for _, a := range test.args { args = append(args, a) } cmd, stdout, stderr := runSyft(t, nil, args...) for _, traitFn := range test.assertions { traitFn(t, stdout, stderr, cmd.ProcessState.ExitCode()) } if t.Failed() { t.Log("STDOUT:\n", stdout) t.Log("STDERR:\n", stderr) t.Log("COMMAND:", strings.Join(cmd.Args, " ")) } validateCycloneDXJSON(t, stdout) }) } }
explode_data.jsonl/6194
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 586 }
[ 2830, 3393, 4088, 34, 6179, 603, 16591, 1155, 353, 8840, 836, 8, 341, 31426, 18930, 1669, 2915, 1155, 353, 8840, 836, 8, 914, 341, 197, 1166, 12735, 1906, 675, 1669, 330, 1805, 2268, 7351, 12, 54250, 698, 197, 197, 28016, 57824, 2234,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContentTypeCreateWithID(t *testing.T) { var err error assert := assert.New(t) handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { assert.Equal(r.Method, "PUT") assert.Equal(r.RequestURI, "/spaces/id1/content_types/mycontenttype") checkHeaders(r, assert) w.WriteHeader(200) fmt.Fprintln(w, string(readTestData("content_type-updated.json"))) }) // test server server := httptest.NewServer(handler) defer server.Close() // cma client cma = NewCMA(CMAToken) cma.BaseURL = server.URL // test content type ct := &ContentType{ Sys: &Sys{ ID: "mycontenttype", }, Name: "MyContentType", } cma.ContentTypes.Upsert("id1", ct) assert.Nil(err) }
explode_data.jsonl/66082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 283 }
[ 2830, 3393, 29504, 4021, 2354, 915, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 6948, 1669, 2060, 7121, 1155, 692, 53326, 1669, 1758, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6948, 12808, 2601, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSortedGetter(t *testing.T) { firstTime := timeutils.GetKeptnTimeStamp(t0.Add(-time.Second * 2)) secondTime := timeutils.GetKeptnTimeStamp(t0.Add(-time.Second)) thirdTime := timeutils.GetKeptnTimeStamp(t0) events := []*models.KeptnContextExtendedCE{ {Time: t0.Add(-time.Second)}, {Time: t0}, {Time: t0.Add(-time.Second * 2)}, } SortByTime(events) assert.Equal(t, timeutils.GetKeptnTimeStamp(events[0].Time), firstTime) assert.Equal(t, timeutils.GetKeptnTimeStamp(events[1].Time), secondTime) assert.Equal(t, timeutils.GetKeptnTimeStamp(events[2].Time), thirdTime) for _, e := range events { fmt.Println(e.Time) } }
explode_data.jsonl/75647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 270 }
[ 2830, 3393, 51051, 31485, 1155, 353, 8840, 836, 8, 1476, 42190, 1462, 1669, 882, 6031, 2234, 6608, 417, 77, 66146, 1155, 15, 1904, 4080, 1678, 32435, 353, 220, 17, 1171, 197, 5569, 1462, 1669, 882, 6031, 2234, 6608, 417, 77, 66146, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewDifferentImageUpdate(t *testing.T) { // this buildconfig references a different image than the one that will be updated buildcfg := mockBuildConfig("registry.com/namespace/imagename1", "registry.com/namespace/imagename1", "testImageRepo1", "testTag1") imageStream := mockImageStream("testImageRepo2", "registry.com/namespace/imagename2", map[string]string{"testTag2": "newImageID123"}) image := mockImage("testImage@id", "registry.com/namespace/imagename@id") controller := mockImageChangeController(buildcfg, imageStream, image) bcInstantiator := controller.BuildConfigInstantiator.(*buildConfigInstantiator) bcUpdater := bcInstantiator.buildConfigUpdater err := controller.HandleImageRepo(imageStream) if err != nil { t.Errorf("Unexpected error %v from HandleImageRepo", err) } if len(bcInstantiator.name) != 0 { t.Error("New build generated when a different repository was updated!") } if bcUpdater.buildcfg != nil { t.Error("BuildConfig was updated when a different repository was updated!") } }
explode_data.jsonl/69173
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 3564, 69123, 1906, 4289, 1155, 353, 8840, 836, 8, 341, 197, 322, 419, 1936, 1676, 15057, 264, 2155, 2168, 1091, 279, 825, 429, 686, 387, 6049, 198, 69371, 14072, 1669, 7860, 11066, 2648, 445, 29172, 905, 14, 2231, 14, 2801...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_Section(t *testing.T) { Convey("Test CRD sections", t, func() { cfg, err := Load([]byte(_CONF_DATA), "testdata/conf.cfg") So(err, ShouldBeNil) So(cfg, ShouldNotBeNil) Convey("Get section strings", func() { So(strings.Join(cfg.SectionStrings(), ","), ShouldEqual, "DEFAULT") }) Convey("Delete a section", func() { cfg.DeleteSection("") So(len(cfg.SectionStrings()), ShouldEqual, 0) }) Convey("Create new sections", func() { cfg.NewSections("test", "test2") _, err := cfg.GetSection("test") So(err, ShouldBeNil) _, err = cfg.GetSection("test2") So(err, ShouldBeNil) }) }) }
explode_data.jsonl/55808
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 1098, 11915, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 2271, 12617, 35, 14158, 497, 259, 11, 2915, 368, 341, 197, 50286, 11, 1848, 1669, 8893, 10556, 3782, 2490, 38634, 7896, 701, 330, 92425, 59241, 30481, 1138, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMsgCreateClass(t *testing.T) { _, _, addr1 := testdata.KeyTestPubAddr() _, _, addr2 := testdata.KeyTestPubAddr() tests := map[string]struct { src MsgCreateClass expErr bool }{ "valid msg": { src: MsgCreateClass{ Admin: addr1.String(), Issuers: []string{addr1.String(), addr2.String()}, CreditTypeName: "carbon", Metadata: []byte("hello"), }, expErr: false, }, "valid msg without metadata": { src: MsgCreateClass{ Admin: addr1.String(), CreditTypeName: "carbon", Issuers: []string{addr1.String(), addr2.String()}, }, expErr: false, }, "invalid without admin": { src: MsgCreateClass{}, expErr: true, }, "invalid without issuers": { src: MsgCreateClass{ Admin: addr1.String(), CreditTypeName: "carbon", }, expErr: true, }, "invalid with wrong issuers": { src: MsgCreateClass{ Admin: addr1.String(), CreditTypeName: "carbon", Issuers: []string{"xyz", "xyz1"}, }, expErr: true, }, "invalid with wrong admin": { src: MsgCreateClass{ Admin: "wrongAdmin", CreditTypeName: "carbon", Issuers: []string{addr1.String(), addr2.String()}, }, expErr: true, }, "invalid with no credit type": { src: MsgCreateClass{ Admin: addr1.String(), Issuers: []string{addr1.String(), addr2.String()}, }, expErr: true, }, "invalid metadata maxlength is exceeded": { src: MsgCreateClass{ Admin: addr1.String(), CreditTypeName: "carbon", Issuers: []string{addr1.String(), addr2.String()}, Metadata: []byte(simtypes.RandStringOfLength(r, 288)), }, expErr: true, }, } for msg, test := range tests { t.Run(msg, func(t *testing.T) { err := test.src.ValidateBasic() if test.expErr { require.Error(t, err) } else { require.NoError(t, err) } }) } }
explode_data.jsonl/65126
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 933 }
[ 2830, 3393, 6611, 4021, 1957, 1155, 353, 8840, 836, 8, 341, 197, 6878, 8358, 10789, 16, 1669, 1273, 691, 9610, 2271, 29162, 13986, 741, 197, 6878, 8358, 10789, 17, 1669, 1273, 691, 9610, 2271, 29162, 13986, 741, 78216, 1669, 2415, 14032...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStartProcessStop(t *testing.T) { c := BackendConfig{ "save_process": "HeadersParser|Debugger", "log_received_mails": true, "save_workers_size": 2, } gateway := &BackendGateway{} err := gateway.Initialize(c) mainlog, _ := log.GetLogger(log.OutputOff.String(), "debug") Svc.SetMainlog(mainlog) if err != nil { t.Error("Gateway did not init because:", err) t.Fail() } err = gateway.Start() if err != nil { t.Error("Gateway did not start because:", err) t.Fail() } if gateway.State != BackendStateRunning { t.Error("gateway.State is not in rinning state, got ", gateway.State) } // can we place an envelope on the conveyor channel? e := &mail.Envelope{ RemoteIP: "127.0.0.1", QueuedId: "abc12345", Helo: "helo.example.com", MailFrom: mail.Address{User: "test", Host: "example.com"}, TLS: true, } e.PushRcpt(mail.Address{User: "test", Host: "example.com"}) e.Data.WriteString("Subject:Test\n\nThis is a test.") notify := make(chan *notifyMsg) gateway.conveyor <- &workerMsg{e, notify, TaskSaveMail} // it should not produce any errors // headers (subject) should be parsed. select { case status := <-notify: if status.err != nil { t.Error("envelope processing failed with:", status.err) } if e.Header["Subject"][0] != "Test" { t.Error("envelope processing did not parse header") } case <-time.After(time.Second): t.Error("gateway did not respond after 1 second") t.Fail() } err = gateway.Shutdown() if err != nil { t.Error("Gateway did not shutdown") } }
explode_data.jsonl/79604
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 611 }
[ 2830, 3393, 3479, 7423, 10674, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 55260, 2648, 515, 197, 197, 1, 6628, 11305, 788, 981, 330, 10574, 6570, 91, 67239, 756, 197, 197, 47012, 40783, 717, 6209, 788, 830, 345, 197, 197, 1, 6628, 43...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestNoTwopc(t *testing.T) { txe, tsv, db := newNoTwopcExecutor(t) defer db.Close() defer tsv.StopService() testcases := []struct { desc string fun func() error }{{ desc: "Prepare", fun: func() error { return txe.Prepare(1, "aa") }, }, { desc: "CommitPrepared", fun: func() error { return txe.CommitPrepared("aa") }, }, { desc: "RollbackPrepared", fun: func() error { return txe.RollbackPrepared("aa", 1) }, }, { desc: "CreateTransaction", fun: func() error { return txe.CreateTransaction("aa", nil) }, }, { desc: "StartCommit", fun: func() error { return txe.StartCommit(1, "aa") }, }, { desc: "SetRollback", fun: func() error { return txe.SetRollback("aa", 1) }, }, { desc: "ConcludeTransaction", fun: func() error { return txe.ConcludeTransaction("aa") }, }, { desc: "ReadTransaction", fun: func() error { _, err := txe.ReadTransaction("aa") return err }, }, { desc: "ReadAllTransactions", fun: func() error { _, _, _, err := txe.ReadTwopcInflight() return err }, }} want := "2pc is not enabled" for _, tc := range testcases { err := tc.fun() require.EqualError(t, err, want) } }
explode_data.jsonl/25178
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 488 }
[ 2830, 3393, 2753, 22816, 67858, 1155, 353, 8840, 836, 8, 341, 3244, 8371, 11, 259, 3492, 11, 2927, 1669, 501, 2753, 22816, 67858, 25255, 1155, 340, 16867, 2927, 10421, 741, 16867, 259, 3492, 30213, 1860, 2822, 18185, 23910, 1669, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAccTest_update_test(t *testing.T) { teamId := os.Getenv("RUNSCOPE_TEAM_ID") bucketName := testAccRandomBucketName() test1 := &runscope.Test{} test2 := &runscope.Test{} resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, ProviderFactories: testAccProviderFactories, CheckDestroy: testAccCheckTestDestroy, Steps: []resource.TestStep{ { Config: fmt.Sprintf(testAccTestDefaultConfig, bucketName, teamId), Check: resource.ComposeTestCheckFunc( testAccCheckTestExists("runscope_test.test", test1), resource.TestCheckResourceAttr("runscope_test.test", "name", "runscope test"), resource.TestCheckResourceAttr("runscope_test.test", "description", ""), resource.TestCheckResourceAttrSet("runscope_test.test", "default_environment_id"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_at"), resource.TestCheckResourceAttr("runscope_test.test", "created_by.#", "1"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.id"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.name"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.email"), ), }, { Config: fmt.Sprintf(testAccTestCustomConfig, bucketName, teamId), Check: resource.ComposeTestCheckFunc( testAccCheckTestExists("runscope_test.test", test2), resource.TestCheckResourceAttr("runscope_test.test", "name", "runscope custom test"), resource.TestCheckResourceAttr("runscope_test.test", "description", "runscope custom test description"), resource.TestCheckResourceAttrSet("runscope_test.test", "default_environment_id"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_at"), resource.TestCheckResourceAttr("runscope_test.test", "created_by.#", "1"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.id"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.name"), resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.email"), 
resource.TestCheckResourceAttrSet("runscope_test.test", "trigger_url"), testAccCheckTestIdEqual(test1, test2), ), }, }, }) }
explode_data.jsonl/52177
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 842 }
[ 2830, 3393, 14603, 2271, 8882, 4452, 1155, 353, 8840, 836, 8, 341, 197, 9196, 764, 1669, 2643, 64883, 445, 47390, 76365, 1740, 71198, 3450, 1138, 2233, 11152, 675, 1669, 1273, 14603, 13999, 36018, 675, 741, 18185, 16, 1669, 609, 6108, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceOptions(t *testing.T) { t.Parallel() test := NewE2eTest(t) test.Setup(t) defer test.Teardown(t) t.Run("create and validate service with concurrency options", func(t *testing.T) { test.serviceCreateWithOptions(t, "svc1", []string{"--concurrency-limit", "250", "--concurrency-target", "300"}) test.validateServiceConcurrencyTarget(t, "svc1", "300") test.validateServiceConcurrencyLimit(t, "svc1", "250") }) t.Run("update and validate service with concurrency limit", func(t *testing.T) { test.serviceUpdate(t, "svc1", []string{"--concurrency-limit", "300"}) test.validateServiceConcurrencyLimit(t, "svc1", "300") }) t.Run("update concurrency options with invalid values for service", func(t *testing.T) { command := []string{"service", "update", "svc1", "--concurrency-limit", "-1", "--concurrency-target", "0"} _, err := test.kn.RunWithOpts(command, runOpts{NoNamespace: false, AllowError: true}) assert.ErrorContains(t, err, "invalid") }) t.Run("returns steady concurrency options for service", func(t *testing.T) { test.validateServiceConcurrencyLimit(t, "svc1", "300") test.validateServiceConcurrencyTarget(t, "svc1", "300") }) t.Run("delete service", func(t *testing.T) { test.serviceDelete(t, "svc1") }) t.Run("create and validate service with min/max scale options ", func(t *testing.T) { test.serviceCreateWithOptions(t, "svc2", []string{"--min-scale", "1", "--max-scale", "3"}) test.validateServiceMinScale(t, "svc2", "1") test.validateServiceMaxScale(t, "svc2", "3") }) t.Run("update and validate service with max scale option", func(t *testing.T) { test.serviceUpdate(t, "svc2", []string{"--max-scale", "2"}) test.validateServiceMaxScale(t, "svc2", "2") }) t.Run("delete service", func(t *testing.T) { test.serviceDelete(t, "svc2") }) t.Run("create, update and validate service with annotations", func(t *testing.T) { test.serviceCreateWithOptions(t, "svc3", []string{"--annotation", "alpha=wolf", "--annotation", "brave=horse"}) test.validateServiceAnnotations(t, "svc3", 
map[string]string{"alpha": "wolf", "brave": "horse"}) test.serviceUpdate(t, "svc3", []string{"--annotation", "alpha=direwolf", "--annotation", "brave-"}) test.validateServiceAnnotations(t, "svc3", map[string]string{"alpha": "direwolf", "brave": ""}) test.serviceDelete(t, "svc3") }) }
explode_data.jsonl/20856
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 881 }
[ 2830, 3393, 1860, 3798, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 18185, 1669, 1532, 36, 17, 68, 2271, 1155, 340, 18185, 39820, 1155, 340, 16867, 1273, 94849, 37496, 1155, 692, 3244, 16708, 445, 3182, 323, 9593, 2473, 448, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApiTest_Intercept(t *testing.T) { handler := http.NewServeMux() handler.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) { if r.URL.RawQuery != "a[]=xxx&a[]=yyy" { w.WriteHeader(http.StatusOK) return } if r.Header.Get("Auth-Token") != "12345" { w.WriteHeader(http.StatusOK) return } w.WriteHeader(http.StatusOK) }) apitest.New(). Handler(handler). Intercept(func(req *http.Request) { req.URL.RawQuery = "a[]=xxx&a[]=yyy" req.Header.Set("Auth-Token", req.Header.Get("authtoken")) }). Get("/hello"). Headers(map[string]string{"authtoken": "12345"}). Expect(t). Status(http.StatusOK). End() }
explode_data.jsonl/54814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 6563, 2271, 79717, 1484, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 1758, 7121, 60421, 44, 2200, 741, 53326, 63623, 4283, 14990, 497, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 435, 20893, 50575, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTest(t *testing.T) { RegisterFailHandler(Fail) rr := []Reporter{} if ginkgo_reporters.Polarion.Run { rr = append(rr, &ginkgo_reporters.Polarion) } if *junitPath != "" { junitFile := path.Join(*junitPath, "setup_junit.xml") rr = append(rr, reporters.NewJUnitReporter(junitFile)) } if *reportPath != "" { reportFile := path.Join(*reportPath, "setup_failure_report.log") reporter, err := testutils.NewReporter(reportFile) if err != nil { log.Fatalf("Failed to create log reporter %s", err) } rr = append(rr, reporter) } RunSpecsWithDefaultAndCustomReporters(t, "CNF Features e2e setup", rr) }
explode_data.jsonl/52116
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 2271, 1155, 353, 8840, 836, 8, 341, 79096, 19524, 3050, 7832, 604, 692, 197, 634, 1669, 3056, 52766, 16094, 743, 342, 766, 3346, 14813, 388, 1069, 7417, 290, 16708, 341, 197, 197, 634, 284, 8737, 80818, 11, 609, 70, 766, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDriver(t *testing.T) { if v := drv.String(); v != "rpi" { t.Fatal(v) } if v := drv.Prerequisites(); v != nil { t.Fatal(v) } if v := drv.After(); reflect.DeepEqual(v, []string{"bcm2835-gpio"}) { t.Fatal(v) } }
explode_data.jsonl/11568
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 11349, 1155, 353, 8840, 836, 8, 341, 743, 348, 1669, 68770, 6431, 2129, 348, 961, 330, 81, 2493, 1, 341, 197, 3244, 26133, 3747, 340, 197, 532, 743, 348, 1669, 68770, 17947, 82301, 2129, 348, 961, 2092, 341, 197, 3244, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCoinin(t *testing.T) { client := v1.New(&v1.Config{ Key: os.Getenv("BFKEY"), Secret: os.Getenv("BFSECRET"), }) coin, err := client.Coinin(coins.NewForIn().SetPagination(10, 0, 0)) assert.NoError(t, err) for i, v := range *coin { fmt.Printf("%d: %+v\n", i, v) } }
explode_data.jsonl/41204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 41180, 258, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 348, 16, 7121, 2099, 85, 16, 10753, 515, 197, 55242, 25, 262, 2643, 64883, 445, 19883, 4784, 4461, 197, 7568, 50856, 25, 2643, 64883, 445, 19883, 65310, 4461, 197, 351...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPageWithSummaryParameter(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages page.Pages) { p := pages[0] checkPageTitle(t, p, "SimpleWithSummaryParameter") checkPageContent(t, p, normalizeExpected(ext, "<p>Some text.</p>\n\n<p>Some more text.</p>\n"), ext) // Summary is not Asciidoctor- or RST-compatibile so don't test them if ext != "ad" && ext != "rst" { checkPageSummary(t, p, normalizeExpected(ext, "Page with summary parameter and <a href=\"http://www.example.com/\">a link</a>"), ext) } checkPageType(t, p, "page") } testAllMarkdownEnginesForPages(t, assertFunc, nil, simplePageWithSummaryParameter) }
explode_data.jsonl/60610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 2665, 2354, 19237, 4971, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 6948, 9626, 1669, 2915, 1155, 353, 8840, 836, 11, 1303, 914, 11, 6816, 2150, 68829, 8, 341, 197, 3223, 1669, 6816, 58, 15, 921, 197, 25157, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFindFitAllError(t *testing.T) { nodes := makeNodeList([]string{"3", "2", "1"}) scheduler := makeScheduler( nodes, st.RegisterQueueSortPlugin(queuesort.Name, queuesort.New), st.RegisterFilterPlugin("TrueFilter", NewTrueFilterPlugin), st.RegisterFilterPlugin("MatchFilter", NewMatchFilterPlugin), st.RegisterBindPlugin(defaultbinder.Name, defaultbinder.New), ) _, nodeToStatusMap, err := scheduler.findNodesThatFitPod(context.Background(), framework.NewCycleState(), &v1.Pod{}) if err != nil { t.Errorf("unexpected error: %v", err) } if len(nodeToStatusMap) != len(nodes) { t.Errorf("unexpected failed status map: %v", nodeToStatusMap) } for _, node := range nodes { t.Run(node.Name, func(t *testing.T) { status, found := nodeToStatusMap[node.Name] if !found { t.Errorf("failed to find node %v in %v", node.Name, nodeToStatusMap) } reasons := status.Reasons() if len(reasons) != 1 || reasons[0] != ErrReasonFake { t.Errorf("unexpected failure reasons: %v", reasons) } }) } }
explode_data.jsonl/2390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 396 }
[ 2830, 3393, 9885, 23346, 2403, 1454, 1155, 353, 8840, 836, 8, 341, 79756, 1669, 1281, 1955, 852, 10556, 917, 4913, 18, 497, 330, 17, 497, 330, 16, 23625, 1903, 15222, 1669, 1281, 38878, 1006, 197, 79756, 345, 197, 18388, 19983, 7554, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestApp(t *testing.T) { container, err := gnomock.Start( kafka.Preset(kafka.WithTopics("events")), gnomock.WithDebugMode(), gnomock.WithLogWriter(os.Stdout), gnomock.WithContainerName("kafka"), ) require.NoError(t, err) defer func() { require.NoError(t, gnomock.Stop(container)) }() ctx, cancel := context.WithCancel(context.Background()) p := producer.New(container.Address(kafka.BrokerPort), "events") c := consumer.New(container.Address(kafka.BrokerPort), "events") r := reporter.New(ctx, c) mux := handler.Mux(p, r) s := httptest.NewServer(mux) rep := getReport(t, s.URL) require.Empty(t, rep) events := []string{"login", "order"} accounts := []string{"a", "b", "c"} for i := 0; i < 10; i++ { ev := events[i%2] acc := accounts[i%3] sendEvent(t, s.URL, ev, acc) } cancel() require.NoError(t, p.Close()) require.NoError(t, c.Close()) time.Sleep(time.Second * 1) rep = getReport(t, s.URL) require.NotEmpty(t, rep) aStats, bStats, cStats := rep["a"], rep["b"], rep["c"] require.Len(t, aStats, 2) require.Len(t, bStats, 2) require.Len(t, cStats, 2) require.Equal(t, 2, aStats["login"]) require.Equal(t, 1, aStats["order"]) require.Equal(t, 1, bStats["login"]) require.Equal(t, 2, bStats["order"]) require.Equal(t, 1, cStats["login"]) require.Equal(t, 1, cStats["order"]) }
explode_data.jsonl/70585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 567 }
[ 2830, 3393, 2164, 1155, 353, 8840, 836, 8, 341, 53290, 11, 1848, 1669, 342, 16687, 1176, 12101, 1006, 197, 16463, 21883, 1069, 9716, 5969, 21883, 26124, 45003, 445, 12389, 30154, 197, 3174, 16687, 1176, 26124, 7939, 3636, 1507, 342, 16687...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHighLevelEncoder_isDigit(t *testing.T) { if HighLevelEncoder_isDigit('/') != false { t.Fatalf("isDigit('/') must false") } if HighLevelEncoder_isDigit('0') != true { t.Fatalf("isDigit('0') must true") } if HighLevelEncoder_isDigit('9') != true { t.Fatalf("isDigit('9') must true") } if HighLevelEncoder_isDigit(':') != false { t.Fatalf("isDigit(':') must false") } }
explode_data.jsonl/32224
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 11976, 4449, 19921, 6892, 36420, 1155, 353, 8840, 836, 8, 341, 743, 5124, 4449, 19921, 6892, 36420, 47729, 961, 895, 341, 197, 3244, 30762, 445, 285, 36420, 47729, 1969, 895, 1138, 197, 532, 743, 5124, 4449, 19921, 6892, 364...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCompileSourceOfInvalidSyntax(t *testing.T) { f, err := ioutil.TempFile("", "") assert.Nil(t, err) f.WriteString(`(print "Hello, world!"`) err = f.Close() assert.Nil(t, err) _, err = Compile(f.Name()) assert.NotNil(t, err) }
explode_data.jsonl/24272
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 46126, 3608, 2124, 7928, 33890, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 43144, 65009, 1703, 19814, 14676, 6948, 59678, 1155, 11, 1848, 692, 1166, 44747, 5809, 7, 1350, 330, 9707, 11, 1879, 8958, 63, 692, 9859, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRemittanceOriginatorBuildingNumberAlphaNumeric(t *testing.T) { ro := mockRemittanceOriginator() ro.RemittanceData.BuildingNumber = "®" err := ro.Validate() require.EqualError(t, err, fieldError("BuildingNumber", ErrNonAlphanumeric, ro.RemittanceData.BuildingNumber).Error()) }
explode_data.jsonl/32927
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 6590, 87191, 13298, 850, 30133, 2833, 19384, 36296, 1155, 353, 8840, 836, 8, 341, 197, 299, 1669, 7860, 6590, 87191, 13298, 850, 741, 197, 299, 11398, 87191, 1043, 25212, 287, 2833, 284, 330, 11909, 1837, 9859, 1669, 926, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLateInitialize(t *testing.T) { type args struct { spec *v1beta1.QueueParameters in map[string]string tags map[string]string } cases := map[string]struct { args args want *v1beta1.QueueParameters }{ "AllFilledNoDiff": { args: args{ spec: sqsParams(), in: attributes(), }, want: sqsParams(), }, "AllFilledExternalDiff": { args: args{ spec: sqsParams(), in: attributes(map[string]string{ v1beta1.AttributeKmsMasterKeyID: kmsMasterKeyID, }), }, want: sqsParams(), }, "PartialFilled": { args: args{ spec: sqsParams(func(p *v1beta1.QueueParameters) { p.DelaySeconds = nil }), in: attributes(), }, want: sqsParams(), }, "PointerFields": { args: args{ spec: sqsParams(), tags: map[string]string{ tagKey: tagValue, }, }, want: sqsParams(func(p *v1beta1.QueueParameters) { p.Tags = map[string]string{ tagKey: tagValue, } }), }, } for name, tc := range cases { t.Run(name, func(t *testing.T) { LateInitialize(tc.args.spec, tc.args.in, tc.args.tags) if diff := cmp.Diff(tc.want, tc.args.spec); diff != "" { t.Errorf("LateInitializeSpec(...): -want, +got:\n%s", diff) } }) } }
explode_data.jsonl/82680
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 595 }
[ 2830, 3393, 61457, 9928, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 98100, 353, 85, 16, 19127, 16, 50251, 9706, 198, 197, 17430, 256, 2415, 14032, 30953, 198, 197, 3244, 2032, 2415, 14032, 30953, 198, 197, 532, 1444, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFiles(t *testing.T) { var sources = []string{ "package p; type T struct{}; func (T) m1() {}", "package p; func (T) m2() {}; var x interface{ m1(); m2() } = T{}", "package p; func (T) m3() {}; var y interface{ m1(); m2(); m3() } = T{}", "package p", } var conf Config fset := token.NewFileSet() pkg := NewPackage("p", "p") var info Info check := NewChecker(&conf, fset, pkg, &info) for i, src := range sources { filename := fmt.Sprintf("sources%d", i) f, err := parser.ParseFile(fset, filename, src, 0) if err != nil { t.Fatal(err) } if err := check.Files([]*ast.File{f}); err != nil { t.Error(err) } } // check InitOrder is [x y] var vars []string for _, init := range info.InitOrder { for _, v := range init.Lhs { vars = append(vars, v.Name()) } } if got, want := fmt.Sprint(vars), "[x y]"; got != want { t.Errorf("InitOrder == %s, want %s", got, want) } }
explode_data.jsonl/55545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 399 }
[ 2830, 3393, 10809, 1155, 353, 8840, 836, 8, 341, 2405, 8173, 284, 3056, 917, 515, 197, 197, 1, 1722, 281, 26, 943, 350, 2036, 6257, 26, 2915, 320, 51, 8, 296, 16, 368, 4687, 756, 197, 197, 1, 1722, 281, 26, 2915, 320, 51, 8, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestTickMultipleExpiredDepositPeriod(t *testing.T) { app := simapp.Setup(false) ctx := app.BaseApp.NewContext(false, abci.Header{}) addrs := simapp.AddTestAddrs(app, ctx, 10, valTokens) header := abci.Header{Height: app.LastBlockHeight() + 1} app.BeginBlock(abci.RequestBeginBlock{Header: header}) govHandler := gov.NewHandler(app.GovKeeper) inactiveQueue := app.GovKeeper.InactiveProposalQueueIterator(ctx, ctx.BlockHeader().Time) require.False(t, inactiveQueue.Valid()) inactiveQueue.Close() newProposalMsg, err := types.NewMsgSubmitProposal( types.ContentFromProposalType("test", "test", types.ProposalTypeText), sdk.Coins{sdk.NewInt64Coin(sdk.DefaultBondDenom, 5)}, addrs[0], ) require.NoError(t, err) res, err := govHandler(ctx, newProposalMsg) require.NoError(t, err) require.NotNil(t, res) inactiveQueue = app.GovKeeper.InactiveProposalQueueIterator(ctx, ctx.BlockHeader().Time) require.False(t, inactiveQueue.Valid()) inactiveQueue.Close() newHeader := ctx.BlockHeader() newHeader.Time = ctx.BlockHeader().Time.Add(time.Duration(2) * time.Second) ctx = ctx.WithBlockHeader(newHeader) inactiveQueue = app.GovKeeper.InactiveProposalQueueIterator(ctx, ctx.BlockHeader().Time) require.False(t, inactiveQueue.Valid()) inactiveQueue.Close() newProposalMsg2, err := types.NewMsgSubmitProposal( types.ContentFromProposalType("test2", "test2", types.ProposalTypeText), sdk.Coins{sdk.NewInt64Coin(sdk.DefaultBondDenom, 5)}, addrs[0], ) require.NoError(t, err) res, err = govHandler(ctx, newProposalMsg2) require.NoError(t, err) require.NotNil(t, res) newHeader = ctx.BlockHeader() newHeader.Time = ctx.BlockHeader().Time.Add(app.GovKeeper.GetDepositParams(ctx).MaxDepositPeriod).Add(time.Duration(-1) * time.Second) ctx = ctx.WithBlockHeader(newHeader) inactiveQueue = app.GovKeeper.InactiveProposalQueueIterator(ctx, ctx.BlockHeader().Time) require.True(t, inactiveQueue.Valid()) inactiveQueue.Close() gov.EndBlocker(ctx, app.GovKeeper) inactiveQueue = 
app.GovKeeper.InactiveProposalQueueIterator(ctx, ctx.BlockHeader().Time) require.False(t, inactiveQueue.Valid()) inactiveQueue.Close() newHeader = ctx.BlockHeader() newHeader.Time = ctx.BlockHeader().Time.Add(time.Duration(5) * time.Second) ctx = ctx.WithBlockHeader(newHeader) inactiveQueue = app.GovKeeper.InactiveProposalQueueIterator(ctx, ctx.BlockHeader().Time) require.True(t, inactiveQueue.Valid()) inactiveQueue.Close() gov.EndBlocker(ctx, app.GovKeeper) inactiveQueue = app.GovKeeper.InactiveProposalQueueIterator(ctx, ctx.BlockHeader().Time) require.False(t, inactiveQueue.Valid()) inactiveQueue.Close() }
explode_data.jsonl/51028
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 959 }
[ 2830, 3393, 22213, 32089, 54349, 78982, 23750, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1643, 676, 39820, 3576, 340, 20985, 1669, 906, 13018, 2164, 7121, 1972, 3576, 11, 668, 5855, 15753, 37790, 12718, 5428, 1669, 1643, 676, 1904, 2271,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInvalidFlagDefaultCanBeOverridden(t *testing.T) { app := New("test", "") app.Flag("a", "").Default("invalid").Bool() _, err := app.Parse([]string{}) assert.Error(t, err) }
explode_data.jsonl/74860
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 7928, 12135, 3675, 69585, 1918, 42185, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1532, 445, 1944, 497, 14676, 28236, 80911, 445, 64, 497, 35229, 3675, 445, 11808, 1827, 11233, 741, 197, 6878, 1848, 1669, 906, 8937, 10556, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestVolumeTypesList(t *testing.T) { client, err := clients.NewBlockStorageV1Client() if err != nil { t.Fatalf("Unable to create a blockstorage client: %v", err) } allPages, err := volumetypes.List(client).AllPages() if err != nil { t.Fatalf("Unable to retrieve volume types: %v", err) } allVolumeTypes, err := volumetypes.ExtractVolumeTypes(allPages) if err != nil { t.Fatalf("Unable to extract volume types: %v", err) } for _, volumeType := range allVolumeTypes { tools.PrintResource(t, volumeType) } }
explode_data.jsonl/32332
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 18902, 4173, 852, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 8239, 7121, 4713, 5793, 53, 16, 2959, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 17075, 311, 1855, 264, 2504, 16172, 2943, 25, 1018, 85, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDeleteServiceConnections(t *testing.T) { fcmd := exec.FakeCmd{ CombinedOutputScript: []exec.FakeCombinedOutputAction{ func() ([]byte, error) { return []byte("1 flow entries have been deleted"), nil }, func() ([]byte, error) { return []byte("1 flow entries have been deleted"), nil }, func() ([]byte, error) { return []byte(""), fmt.Errorf("conntrack v1.4.2 (conntrack-tools): 0 flow entries have been deleted.") }, }, } fexec := exec.FakeExec{ CommandScript: []exec.FakeCommandAction{ func(cmd string, args ...string) exec.Cmd { return exec.InitFakeCmd(&fcmd, cmd, args...) }, func(cmd string, args ...string) exec.Cmd { return exec.InitFakeCmd(&fcmd, cmd, args...) }, func(cmd string, args ...string) exec.Cmd { return exec.InitFakeCmd(&fcmd, cmd, args...) }, }, LookPathFunc: func(cmd string) (string, error) { return cmd, nil }, } fakeProxier := Proxier{exec: &fexec} testCases := [][]string{ { "10.240.0.3", "10.240.0.5", }, { "10.240.0.4", }, } svcCount := 0 for i := range testCases { fakeProxier.deleteServiceConnections(testCases[i]) for _, ip := range testCases[i] { expectCommand := fmt.Sprintf("conntrack -D --orig-dst %s -p udp", ip) execCommand := strings.Join(fcmd.CombinedOutputLog[svcCount], " ") if expectCommand != execCommand { t.Errorf("Exepect comand: %s, but executed %s", expectCommand, execCommand) } svcCount += 1 } if svcCount != fexec.CommandCalls { t.Errorf("Exepect comand executed %d times, but got %d", svcCount, fexec.CommandCalls) } } }
explode_data.jsonl/9289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 622 }
[ 2830, 3393, 6435, 1860, 54751, 1155, 353, 8840, 836, 8, 341, 1166, 8710, 1669, 3883, 991, 726, 15613, 515, 197, 197, 94268, 5097, 5910, 25, 3056, 11748, 991, 726, 94268, 5097, 2512, 515, 298, 29244, 368, 34923, 3782, 11, 1465, 8, 314,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPaintClippedRect(t *testing.T) { run(t, func(o *op.Ops) { clip.RRect{Rect: f32.Rect(25, 25, 60, 60)}.Add(o) paint.FillShape(o, colornames.Red, clip.Rect(image.Rect(0, 0, 50, 50)).Op()) }, func(r result) { r.expect(0, 0, colornames.White) r.expect(24, 35, colornames.White) r.expect(25, 35, colornames.Red) r.expect(50, 0, colornames.White) r.expect(10, 50, colornames.White) }) }
explode_data.jsonl/18103
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 18098, 5066, 6450, 4415, 1155, 353, 8840, 836, 8, 341, 56742, 1155, 11, 2915, 10108, 353, 453, 8382, 1690, 8, 341, 197, 197, 7974, 2013, 4415, 90, 4415, 25, 282, 18, 17, 32153, 7, 17, 20, 11, 220, 17, 20, 11, 220, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServerListenerClosed(t *testing.T) { var ( ctx = context.Background() server = mustServer(t)(NewServer()) _, listener = newTestListener(t) errs = make(chan error, 1) ) go func() { errs <- server.Serve(ctx, listener) }() if err := listener.Close(); err != nil { t.Fatal(err) } err := <-errs if err == nil { t.Fatal(err) } }
explode_data.jsonl/41077
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 5475, 2743, 26884, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 20985, 260, 284, 2266, 19047, 741, 197, 41057, 414, 284, 1969, 5475, 1155, 2376, 3564, 5475, 2398, 197, 197, 6878, 11446, 284, 501, 2271, 2743, 1155, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWriteMultipartDocument(t *testing.T) { rt := NewRestTester(t, nil) defer rt.Close() reqHeaders := map[string]string{ "MIME-Version": "1.0", "Content-Type": "multipart/related; boundary=0123456789"} bodyText := `--0123456789 Content-Type: application/json {"key":"foo","value":"bar"} --0123456789--` response := rt.SendAdminRequestWithHeaders(http.MethodPut, "/db/doc1", bodyText, reqHeaders) assertStatus(t, response, http.StatusCreated) response = rt.SendAdminRequestWithHeaders(http.MethodGet, "/db/doc1", "", reqHeaders) log.Printf("response: %v", string(response.BodyBytes())) assertStatus(t, response, http.StatusOK) }
explode_data.jsonl/56944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 7985, 44, 18204, 7524, 1155, 353, 8840, 836, 8, 341, 55060, 1669, 1532, 12416, 58699, 1155, 11, 2092, 340, 16867, 16677, 10421, 2822, 24395, 10574, 1669, 2415, 14032, 30953, 515, 197, 197, 73527, 5660, 83902, 788, 330, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeriveSecretsAndChallengeGolden(t *testing.T) { goldenFilepath := filepath.Join("testdata", t.Name()+".golden") if *update { t.Logf("Updating golden test vector file %s", goldenFilepath) data := createGoldenTestVectors(t) cmn.WriteFile(goldenFilepath, []byte(data), 0644) } f, err := os.Open(goldenFilepath) if err != nil { log.Fatal(err) } defer f.Close() scanner := bufio.NewScanner(f) for scanner.Scan() { line := scanner.Text() params := strings.Split(line, ",") randSecretVector, err := hex.DecodeString(params[0]) require.Nil(t, err) randSecret := new([32]byte) copy((*randSecret)[:], randSecretVector) locIsLeast, err := strconv.ParseBool(params[1]) require.Nil(t, err) expectedRecvSecret, err := hex.DecodeString(params[2]) require.Nil(t, err) expectedSendSecret, err := hex.DecodeString(params[3]) require.Nil(t, err) expectedChallenge, err := hex.DecodeString(params[4]) require.Nil(t, err) recvSecret, sendSecret, challenge := deriveSecretAndChallenge(randSecret, locIsLeast) require.Equal(t, expectedRecvSecret, (*recvSecret)[:], "Recv Secrets aren't equal") require.Equal(t, expectedSendSecret, (*sendSecret)[:], "Send Secrets aren't equal") require.Equal(t, expectedChallenge, (*challenge)[:], "challenges aren't equal") } }
explode_data.jsonl/39913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 497 }
[ 2830, 3393, 22171, 533, 19773, 82, 3036, 62078, 59790, 1155, 353, 8840, 836, 8, 341, 3174, 813, 268, 1703, 2343, 1669, 26054, 22363, 445, 92425, 497, 259, 2967, 17140, 3263, 97235, 1138, 743, 353, 2386, 341, 197, 3244, 98954, 445, 46910...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetNodeIDFromNode(t *testing.T) { tests := []struct { name string annotations map[string]string expectedID string expectError bool }{ { name: "single key", annotations: map[string]string{"csi.volume.kubernetes.io/nodeid": "{\"foo/bar\": \"MyNodeID\"}"}, expectedID: "MyNodeID", expectError: false, }, { name: "multiple keys", annotations: map[string]string{ "csi.volume.kubernetes.io/nodeid": "{\"foo/bar\": \"MyNodeID\", \"-foo/bar\": \"MyNodeID2\", \"foo/bar-\": \"MyNodeID3\"}", }, expectedID: "MyNodeID", expectError: false, }, { name: "no annotations", annotations: nil, expectedID: "", expectError: true, }, { name: "invalid JSON", annotations: map[string]string{"csi.volume.kubernetes.io/nodeid": "\"foo/bar\": \"MyNodeID\""}, expectedID: "", expectError: true, }, { name: "annotations for another driver", annotations: map[string]string{ "csi.volume.kubernetes.io/nodeid": "{\"-foo/bar\": \"MyNodeID2\", \"foo/bar-\": \"MyNodeID3\"}", }, expectedID: "", expectError: true, }, } for _, test := range tests { node := &v1.Node{ ObjectMeta: metav1.ObjectMeta{ Name: "abc", Annotations: test.annotations, }, } nodeID, err := GetNodeIDFromNode(driverName, node) if err == nil && test.expectError { t.Errorf("test %s: expected error, got none", test.name) } if err != nil && !test.expectError { t.Errorf("test %s: got error: %s", test.name, err) } if !test.expectError && nodeID != test.expectedID { t.Errorf("test %s: unexpected NodeID: %s", test.name, nodeID) } } }
explode_data.jsonl/53369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 750 }
[ 2830, 3393, 1949, 1955, 915, 3830, 1955, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 197, 39626, 2415, 14032, 30953, 198, 197, 42400, 915, 220, 914, 198, 197, 24952, 1454, 1807, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestSuccessfulWearAndTakeOff(t *testing.T) { ctrl := gomock.NewController(t) char := character.New() item := mock_character.NewMockItem(ctrl) char.AddItem(item) item.EXPECT().Wear(char).Times(1).Return(nil) assert.NoError(t, char.WearOrTakeOff(0)) assert.Equal(t, item, char.Wearing()) item.EXPECT().TakeOff(char).Times(1).Return(nil) assert.NoError(t, char.WearOrTakeOff(0)) assert.Equal(t, nil, char.Wearing()) }
explode_data.jsonl/20227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 36374, 54, 682, 3036, 17814, 4596, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 7450, 1669, 3668, 7121, 2822, 22339, 1669, 7860, 40988, 7121, 11571, 1234, 62100, 340, 7450, 91402, 5393, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDownloadStats(t *testing.T) { when := time.Date(2022, time.May, 21, 9, 4, 0, 0, time.UTC) bs, err := json.Marshal(&DownloadStats{ SDNs: 1, Errors: []error{ errors.New("bad thing"), }, RefreshedAt: when, }) require.NoError(t, err) var wrapper struct { SDNs int Errors []string Timestamp time.Time } err = json.NewDecoder(bytes.NewReader(bs)).Decode(&wrapper) require.NoError(t, err) require.Equal(t, 1, wrapper.SDNs) require.Len(t, wrapper.Errors, 1) require.Equal(t, when, wrapper.Timestamp) }
explode_data.jsonl/29461
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 11377, 16635, 1155, 353, 8840, 836, 8, 341, 60180, 1669, 882, 8518, 7, 17, 15, 17, 17, 11, 882, 1321, 352, 11, 220, 17, 16, 11, 220, 24, 11, 220, 19, 11, 220, 15, 11, 220, 15, 11, 882, 87069, 340, 93801, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChanAlloc(t *testing.T) { // Note: for a chan int, the return Value must be allocated, so we // use a chan *int instead. c := ValueOf(make(chan *int, 1)) v := ValueOf(new(int)) allocs := testing.AllocsPerRun(100, func() { c.Send(v) _, _ = c.Recv() }) if allocs < 0.5 || allocs > 1.5 { t.Errorf("allocs per chan send/recv: want 1 got %f", allocs) } // Note: there is one allocation in reflect.recv which seems to be // a limitation of escape analysis. If that is ever fixed the // allocs < 0.5 condition will trigger and this test should be fixed. }
explode_data.jsonl/29635
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 46019, 25154, 1155, 353, 8840, 836, 8, 341, 197, 322, 7036, 25, 369, 264, 26023, 526, 11, 279, 470, 5162, 1969, 387, 20204, 11, 773, 582, 198, 197, 322, 990, 264, 26023, 353, 396, 4518, 624, 1444, 1669, 5162, 2124, 36944...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGeneratePredictStmt(t *testing.T) { if test.GetEnv("SQLFLOW_TEST_DB", "mysql") == "hive" { t.Skip(fmt.Sprintf("%s: skip Hive test", test.GetEnv("SQLFLOW_TEST_DB", "mysql"))) } a := assert.New(t) predSQL := `SELECT * FROM iris.test TO PREDICT iris.predict.class USING sqlflow_models.mymodel;` r, e := parser.ParseStatement("mysql", predSQL) a.NoError(e) // need to save a model first because predict SQL will read the train SQL // from saved model cwd, e := ioutil.TempDir("/tmp", "sqlflow_models") a.Nil(e) defer os.RemoveAll(cwd) a.NoError(model.MockInDB(cwd, `SELECT * FROM iris.train TO TRAIN DNNClassifier WITH model.n_classes=3, model.hidden_units=[10,20] LABEL class INTO sqlflow_models.mymodel;`, "sqlflow_models.mymodel")) predStmt, err := GeneratePredictStmt(r.SQLFlowSelectStmt, database.GetTestingDBSingleton().URL(), "", cwd, true) a.NoError(err) a.Equal("iris.predict", predStmt.ResultTable) a.Equal("class", predStmt.TrainStmt.Label.GetFieldDesc()[0].Name) a.Equal("DNNClassifier", predStmt.TrainStmt.Estimator) nc, ok := predStmt.TrainStmt.Features["feature_columns"][0].(*NumericColumn) a.True(ok) a.Equal("sepal_length", nc.FieldDesc.Name) a.Equal("sqlflow_models.mymodel", predStmt.Using) }
explode_data.jsonl/45949
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 513 }
[ 2830, 3393, 31115, 53544, 31063, 1155, 353, 8840, 836, 8, 341, 743, 1273, 2234, 14359, 445, 6688, 46060, 11641, 16310, 497, 330, 12272, 899, 621, 330, 88568, 1, 341, 197, 3244, 57776, 28197, 17305, 4430, 82, 25, 10706, 68178, 1273, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSDKInitiatorCreate(t *testing.T) { sdk, err := dsdk.NewSDK(nil, true) if err != nil { t.Error(err) } ctxt := sdk.NewContext() ro := &dsdk.InitiatorsCreateRequest{ Ctxt: ctxt, Id: fmt.Sprintf("iqn.1993-08.org.debian:01:%s", dsdk.RandString(12)), Name: dsdk.RandString(12), } var init *dsdk.Initiator if init, _, err = sdk.Initiators.Create(ro); err != nil { t.Errorf("%s", err) } if _, _, err = init.Delete(&dsdk.InitiatorDeleteRequest{ Ctxt: ctxt, }); err != nil { t.Errorf("%s", err) } }
explode_data.jsonl/19680
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 31534, 3803, 36122, 4021, 1155, 353, 8840, 836, 8, 341, 1903, 7584, 11, 1848, 1669, 11472, 7584, 7121, 31534, 27907, 11, 830, 340, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 532, 197, 77492, 1669, 45402, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestOverrideReminder(t *testing.T) { t.Run("override data", func(t *testing.T) { testActorsRuntime := newTestActorsRuntime() actorType, actorID := getTestActorTypeAndID() reminder := createReminder(actorID, actorType, "reminder1", "1s", "1s", "a") err := testActorsRuntime.CreateReminder(&reminder) assert.Nil(t, err) reminder2 := createReminder(actorID, actorType, "reminder1", "1s", "1s", "b") testActorsRuntime.CreateReminder(&reminder2) reminders, err := testActorsRuntime.getRemindersForActorType(actorType) assert.Nil(t, err) assert.Equal(t, "b", reminders[0].Data) }) t.Run("override dueTime", func(t *testing.T) { testActorsRuntime := newTestActorsRuntime() actorType, actorID := getTestActorTypeAndID() reminder := createReminder(actorID, actorType, "reminder1", "1s", "1s", "") err := testActorsRuntime.CreateReminder(&reminder) assert.Nil(t, err) reminder2 := createReminder(actorID, actorType, "reminder1", "1s", "2s", "") testActorsRuntime.CreateReminder(&reminder2) reminders, err := testActorsRuntime.getRemindersForActorType(actorType) assert.Nil(t, err) assert.Equal(t, "2s", reminders[0].DueTime) }) t.Run("override period", func(t *testing.T) { testActorsRuntime := newTestActorsRuntime() actorType, actorID := getTestActorTypeAndID() reminder := createReminder(actorID, actorType, "reminder1", "1s", "1s", "") err := testActorsRuntime.CreateReminder(&reminder) assert.Nil(t, err) reminder2 := createReminder(actorID, actorType, "reminder1", "2s", "1s", "") testActorsRuntime.CreateReminder(&reminder2) reminders, err := testActorsRuntime.getRemindersForActorType(actorType) assert.Nil(t, err) assert.Equal(t, "2s", reminders[0].Period) }) }
explode_data.jsonl/12878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 664 }
[ 2830, 3393, 2177, 95359, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 9199, 821, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18185, 2414, 1087, 15123, 1669, 501, 2271, 2414, 1087, 15123, 741, 197, 93410, 929, 11, 12089, 915, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBroadcastInvokeFailed(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() invokers := make([]*mock.MockInvoker, 0) mockResult := &protocol.RPCResult{Rest: clusterpkg.Rest{Tried: 0, Success: true}} mockFailedResult := &protocol.RPCResult{Err: errors.New("just failed")} for i := 0; i < 10; i++ { invoker := mock.NewMockInvoker(ctrl) invokers = append(invokers, invoker) invoker.EXPECT().Invoke(gomock.Any()).Return(mockResult) } { invoker := mock.NewMockInvoker(ctrl) invokers = append(invokers, invoker) invoker.EXPECT().Invoke(gomock.Any()).Return(mockFailedResult) } for i := 0; i < 10; i++ { invoker := mock.NewMockInvoker(ctrl) invokers = append(invokers, invoker) invoker.EXPECT().Invoke(gomock.Any()).Return(mockResult) } clusterInvoker := registerBroadcast(invokers...) result := clusterInvoker.Invoke(context.Background(), &invocation.RPCInvocation{}) assert.Equal(t, mockFailedResult.Err, result.Error()) }
explode_data.jsonl/80725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 380 }
[ 2830, 3393, 43362, 17604, 9408, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 14057, 40681, 1669, 1281, 85288, 16712, 24664, 47668, 11, 220, 15, 692, 77333, 2077, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewNode(t *testing.T) { n := NewNode("graphite010-g5", "a") if n.KeyValue() != "('graphite010-g5', 'a')" { t.Error("NewNode() did not produce a tuple string format") } if NewNode("graphite011-g5", "").KeyValue() != "('graphite011-g5', None)" { t.Error("NewNode() did not handle a None instance value") } if n.Server != "graphite010-g5" { t.Error("Node type can't store servers properly") } if n.Instance != "a" { t.Error("Node type can't store instances properly") } if n.String() != "graphite010-g5:a" { t.Error("Node string representation is broken") } }
explode_data.jsonl/51136
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 3564, 1955, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 1532, 1955, 445, 4439, 632, 15, 16, 15, 2371, 20, 497, 330, 64, 1138, 743, 308, 9610, 1130, 368, 961, 82368, 4439, 632, 15, 16, 15, 2371, 20, 516, 364, 64, 44307, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestProcessFeeUpdate(t *testing.T) { const ( // height is a non-zero height that can be used for htlcs // heights. height = 200 // nextHeight is a constant that we use for the next height in // all unit tests. nextHeight = 400 // feePerKw is the fee we start all of our unit tests with. feePerKw = 1 // ourFeeUpdateAmt is an amount that we update fees to expressed // in msat. ourFeeUpdateAmt = 20000 // ourFeeUpdatePerSat is the fee rate *in satoshis* that we // expect if we update to ourFeeUpdateAmt. ourFeeUpdatePerSat = chainfee.SatPerKWeight(20) ) tests := []struct { name string startHeights heights expectedHeights heights remoteChain bool mutate bool expectedFee chainfee.SatPerKWeight }{ { // Looking at local chain, local add is non-zero so // the update has been applied already; no fee change. name: "non-zero local height, fee unchanged", startHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: 0, remoteRemove: height, }, expectedHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: 0, remoteRemove: height, }, remoteChain: false, mutate: false, expectedFee: feePerKw, }, { // Looking at local chain, local add is zero so the // update has not been applied yet; we expect a fee // update. name: "zero local height, fee changed", startHeights: heights{ localAdd: 0, localRemove: 0, remoteAdd: height, remoteRemove: 0, }, expectedHeights: heights{ localAdd: 0, localRemove: 0, remoteAdd: height, remoteRemove: 0, }, remoteChain: false, mutate: false, expectedFee: ourFeeUpdatePerSat, }, { // Looking at remote chain, the remote add height is // zero, so the update has not been applied so we expect // a fee change. 
name: "zero remote height, fee changed", startHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: 0, remoteRemove: 0, }, expectedHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: 0, remoteRemove: 0, }, remoteChain: true, mutate: false, expectedFee: ourFeeUpdatePerSat, }, { // Looking at remote chain, the remote add height is // non-zero, so the update has been applied so we expect // no fee change. name: "non-zero remote height, no fee change", startHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: height, remoteRemove: 0, }, expectedHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: height, remoteRemove: 0, }, remoteChain: true, mutate: false, expectedFee: feePerKw, }, { // Local add height is non-zero, so the update has // already been applied; we do not expect fee to // change or any mutations to be applied. name: "non-zero local height, mutation not applied", startHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: 0, remoteRemove: height, }, expectedHeights: heights{ localAdd: height, localRemove: 0, remoteAdd: 0, remoteRemove: height, }, remoteChain: false, mutate: true, expectedFee: feePerKw, }, { // Local add is zero and we are looking at our local // chain, so the update has not been applied yet. We // expect the local add and remote heights to be // mutated. name: "zero height, fee changed, mutation applied", startHeights: heights{ localAdd: 0, localRemove: 0, remoteAdd: 0, remoteRemove: 0, }, expectedHeights: heights{ localAdd: nextHeight, localRemove: nextHeight, remoteAdd: 0, remoteRemove: 0, }, remoteChain: false, mutate: true, expectedFee: ourFeeUpdatePerSat, }, } for _, test := range tests { test := test t.Run(test.name, func(t *testing.T) { // Create a fee update with add and remove heights as // set in the test. 
heights := test.startHeights update := &PaymentDescriptor{ Amount: ourFeeUpdateAmt, addCommitHeightRemote: heights.remoteAdd, addCommitHeightLocal: heights.localAdd, removeCommitHeightRemote: heights.remoteRemove, removeCommitHeightLocal: heights.localRemove, EntryType: FeeUpdate, } view := &htlcView{ feePerKw: chainfee.SatPerKWeight(feePerKw), } processFeeUpdate( update, nextHeight, test.remoteChain, test.mutate, view, ) if view.feePerKw != test.expectedFee { t.Fatalf("expected fee: %v, got: %v", test.expectedFee, feePerKw) } checkHeights(t, update, test.expectedHeights) }) } }
explode_data.jsonl/72517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2063 }
[ 2830, 3393, 7423, 41941, 4289, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 197, 322, 2608, 374, 264, 2477, 36929, 2608, 429, 646, 387, 1483, 369, 305, 11544, 4837, 198, 197, 197, 322, 35294, 624, 197, 30500, 284, 220, 17, 15, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRunningCodeInContextAfterThrowingError(t *testing.T) { t.Parallel() ctx := NewIsolate().NewContext() _, err := ctx.Eval(` function fail(a,b) { this.c = a+b; throw "some failure"; } function work(a,b) { this.c = a+b+2; } x = new fail(3,5);`, "file1.js") if err == nil { t.Fatal("Expected an exception.") } res, err := ctx.Eval(`y = new work(3,6); y.c`, "file2.js") if err != nil { t.Fatal("Expected it to work, but got:", err) } if num := res.Int64(); num != 11 { t.Errorf("Expected 11, got: %v (%v)", num, res) } }
explode_data.jsonl/81555
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 18990, 2078, 641, 1972, 6025, 23079, 287, 1454, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 1532, 3872, 33066, 1005, 3564, 1972, 741, 197, 6878, 1848, 1669, 5635, 5142, 831, 61528, 197, 7527, 3690, 287...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDurationIsFinishedFunc(t *testing.T) { lastSyncDate, _ := time.Parse("2006-01-02", "2819-07-01") generator := &DurationHttpRequestGenerator{ Duration: Day, Offset: 0, LastTime: lastSyncDate, IgnoreWeekend: true, ParametersFunc: getDurationHttpRequestParametersFunc(), DurationIsFinishedFunc: func() bool { return false }, } start1, _ := generator.NextDuration() start2, _ := generator.NextDuration() assert.Equal(t, start1, start2) }
explode_data.jsonl/25571
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 12945, 3872, 24890, 9626, 1155, 353, 8840, 836, 8, 341, 33096, 12154, 1916, 11, 716, 1669, 882, 8937, 445, 17, 15, 15, 21, 12, 15, 16, 12, 15, 17, 497, 330, 17, 23, 16, 24, 12, 15, 22, 12, 15, 16, 1138, 3174, 15312...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test0001KCP(t *testing.T) { c, err := NewConn("kcp") if err != nil { fmt.Println(err) return } cc, err := c.Listen("127.0.0.1:58080") if err != nil { fmt.Println(err) return } go func() { _, err := cc.Accept() if err != nil { fmt.Println(err) return } fmt.Println("accept done") }() time.Sleep(time.Second) fmt.Println("start close") cc.Close() time.Sleep(time.Second) }
explode_data.jsonl/19977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 15, 15, 15, 16, 42, 7123, 1155, 353, 8840, 836, 8, 341, 1444, 11, 1848, 1669, 1532, 9701, 445, 74, 4672, 1138, 743, 1848, 961, 2092, 341, 197, 11009, 12419, 3964, 340, 197, 853, 198, 197, 630, 63517, 11, 1848, 1669, 27...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSchedulerErrorWithLongBinding(t *testing.T) { stop := make(chan struct{}) defer close(stop) firstPod := podWithPort("foo", "", 8080) conflictPod := podWithPort("bar", "", 8080) pods := map[string]*v1.Pod{firstPod.Name: firstPod, conflictPod.Name: conflictPod} for _, test := range []struct { name string Expected map[string]bool CacheTTL time.Duration BindingDuration time.Duration }{ { name: "long cache ttl", Expected: map[string]bool{firstPod.Name: true}, CacheTTL: 100 * time.Millisecond, BindingDuration: 300 * time.Millisecond, }, { name: "short cache ttl", Expected: map[string]bool{firstPod.Name: true}, CacheTTL: 10 * time.Second, BindingDuration: 300 * time.Millisecond, }, } { t.Run(test.name, func(t *testing.T) { queuedPodStore := clientcache.NewFIFO(clientcache.MetaNamespaceKeyFunc) scache := internalcache.New(test.CacheTTL, stop) node := v1.Node{ObjectMeta: metav1.ObjectMeta{Name: "machine1", UID: types.UID("machine1")}} scache.AddNode(&node) client := clientsetfake.NewSimpleClientset(&node) informerFactory := informers.NewSharedInformerFactory(client, 0) predicateMap := map[string]predicates.FitPredicate{"PodFitsHostPorts": predicates.PodFitsHostPorts} scheduler, bindingChan := setupTestSchedulerLongBindingWithRetry( queuedPodStore, scache, informerFactory, predicateMap, stop, test.BindingDuration) informerFactory.Start(stop) informerFactory.WaitForCacheSync(stop) scheduler.Run() queuedPodStore.Add(firstPod) queuedPodStore.Add(conflictPod) resultBindings := map[string]bool{} waitChan := time.After(5 * time.Second) for finished := false; !finished; { select { case b := <-bindingChan: resultBindings[b.Name] = true p := pods[b.Name] p.Spec.NodeName = b.Target.Name scache.AddPod(p) case <-waitChan: finished = true } } if !reflect.DeepEqual(resultBindings, test.Expected) { t.Errorf("Result binding are not equal to expected. %v != %v", resultBindings, test.Expected) } }) } }
explode_data.jsonl/24712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 902 }
[ 2830, 3393, 38878, 1454, 2354, 6583, 15059, 1155, 353, 8840, 836, 8, 341, 62644, 1669, 1281, 35190, 2036, 37790, 16867, 3265, 60170, 692, 42190, 23527, 1669, 7509, 2354, 7084, 445, 7975, 497, 7342, 220, 23, 15, 23, 15, 340, 67850, 21242...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTerragruntAfterHook(t *testing.T) { t.Parallel() cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_AFTER_ONLY_PATH) tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_AFTER_ONLY_PATH) rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_AFTER_ONLY_PATH) runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s", rootPath)) _, exception := ioutil.ReadFile(rootPath + "/file.out") assert.NoError(t, exception) }
explode_data.jsonl/10066
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 51402, 68305, 3850, 6025, 31679, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1444, 60639, 51, 13886, 627, 13682, 1155, 11, 13602, 42635, 41486, 82251, 50, 72339, 31263, 7944, 340, 20082, 14359, 1820, 1669, 2975, 127...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProtocol_HandleCreateStake(t *testing.T) { require := require.New(t) ctrl := gomock.NewController(t) defer ctrl.Finish() sm := newMockStateManager(ctrl) _, err := sm.PutState( &totalBucketCount{count: 0}, protocol.NamespaceOption(StakingNameSpace), protocol.KeyOption(TotalBucketKey), ) require.NoError(err) // create protocol p, err := NewProtocol(depositGas, genesis.Default.Staking) require.NoError(err) // set up candidate candidate := testCandidates[0].d.Clone() require.NoError(putCandidate(sm, candidate)) candidateName := candidate.Name candidateAddr := candidate.Owner ctx := protocol.WithBlockchainCtx(context.Background(), protocol.BlockchainCtx{ Genesis: genesis.Default, }) v, err := p.Start(ctx, sm) require.NoError(err) cc, ok := v.(CandidateCenter) require.True(ok) require.NoError(sm.WriteView(protocolID, cc)) stakerAddr := identityset.Address(1) tests := []struct { // action fields initBalance int64 candName string amount string duration uint32 autoStake bool gasPrice *big.Int gasLimit uint64 nonce uint64 // block context blkHeight uint64 blkTimestamp time.Time blkGasLimit uint64 // expected result err error status iotextypes.ReceiptStatus }{ { 10, candidateName, "100000000000000000000", 1, false, big.NewInt(unit.Qev), 10000, 1, 1, time.Now(), 10000, nil, iotextypes.ReceiptStatus_ErrNotEnoughBalance, }, { 100, "notExist", "100000000000000000000", 1, false, big.NewInt(unit.Qev), 10000, 1, 1, time.Now(), 10000, ErrInvalidCanName, iotextypes.ReceiptStatus_ErrCandidateNotExist, }, { 101, candidateName, "10000000000000000000", 1, false, big.NewInt(unit.Qev), 10000, 1, 1, time.Now(), 10000, ErrInvalidAmount, iotextypes.ReceiptStatus_Failure, }, { 101, candidateName, "100000000000000000000", 1, false, big.NewInt(unit.Qev), 10000, 1, 1, time.Now(), 10000, nil, iotextypes.ReceiptStatus_Success, }, } for _, test := range tests { require.NoError(setupAccount(sm, stakerAddr, test.initBalance)) ctx := protocol.WithActionCtx(context.Background(), 
protocol.ActionCtx{ Caller: stakerAddr, GasPrice: test.gasPrice, IntrinsicGas: test.gasLimit, Nonce: test.nonce, }) ctx = protocol.WithBlockCtx(ctx, protocol.BlockCtx{ BlockHeight: test.blkHeight, BlockTimeStamp: test.blkTimestamp, GasLimit: test.blkGasLimit, }) act, err := action.NewCreateStake(test.nonce, test.candName, test.amount, test.duration, test.autoStake, nil, test.gasLimit, test.gasPrice) require.NoError(err) err = p.Validate(ctx, act, sm) if test.err != nil { require.EqualError(test.err, errors.Cause(err).Error()) continue } r, err := p.Handle(ctx, act, sm) require.NoError(err) require.Equal(uint64(test.status), r.Status) if test.status == iotextypes.ReceiptStatus_Success { // test bucket index and bucket bucketIndices, err := getCandBucketIndices(sm, candidateAddr) require.NoError(err) require.Equal(1, len(*bucketIndices)) bucketIndices, err = getVoterBucketIndices(sm, stakerAddr) require.NoError(err) require.Equal(1, len(*bucketIndices)) indices := *bucketIndices bucket, err := getBucket(sm, indices[0]) require.NoError(err) require.Equal(candidateAddr, bucket.Candidate) require.Equal(stakerAddr, bucket.Owner) require.Equal(test.amount, bucket.StakedAmount.String()) // test candidate candidate, err := getCandidate(sm, candidateAddr) require.NoError(err) require.LessOrEqual(test.amount, candidate.Votes.String()) csm, err := NewCandidateStateManager(sm, cc) require.NoError(err) candidate = csm.GetByOwner(candidateAddr) require.NotNil(candidate) require.LessOrEqual(test.amount, candidate.Votes.String()) // test staker's account caller, err := accountutil.LoadAccount(sm, hash.BytesToHash160(stakerAddr.Bytes())) require.NoError(err) actCost, err := act.Cost() require.NoError(err) require.Equal(unit.ConvertIotxToRau(test.initBalance), big.NewInt(0).Add(caller.Balance, actCost)) require.Equal(test.nonce, caller.Nonce) } } }
explode_data.jsonl/64529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1850 }
[ 2830, 3393, 20689, 42714, 4021, 623, 726, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 692, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 741, 72023, 1669, 501, 11571, 83132, 62100, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAttachTagsToSpan(t *testing.T) { o := &Options{Provider: "zipkin", Tags: map[string]string{"test": "test"}} if o.AttachTagsToSpan() { t.Error("expected false") } o.setAttachTags() if !o.AttachTagsToSpan() { t.Error("expected true") } }
explode_data.jsonl/63661
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 30485, 15930, 1249, 12485, 1155, 353, 8840, 836, 8, 1476, 22229, 1669, 609, 3798, 90, 5179, 25, 330, 9964, 7989, 497, 27683, 25, 2415, 14032, 30953, 4913, 1944, 788, 330, 1944, 95642, 743, 297, 88284, 15930, 1249, 12485, 368...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddTransceiver(t *testing.T) { lim := test.TimeOut(time.Second * 30) defer lim.Stop() report := test.CheckRoutines(t) defer report() for _, testCase := range []struct { expectSender, expectReceiver bool direction RTPTransceiverDirection }{ {true, true, RTPTransceiverDirectionSendrecv}, // Go and WASM diverge // {true, false, RTPTransceiverDirectionSendonly}, // {false, true, RTPTransceiverDirectionRecvonly}, } { pc, err := NewPeerConnection(Configuration{}) assert.NoError(t, err) transceiver, err := pc.AddTransceiverFromKind(RTPCodecTypeVideo, RTPTransceiverInit{ Direction: testCase.direction, }) assert.NoError(t, err) if testCase.expectReceiver { assert.NotNil(t, transceiver.Receiver()) } else { assert.Nil(t, transceiver.Receiver()) } if testCase.expectSender { assert.NotNil(t, transceiver.Sender()) } else { assert.Nil(t, transceiver.Sender()) } offer, err := pc.CreateOffer(nil) assert.NoError(t, err) assert.True(t, offerMediaHasDirection(offer, RTPCodecTypeVideo, testCase.direction)) assert.NoError(t, pc.Close()) } }
explode_data.jsonl/8655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 453 }
[ 2830, 3393, 2212, 3167, 12862, 1155, 353, 8840, 836, 8, 341, 197, 4659, 1669, 1273, 16299, 2662, 9730, 32435, 353, 220, 18, 15, 340, 16867, 4568, 30213, 2822, 69931, 1669, 1273, 10600, 49, 28628, 1155, 340, 16867, 1895, 2822, 2023, 8358...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMarshalCmd(t *testing.T) { t.Parallel() tests := []struct { name string id interface{} cmd interface{} expected string }{ { name: "include all parameters", id: 1, cmd: btcjson.NewGetNetworkHashPSCmd(btcjson.Int(100), btcjson.Int(2000)), expected: `{"jsonrpc":"1.0","method":"getnetworkhashps","params":[100,2000],"id":1}`, }, { name: "include padding null parameter", id: 1, cmd: btcjson.NewGetNetworkHashPSCmd(nil, btcjson.Int(2000)), expected: `{"jsonrpc":"1.0","method":"getnetworkhashps","params":[null,2000],"id":1}`, }, { name: "omit single unnecessary null parameter", id: 1, cmd: btcjson.NewGetNetworkHashPSCmd(btcjson.Int(100), nil), expected: `{"jsonrpc":"1.0","method":"getnetworkhashps","params":[100],"id":1}`, }, { name: "omit unnecessary null parameters", id: 1, cmd: btcjson.NewGetNetworkHashPSCmd(nil, nil), expected: `{"jsonrpc":"1.0","method":"getnetworkhashps","params":[],"id":1}`, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { bytes, err := btcjson.MarshalCmd(btcjson.RpcVersion1, test.id, test.cmd) if err != nil { t.Errorf("Test #%d (%s) wrong error - got %T (%v)", i, test.name, err, err) continue } marshalled := string(bytes) if marshalled != test.expected { t.Errorf("Test #%d (%s) mismatched marshall result - got "+ "%v, want %v", i, test.name, marshalled, test.expected) continue } } }
explode_data.jsonl/54007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 686 }
[ 2830, 3393, 55438, 15613, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 15710, 981, 3749, 16094, 197, 25920, 414, 3749, 16094, 197, 42400, 914, 198, 197, 59403, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNamespaceFetchWideEntryShardNotOwned(t *testing.T) { ctx := context.NewBackground() defer ctx.Close() ns, closer := newTestNamespace(t) defer closer() for i := range ns.shards { ns.shards[i] = nil } _, err := ns.FetchWideEntry(ctx, ident.StringID("foo"), xtime.Now(), nil) require.Error(t, err) }
explode_data.jsonl/35345
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 22699, 20714, 60970, 5874, 2016, 567, 2623, 57641, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 7121, 8706, 741, 16867, 5635, 10421, 2822, 84041, 11, 12128, 1669, 501, 2271, 22699, 1155, 340, 16867, 12128, 2822, 2023, 600,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConfigParseFromMultipleDirs(t *testing.T) { f := NewFixture(t, model.UserConfigState{}) defer f.TearDown() f.File("Tiltfile", ` config.define_string_list('foo') cfg = config.parse() include('inc/Tiltfile') `) f.File("inc/Tiltfile", ` cfg = config.parse() `) _, err := f.ExecFile("Tiltfile") require.Error(t, err) require.Contains(t, err.Error(), "config.parse can only be called from one Tiltfile working directory per run") require.Contains(t, err.Error(), f.Path()) require.Contains(t, err.Error(), f.JoinPath("inc")) }
explode_data.jsonl/65240
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 2648, 14463, 3830, 32089, 97384, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 18930, 1155, 11, 1614, 7344, 2648, 1397, 37790, 16867, 282, 836, 682, 4454, 2822, 1166, 8576, 445, 51, 2963, 1192, 497, 22074, 1676, 16756, 390...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_info_Measurement exercises info.Measurement via a table-driven
// harness. The single case checks that Measurement always yields an empty
// measurement slice and a nil error, regardless of the receiver's fields.
func Test_info_Measurement(t *testing.T) {
	t.Parallel()
	type args struct {
		ctx context.Context
	}
	type fields struct {
		name     string
		fullname string
		info     metrics.Int64Measure
		kvs      map[metrics.Key]string
	}
	type want struct {
		want []metrics.Measurement
		err  error
	}
	type test struct {
		name       string
		args       args
		fields     fields
		want       want
		checkFunc  func(want, []metrics.Measurement, error) error
		beforeFunc func(args)
		afterFunc  func(args)
	}
	// defaultCheckFunc compares the returned error (via errors.Is) and the
	// measurement slice (via DeepEqual) against the expected values.
	defaultCheckFunc := func(w want, got []metrics.Measurement, err error) error {
		if !errors.Is(err, w.err) {
			return errors.Errorf("got_error: \"%#v\",\n\t\t\t\twant: \"%#v\"", err, w.err)
		}
		if !reflect.DeepEqual(got, w.want) {
			return errors.Errorf("got: \"%#v\",\n\t\t\t\twant: \"%#v\"", got, w.want)
		}
		return nil
	}
	tests := []test{
		{
			name: "always returns empty measurement",
			args: args{
				ctx: nil,
			},
			fields: fields{
				name:     "",
				fullname: "",
				info:     *metrics.Int64(metrics.ValdOrg+"/test", "test", metrics.UnitDimensionless),
				kvs:      nil,
			},
			want: want{
				want: []metrics.Measurement{},
				err:  nil,
			},
			checkFunc: defaultCheckFunc,
		},
	}
	for _, tc := range tests {
		test := tc
		t.Run(test.name, func(tt *testing.T) {
			tt.Parallel()
			// Fail the subtest on leaked goroutines, ignoring pre-existing ones.
			defer goleak.VerifyNone(tt, goleak.IgnoreCurrent())
			if test.beforeFunc != nil {
				test.beforeFunc(test.args)
			}
			if test.afterFunc != nil {
				defer test.afterFunc(test.args)
			}
			if test.checkFunc == nil {
				test.checkFunc = defaultCheckFunc
			}
			i := &info{
				name:     test.fields.name,
				fullname: test.fields.fullname,
				info:     test.fields.info,
				kvs:      test.fields.kvs,
			}
			got, err := i.Measurement(test.args.ctx)
			if err := test.checkFunc(test.want, got, err); err != nil {
				tt.Errorf("error = %v", err)
			}
		})
	}
}
explode_data.jsonl/70578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 891 }
[ 2830, 3393, 3109, 1245, 68, 24359, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 13158, 2827, 2036, 341, 197, 20985, 2266, 9328, 198, 197, 532, 13158, 5043, 2036, 341, 197, 11609, 257, 914, 198, 197, 94042, 606, 914, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestPostgresRole_CreateRole runs CreateRole against a fake Vault server.
// It expects success when the role has a valid DatabaseRef name and failure
// when the DatabaseRef name is empty.
func TestPostgresRole_CreateRole(t *testing.T) {
	srv := setupVaultServer()
	defer srv.Close()

	cfg := vaultapi.DefaultConfig()
	cfg.Address = srv.URL
	cl, err := vaultapi.NewClient(cfg)
	if !assert.Nil(t, err, "failed to create vault client") {
		return
	}

	testData := []struct {
		testName    string
		pgClient    *PostgresRole
		expectedErr bool
	}{
		{
			testName: "Create Role successful",
			pgClient: &PostgresRole{
				pgRole: &api.PostgresRole{
					ObjectMeta: metav1.ObjectMeta{
						Name:      "pg-read",
						Namespace: "pg",
					},
					Spec: api.PostgresRoleSpec{
						DatabaseRef: &corev1.LocalObjectReference{
							Name: "postgres",
						},
						CreationStatements: []string{"create table"},
					},
				},
				vaultClient:  cl,
				databasePath: "database",
			},
			expectedErr: false,
		},
		{
			// An empty DatabaseRef name must make CreateRole fail.
			testName: "Create Role failed",
			pgClient: &PostgresRole{
				pgRole: &api.PostgresRole{
					ObjectMeta: metav1.ObjectMeta{
						Name:      "pg-read",
						Namespace: "pg",
					},
					Spec: api.PostgresRoleSpec{
						DatabaseRef: &corev1.LocalObjectReference{
							Name: "",
						},
						CreationStatements: []string{"create table"},
					},
				},
				vaultClient:  cl,
				databasePath: "database",
			},
			expectedErr: true,
		},
	}
	for _, test := range testData {
		t.Run(test.testName, func(t *testing.T) {
			p := test.pgClient
			err := p.CreateRole()
			if test.expectedErr {
				assert.NotNil(t, err)
			} else {
				assert.Nil(t, err)
			}
		})
	}
}
explode_data.jsonl/24984
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 705 }
[ 2830, 3393, 4133, 17818, 9030, 34325, 9030, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 6505, 79177, 5475, 741, 16867, 43578, 10421, 2822, 50286, 1669, 34584, 2068, 13275, 2648, 741, 50286, 26979, 284, 43578, 20893, 271, 39407, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPrepareBadSQLFailure(t *testing.T) { t.Parallel() conn := mustConnectString(t, os.Getenv("PGX_TEST_DATABASE")) defer closeConn(t, conn) if _, err := conn.Prepare(context.Background(), "badSQL", "select foo"); err == nil { t.Fatal("Prepare should have failed with syntax error") } ensureConnValid(t, conn) }
explode_data.jsonl/40014
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 50590, 17082, 6688, 17507, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 32917, 1669, 1969, 14611, 703, 1155, 11, 2643, 64883, 445, 11383, 55, 11641, 45510, 5455, 16867, 3265, 9701, 1155, 11, 4534, 692, 743, 8358, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDefineThis checks bundling with user-configured defines for "this",
// "this.foo", and "this.foo.bar". At the top level and inside arrow
// functions the defines should be substituted; inside a regular function
// expression "this" is rebound, so nothing should be substituted there.
func TestDefineThis(t *testing.T) {
	defines := config.ProcessDefines(map[string]config.DefineData{
		"this": {
			DefineFunc: func(args config.DefineArgs) js_ast.E {
				return &js_ast.ENumber{Value: 1}
			},
		},
		"this.foo": {
			DefineFunc: func(args config.DefineArgs) js_ast.E {
				return &js_ast.ENumber{Value: 2}
			},
		},
		"this.foo.bar": {
			DefineFunc: func(args config.DefineArgs) js_ast.E {
				return &js_ast.ENumber{Value: 3}
			},
		},
	})
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				ok(
					// These should be fully substituted
					this,
					this.foo,
					this.foo.bar,

					// Should just substitute "this.foo"
					this.foo.baz,

					// This should not be substituted
					this.bar,
				);

				// This code should be the same as above
				(() => {
					ok(
						this,
						this.foo,
						this.foo.bar,
						this.foo.baz,
						this.bar,
					);
				})();

				// Nothing should be substituted in this code
				(function() {
					doNotSubstitute(
						this,
						this.foo,
						this.foo.bar,
						this.foo.baz,
						this.bar,
					);
				})();
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
			Defines:       &defines,
		},
	})
}
explode_data.jsonl/38593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 665 }
[ 2830, 3393, 35338, 1986, 1155, 353, 8840, 836, 8, 341, 7452, 1543, 1669, 2193, 29012, 73816, 9147, 14032, 60, 1676, 49947, 482, 1043, 515, 197, 197, 1, 574, 788, 341, 298, 197, 35338, 9626, 25, 2915, 7356, 2193, 49947, 482, 4117, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_NotSupport(t *testing.T) { func() { defer func() { if r := recover(); r == nil { t.Fail() } }() NewIter(testIter{}) }() func() { defer func() { if r := recover(); r == nil { t.Fail() } }() NewIter(&struct { }{}) }() }
explode_data.jsonl/52936
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 60816, 7916, 1155, 353, 8840, 836, 8, 341, 29244, 368, 341, 197, 16867, 2915, 368, 341, 298, 743, 435, 1669, 11731, 2129, 435, 621, 2092, 341, 571, 3244, 57243, 741, 298, 197, 532, 197, 197, 69826, 197, 197, 3564, 8537, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseIgnoredWords(t *testing.T) { deck := ` Deck Commander Sideboard 1 Llanowar Elves (WAR) 223 Deck Commander 2 Elf Scout (WAR) 224 ` r := strings.NewReader(deck) cards, err := parseDeck(r) if err != nil { t.Fatalf("failed to parse deck: %v", err) } if len(cards) > 2 { t.Fatalf("Wrong amount of cards. want 2, got %d", len(cards)) } }
explode_data.jsonl/64510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 14463, 43337, 3018, 23489, 1155, 353, 8840, 836, 8, 341, 197, 33425, 1669, 22074, 197, 39368, 198, 97493, 261, 198, 7568, 577, 2482, 271, 197, 16, 444, 10715, 363, 277, 97604, 320, 45458, 8, 220, 17, 17, 18, 198, 197, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWithError_SetsSpecifiedErrorOnPipe(t *testing.T) { t.Parallel() fakeErr := errors.New("oh no") p := script.NewPipe().WithError(fakeErr) if p.Error() != fakeErr { t.Errorf("want %q, got %q", fakeErr, p.Error()) } }
explode_data.jsonl/51530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 66102, 1098, 1415, 8327, 1870, 1454, 1925, 34077, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1166, 726, 7747, 1669, 5975, 7121, 445, 2267, 902, 1138, 3223, 1669, 5316, 7121, 34077, 1005, 66102, 74138, 7747, 340, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewTemplateContext(t *testing.T) { context, _ := newTemplateContext([]string{"test/values.yaml"}, []string{"Foo=baz"}, []string{}) if context["Foo"] != "baz" { t.Errorf("Got %v, expected baz", context["foo"]) } }
explode_data.jsonl/73839
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 3564, 7275, 1972, 1155, 353, 8840, 836, 8, 341, 28413, 11, 716, 1669, 501, 7275, 1972, 10556, 917, 4913, 1944, 96985, 33406, 14345, 3056, 917, 4913, 40923, 22086, 1370, 14345, 3056, 917, 37790, 743, 2266, 1183, 40923, 1341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestLiveUpdateInSecondImageOfImageDependency checks that a live update
// configured on the second image of a multi-stage image dependency still
// performs one docker copy, exec, and restart when a synced file changes,
// and that the changed file shows up in the logs.
func TestLiveUpdateInSecondImageOfImageDependency(t *testing.T) {
	f := newBDFixture(t, k8s.EnvDockerDesktop, container.RuntimeDocker)
	defer f.TearDown()

	steps := []model.LiveUpdateSyncStep{model.LiveUpdateSyncStep{
		Source: f.JoinPath("sancho"),
		Dest:   "/go/src/github.com/tilt-dev/sancho",
	}}
	lu := assembleLiveUpdate(steps, SanchoRunSteps, true, nil, f)
	tCase := testCase{
		manifest:                 NewSanchoDockerBuildMultiStageManifestWithLiveUpdate(f, lu),
		changedFiles:             []string{"sancho/a.txt"},
		expectDockerCopyCount:    1,
		expectDockerExecCount:    1,
		expectDockerRestartCount: 1,
		logsContain:              []string{f.JoinPath("sancho/a.txt")},
	}
	runTestCase(t, f, tCase)
}
explode_data.jsonl/35181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 318 }
[ 2830, 3393, 20324, 4289, 641, 15666, 1906, 2124, 1906, 36387, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 33, 5262, 12735, 1155, 11, 595, 23, 82, 81214, 35, 13659, 23597, 11, 5476, 16706, 35, 13659, 340, 16867, 282, 836, 682, 4454,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWorkflowStatusMetric(t *testing.T) { ctx := context.Background() wf := unmarshalWF(workflowStatusMetric) woc := newWoc(*wf) woc.operate(ctx) // Must only be two (completed: true), (podRunning: true) assert.Len(t, woc.wf.Status.Conditions, 2) }
explode_data.jsonl/71015
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 62768, 2522, 54310, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 6692, 69, 1669, 650, 27121, 32131, 31470, 4965, 2522, 54310, 340, 6692, 509, 1669, 501, 54, 509, 4071, 43083, 340, 6692, 509, 71521, 349, 7502,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Pagination_PrevNext_2NextLinksWithSameHref builds a document with
// two anchors sharing the same href and checks next-page outlink detection
// before and after the second, confident anchor is added.
func Test_Pagination_PrevNext_2NextLinksWithSameHref(t *testing.T) {
	doc := testutil.CreateHTML()
	body := dom.QuerySelector(doc, "body")
	root := testutil.CreateDiv(0)
	dom.AppendChild(body, root)

	anchor1 := testutil.CreateAnchor("page2", "dummy link")
	anchor2 := testutil.CreateAnchor("page2", "next page")
	dom.AppendChild(root, anchor1)
	// With only anchor1 present, no outlink should be detected.
	assertDefaultDocumenOutlink(t, doc, nil, nil)

	// anchor1 is not a confident next page link, but anchor2 is due to the link text.
	// NOTE(review): the expected outlink below is anchor1 even though
	// anchor2 carries the confident text — presumably the first anchor with
	// the shared href is the one reported; confirm against the assertion
	// helper's semantics.
	dom.AppendChild(root, anchor2)
	assertDefaultDocumenOutlink(t, doc, nil, anchor1)
}
explode_data.jsonl/10826
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 1088, 10353, 1088, 7282, 5847, 62, 17, 5847, 3939, 16056, 19198, 64919, 1155, 353, 8840, 836, 8, 341, 59536, 1669, 1273, 1314, 7251, 5835, 741, 35402, 1669, 4719, 15685, 5877, 19153, 11, 330, 2599, 5130, 33698, 1669, 1273, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Resolve_Operations_Pass_BuiltinType checks that an HTTP operation
// spec using only builtin types (string) unmarshals and enriches with no
// errors.
func Test_Resolve_Operations_Pass_BuiltinType(t *testing.T) {
	data := `
http:
    test:
        some_url:
            endpoint: GET /some/url/{id:string}
            query:
                the_query: string
            header:
                The-Header: string
            response:
                ok: empty
`
	old, err := unmarshalSpec([]byte(data))
	assert.Equal(t, err, nil)
	errors := enrichSpec(old)
	// Enrichment of a spec built purely from builtin types must be clean.
	assert.Equal(t, len(errors), 0)
}
explode_data.jsonl/79846
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 62, 56808, 2232, 712, 804, 1088, 395, 1668, 25628, 929, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 22074, 1254, 510, 262, 1273, 510, 286, 1045, 2903, 510, 310, 14887, 25, 7890, 608, 14689, 57254, 9388, 307, 22423, 532, 310,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestParseEventHubMetadata runs the Azure Event Hub metadata parser over a
// table of trigger-metadata fixtures, checking each fixture's isError flag
// against the parser's result.
func TestParseEventHubMetadata(t *testing.T) {
	// Test first with valid resolved environment
	for _, testData := range parseEventHubMetadataDataset {
		_, err := parseAzureEventHubMetadata(&ScalerConfig{TriggerMetadata: testData.metadata, ResolvedEnv: sampleEventHubResolvedEnv, AuthParams: map[string]string{}})
		if err != nil && !testData.isError {
			t.Errorf("Expected success but got error: %s", err)
		}
		if testData.isError && err == nil {
			t.Error("Expected error and got success")
		}
	}
}
explode_data.jsonl/13466
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 14463, 1556, 19316, 14610, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 1156, 448, 2697, 19673, 4573, 198, 2023, 8358, 67348, 1669, 2088, 4715, 1556, 19316, 14610, 33363, 341, 197, 197, 6878, 1848, 1669, 4715, 78107, 1556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestSingleSQLError runs two rules whose SQL hits runtime type errors on
// some rows of the ldemo stream, and checks both the emitted results
// (including the error tuples) and the per-operator metrics afterwards.
func TestSingleSQLError(t *testing.T) {
	//Reset
	streamList := []string{"ldemo"}
	handleStream(false, streamList, t)
	//Data setup
	var tests = []ruleTest{
		{
			name: `TestSingleSQLErrorRule1`,
			// The WHERE clause compares with an int; rows whose size field
			// is a string produce a runtime error tuple instead of a result.
			sql: `SELECT color, ts FROM ldemo where size >= 3`,
			r: [][]map[string]interface{}{
				{{
					"color": "red",
					"ts":    float64(1541152486013),
				}},
				{{
					"error": "run Where error: invalid operation string(string) >= int64(3)",
				}},
				{{
					"ts": float64(1541152487632),
				}},
			},
			m: map[string]interface{}{
				"op_preprocessor_ldemo_0_exceptions_total":   int64(0),
				"op_preprocessor_ldemo_0_process_latency_ms": int64(0),
				"op_preprocessor_ldemo_0_records_in_total":   int64(5),
				"op_preprocessor_ldemo_0_records_out_total":  int64(5),

				"op_project_0_exceptions_total":   int64(1),
				"op_project_0_process_latency_ms": int64(0),
				"op_project_0_records_in_total":   int64(3),
				"op_project_0_records_out_total":  int64(2),

				"sink_mockSink_0_exceptions_total":  int64(0),
				"sink_mockSink_0_records_in_total":  int64(3),
				"sink_mockSink_0_records_out_total": int64(3),

				"source_ldemo_0_exceptions_total":  int64(0),
				"source_ldemo_0_records_in_total":  int64(5),
				"source_ldemo_0_records_out_total": int64(5),

				"op_filter_0_exceptions_total":   int64(1),
				"op_filter_0_process_latency_ms": int64(0),
				"op_filter_0_records_in_total":   int64(5),
				"op_filter_0_records_out_total":  int64(2),
			},
		},
		{
			name: `TestSingleSQLErrorRule2`,
			// Multiplying a string-valued size by an int fails in the
			// SELECT clause for that row.
			sql: `SELECT size * 5 FROM ldemo`,
			r: [][]map[string]interface{}{
				{{
					"rengine_field_0": float64(15),
				}},
				{{
					"error": "run Select error: invalid operation string(string) * int64(5)",
				}},
				{{
					"rengine_field_0": float64(15),
				}},
				{{
					"rengine_field_0": float64(10),
				}},
				{{}},
			},
			m: map[string]interface{}{
				"op_preprocessor_ldemo_0_exceptions_total":   int64(0),
				"op_preprocessor_ldemo_0_process_latency_ms": int64(0),
				"op_preprocessor_ldemo_0_records_in_total":   int64(5),
				"op_preprocessor_ldemo_0_records_out_total":  int64(5),

				"op_project_0_exceptions_total":   int64(1),
				"op_project_0_process_latency_ms": int64(0),
				"op_project_0_records_in_total":   int64(5),
				"op_project_0_records_out_total":  int64(4),

				"sink_mockSink_0_exceptions_total":  int64(0),
				"sink_mockSink_0_records_in_total":  int64(5),
				"sink_mockSink_0_records_out_total": int64(5),

				"source_ldemo_0_exceptions_total":  int64(0),
				"source_ldemo_0_records_in_total":  int64(5),
				"source_ldemo_0_records_out_total": int64(5),
			},
		},
	}
	handleStream(true, streamList, t)
	doRuleTest(t, tests, 0, &api.RuleOption{
		BufferLength: 100,
	})
}
explode_data.jsonl/76172
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1374 }
[ 2830, 3393, 10888, 64308, 94618, 1155, 353, 8840, 836, 8, 341, 197, 322, 14828, 198, 44440, 852, 1669, 3056, 917, 4913, 507, 6726, 16707, 53822, 3027, 3576, 11, 4269, 852, 11, 259, 340, 197, 322, 1043, 6505, 198, 2405, 7032, 284, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAutoOrientation(t *testing.T) { toBW := func(img image.Image) []byte { b := img.Bounds() data := make([]byte, 0, b.Dx()*b.Dy()) for x := b.Min.X; x < b.Max.X; x++ { for y := b.Min.Y; y < b.Max.Y; y++ { c := color.GrayModel.Convert(img.At(x, y)).(color.Gray) if c.Y < 128 { data = append(data, 1) } else { data = append(data, 0) } } } return data } f, err := os.Open("testdata/orientation_0.jpg") if err != nil { t.Fatalf("os.Open(%q): %v", "testdata/orientation_0.jpg", err) } orig, _, err := image.Decode(f) if err != nil { t.Fatalf("image.Decode(%q): %v", "testdata/orientation_0.jpg", err) } origBW := toBW(orig) testCases := []struct { path string }{ {"testdata/orientation_0.jpg"}, {"testdata/orientation_1.jpg"}, {"testdata/orientation_2.jpg"}, {"testdata/orientation_3.jpg"}, {"testdata/orientation_4.jpg"}, {"testdata/orientation_5.jpg"}, {"testdata/orientation_6.jpg"}, {"testdata/orientation_7.jpg"}, {"testdata/orientation_8.jpg"}, } for _, tc := range testCases { img, err := Open(tc.path, AutoOrientation(true)) if err != nil { t.Fatal(err) } if img.Bounds() != orig.Bounds() { t.Fatalf("%s: got bounds %v want %v", tc.path, img.Bounds(), orig.Bounds()) } imgBW := toBW(img) if !bytes.Equal(imgBW, origBW) { t.Fatalf("%s: got bw data %v want %v", tc.path, imgBW, origBW) } } if _, err := Decode(strings.NewReader("invalid data"), AutoOrientation(true)); err == nil { t.Fatal("expected error got nil") } }
explode_data.jsonl/2438
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 706 }
[ 2830, 3393, 13253, 22332, 1155, 353, 8840, 836, 8, 341, 31709, 77563, 1669, 2915, 11022, 2168, 7528, 8, 3056, 3782, 341, 197, 2233, 1669, 4964, 72133, 741, 197, 8924, 1669, 1281, 10556, 3782, 11, 220, 15, 11, 293, 909, 87, 25010, 65, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestConfirmECS(t *testing.T) { t.Parallel() ECStest := []byte(EncryptConfirmString) if !ConfirmECS(ECStest) { t.Errorf("Test failed. TestConfirmECS: Error finding ECS.") } }
explode_data.jsonl/11028
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 16728, 36, 6412, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 7498, 623, 477, 1669, 3056, 3782, 7, 61520, 16728, 703, 340, 743, 753, 16728, 36, 6412, 86062, 623, 477, 8, 341, 197, 3244, 13080, 445, 2271, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestConfigTxContext_UpdateAdmin exercises updating an admin's certificate
// through a config transaction: admin2 is added, admin2 then rotates the
// original admin's certificate, and the test verifies that the old session
// is invalidated and a new session with the updated credentials works.
// The test is currently skipped pending the referenced server issue.
func TestConfigTxContext_UpdateAdmin(t *testing.T) {
	t.Skip("Update admin is a config update, TODO in issue: https://github.com/hyperledger-labs/orion-server/issues/148")
	clientCryptoDir := testutils.GenerateTestClientCrypto(t, []string{"admin", "admin2", "adminUpdated", "server"})
	testServer, _, _, err := SetupTestServer(t, clientCryptoDir)
	defer func() {
		if testServer != nil {
			_ = testServer.Stop()
		}
	}()
	require.NoError(t, err)
	StartTestServer(t, testServer)
	serverPort, err := testServer.Port()
	require.NoError(t, err)

	admin2Cert, _ := testutils.LoadTestClientCrypto(t, clientCryptoDir, "admin2")
	adminUpdatedCert, _ := testutils.LoadTestClientCrypto(t, clientCryptoDir, "adminUpdated")
	admin2 := &types.Admin{Id: "admin2", Certificate: admin2Cert.Raw}

	bcdb := createDBInstance(t, clientCryptoDir, serverPort)
	session1 := openUserSession(t, bcdb, "admin", clientCryptoDir)

	// Add admin2
	tx1, err := session1.ConfigTx()
	require.NoError(t, err)
	require.NotNil(t, tx1)
	err = tx1.AddAdmin(admin2)
	require.NoError(t, err)
	txID, receipt, err := tx1.Commit(true)
	require.NoError(t, err)
	require.NotNil(t, txID)
	require.NotNil(t, receipt)

	// Update an admin
	session2 := openUserSession(t, bcdb, "admin2", clientCryptoDir)
	tx2, err := session2.ConfigTx()
	require.NoError(t, err)
	err = tx2.UpdateAdmin(&types.Admin{Id: "admin", Certificate: adminUpdatedCert.Raw})
	require.NoError(t, err)
	// Updating a non-existent admin must be rejected before commit.
	err = tx2.UpdateAdmin(&types.Admin{Id: "non-admin", Certificate: []byte("bad-cert")})
	require.EqualError(t, err, "admin does not exist in current config: non-admin")
	txID, receipt, err = tx2.Commit(true)
	require.NoError(t, err)
	require.NotNil(t, txID)
	require.NotNil(t, receipt)

	// Verify the cluster config now carries the rotated certificate.
	tx, err := session2.ConfigTx()
	require.NoError(t, err)
	clusterConfig, err := tx.GetClusterConfig()
	require.NoError(t, err)
	require.NotNil(t, clusterConfig)
	require.Len(t, clusterConfig.Admins, 2)
	found, index := AdminExists("admin", clusterConfig.Admins)
	require.True(t, found)
	require.EqualValues(t, clusterConfig.Admins[index].Certificate, adminUpdatedCert.Raw)

	// session1 by updated admin cannot execute additional transactions, need to recreate session
	tx3, err := session1.ConfigTx()
	require.EqualError(t, err, "error handling request, server returned: status: 401 Unauthorized, message: signature verification failed")
	require.Nil(t, tx3)

	// need to recreate session with new credentials
	session3, err := bcdb.Session(&sdkConfig.SessionConfig{
		UserConfig: &sdkConfig.UserConfig{
			UserID:         "admin",
			CertPath:       path.Join(clientCryptoDir, "adminUpdated.pem"),
			PrivateKeyPath: path.Join(clientCryptoDir, "adminUpdated.key"),
		},
	})
	require.NoError(t, err)
	tx3, err = session3.ConfigTx()
	require.NoError(t, err)
	require.NotNil(t, tx3)
}
explode_data.jsonl/65329
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1012 }
[ 2830, 3393, 2648, 31584, 1972, 47393, 7210, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 4289, 3986, 374, 264, 2193, 2647, 11, 5343, 304, 4265, 25, 3703, 1110, 5204, 905, 7530, 39252, 50704, 2852, 3435, 5144, 290, 26300, 38745, 14, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMatCheckRange(t *testing.T) { mat1 := NewMatWithSize(101, 102, MatTypeCV8U) ret := CheckRange(mat1) if !ret { t.Error("TestCheckRange error.") } }
explode_data.jsonl/81732
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 11575, 3973, 6046, 1155, 353, 8840, 836, 8, 341, 59874, 16, 1669, 1532, 11575, 2354, 1695, 7, 16, 15, 16, 11, 220, 16, 15, 17, 11, 6867, 929, 19589, 23, 52, 340, 11262, 1669, 4248, 6046, 33397, 16, 340, 743, 753, 2122,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestFilter_LoadNetWordDict is generated table-test scaffolding for
// Filter.LoadNetWordDict, which (per its name) apparently loads a word
// dictionary from a remote URL — TODO confirm. No cases are defined yet.
func TestFilter_LoadNetWordDict(t *testing.T) {
	type fields struct {
		trie  *Trie
		noise *regexp.Regexp
	}
	type args struct {
		url string
	}
	tests := []struct {
		name    string
		fields  fields
		args    args
		wantErr bool
	}{
		// TODO: Add test cases.
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			filter := &Filter{
				trie:  tt.fields.trie,
				noise: tt.fields.noise,
			}
			if err := filter.LoadNetWordDict(tt.args.url); (err != nil) != tt.wantErr {
				t.Errorf("Filter.LoadNetWordDict() error = %v, wantErr %v", err, tt.wantErr)
			}
		})
	}
}
explode_data.jsonl/81001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 5632, 19553, 6954, 10879, 13448, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 197, 8927, 220, 353, 51, 7231, 198, 197, 197, 52218, 353, 55796, 8989, 4580, 198, 197, 532, 13158, 2827, 2036, 341, 197, 19320, 91...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeleteCredentialsError(t *testing.T) { c := &internal.MockedConnection{ MockSend: func(query []byte, options types.QueryOptions) *types.KuzzleResponse { return &types.KuzzleResponse{Error: types.KuzzleError{Message: "Unit test error"}} }, } k, _ := kuzzle.NewKuzzle(c, nil) err := k.Security.DeleteCredentials("strategy", "id", nil) assert.NotNil(t, err) }
explode_data.jsonl/58818
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 6435, 27025, 1454, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 609, 10481, 24664, 291, 4526, 515, 197, 9209, 1176, 11505, 25, 2915, 10741, 3056, 3782, 11, 2606, 4494, 15685, 3798, 8, 353, 9242, 11352, 14945, 2582, 341, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_instance_isReadyToAttach table-tests instance.isReadyToAttach: an
// instance launched ten minutes ago is only considered ready when it is in
// the running state AND its ASG's health-check grace period (seconds) has
// already elapsed since launch.
func Test_instance_isReadyToAttach(t *testing.T) {
	//now := time.Now()
	tenMinutesAgo := time.Now().Add(-10 * time.Minute)
	tests := []struct {
		name     string
		instance instance
		asg      *autoScalingGroup
		want     bool
	}{
		{
			// Pending instances are never ready, regardless of timing.
			name: "pending instance",
			instance: instance{
				Instance: &ec2.Instance{
					InstanceId: aws.String("i-123"),
					LaunchTime: &tenMinutesAgo,
					State: &ec2.InstanceState{
						Name: aws.String(ec2.InstanceStateNamePending),
					},
				},
			},
			asg: &autoScalingGroup{
				name: "my-asg",
				Group: &autoscaling.Group{
					HealthCheckGracePeriod: aws.Int64(3600),
				},
			},
			want: false,
		},
		{
			// Running, but the 3600s grace period exceeds the 10min age.
			name: "not-ready running instance",
			instance: instance{
				Instance: &ec2.Instance{
					InstanceId: aws.String("i-123"),
					LaunchTime: &tenMinutesAgo,
					State: &ec2.InstanceState{
						Name: aws.String(ec2.InstanceStateNameRunning),
					},
				},
			},
			asg: &autoScalingGroup{
				name: "my-asg",
				Group: &autoscaling.Group{
					HealthCheckGracePeriod: aws.Int64(3600),
				},
			},
			want: false,
		},
		{
			// Running and the 300s grace period has already passed.
			name: "ready running instance",
			instance: instance{
				Instance: &ec2.Instance{
					InstanceId: aws.String("i-123"),
					LaunchTime: &tenMinutesAgo,
					State: &ec2.InstanceState{
						Name: aws.String(ec2.InstanceStateNameRunning),
					},
				},
			},
			asg: &autoScalingGroup{
				name: "my-asg",
				Group: &autoscaling.Group{
					HealthCheckGracePeriod: aws.Int64(300),
				},
			},
			want: true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if got := tt.instance.isReadyToAttach(tt.asg); got != tt.want {
				t.Errorf("instance.isReadyToAttach() = %v, want %v", got, tt.want)
			}
		})
	}
}
explode_data.jsonl/55208
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 836 }
[ 2830, 3393, 11904, 6892, 19202, 1249, 30485, 1155, 353, 8840, 836, 8, 341, 197, 322, 3328, 1669, 882, 13244, 741, 197, 1960, 27720, 32, 3346, 1669, 882, 13244, 1005, 2212, 4080, 16, 15, 353, 882, 75770, 692, 78216, 1669, 3056, 1235, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2