text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestWFLevelHostAliases(t *testing.T) { ctx := context.Background() woc := newWoc() woc.execWf.Spec.HostAliases = []apiv1.HostAlias{ {IP: "127.0.0.1"}, {IP: "127.0.0.1"}, } tmplCtx, err := woc.createTemplateContext(wfv1.ResourceScopeLocal, "") assert.NoError(t, err) _, err = woc.executeContainer(ctx, woc.execWf.Spec.Entrypoint, tmplCtx.GetTemplateScope(), &woc.execWf.Spec.Templates[0], &wfv1.WorkflowStep{}, &executeTemplateOpts{}) assert.NoError(t, err) pods, err := listPods(woc) assert.NoError(t, err) assert.Len(t, pods.Items, 1) pod := pods.Items[0] assert.NotNil(t, pod.Spec.HostAliases) }
explode_data.jsonl/75391
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 32131, 4449, 9296, 95209, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 6692, 509, 1669, 501, 54, 509, 741, 6692, 509, 15776, 54, 69, 36473, 29840, 95209, 284, 3056, 391, 344, 16, 29840, 22720, 515, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSocketWriter_Write_reconnect(t *testing.T) { listener, err := net.Listen("tcp", "127.0.0.1:0") require.NoError(t, err) sw := newSocketWriter() sw.Address = "tcp://" + listener.Addr().String() err = sw.Connect() require.NoError(t, err) sw.Conn.(*net.TCPConn).SetReadBuffer(256) lconn, err := listener.Accept() require.NoError(t, err) lconn.(*net.TCPConn).SetWriteBuffer(256) lconn.Close() sw.Conn = nil wg := sync.WaitGroup{} wg.Add(1) var lerr error go func() { lconn, lerr = listener.Accept() wg.Done() }() metrics := []telegraf.Metric{testutil.TestMetric(1, "testerr")} err = sw.Write(metrics) require.NoError(t, err) wg.Wait() assert.NoError(t, lerr) mbsout, _ := sw.Serialize(metrics[0]) buf := make([]byte, 256) n, err := lconn.Read(buf) require.NoError(t, err) assert.Equal(t, string(mbsout), string(buf[:n])) }
explode_data.jsonl/34022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 376 }
[ 2830, 3393, 10286, 6492, 31825, 1288, 6459, 1155, 353, 8840, 836, 8, 341, 14440, 798, 11, 1848, 1669, 4179, 68334, 445, 27161, 497, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 15, 1138, 17957, 35699, 1155, 11, 1848, 692, 77295, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProgressNoop(t *testing.T) { for key, progress := range makeProgressIndicators() { // check initial state assert.False(t, progress.IsDone(), "[%s] Progress indicator should not be finished yet", key) assert.Equal(t, 0, progress.GetCompletionPercent(), "[%s] Progress indicator should be at 0%", key) // set total number of steps progress.SetTotal(200) assert.False(t, progress.IsDone(), "[%s] Progress indicator should not be finished yet", key) // advance for i := 0; i < 200; i++ { expectedPercent := int(math.Floor(float64(i) / 2.0)) assert.Equal(t, expectedPercent, progress.GetCompletionPercent(), "[%s] Progress indicator should be at %d percent", key, expectedPercent) progress.Advance() } // check completion assert.Equal(t, 100, progress.GetCompletionPercent(), "[%s] Progress indicator should be at 100%", key) assert.False(t, progress.IsDone(), "[%s] Progress indicator should not be finished yet", key) progress.Done() assert.True(t, progress.IsDone(), "[%s] Progress indicator should be finished", key) } }
explode_data.jsonl/32260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 343 }
[ 2830, 3393, 9496, 2753, 453, 1155, 353, 8840, 836, 8, 341, 2023, 1376, 11, 5098, 1669, 2088, 1281, 9496, 1425, 42052, 368, 341, 197, 197, 322, 1779, 2856, 1584, 198, 197, 6948, 50757, 1155, 11, 5098, 4506, 17453, 1507, 94090, 82, 60, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetChannelMembers(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() defer th.TearDown() Client := th.Client members, resp := Client.GetChannelMembers(th.BasicChannel.Id, 0, 60, "") CheckNoError(t, resp) if len(*members) != 3 { t.Fatal("should only be 3 users in channel") } members, resp = Client.GetChannelMembers(th.BasicChannel.Id, 0, 2, "") CheckNoError(t, resp) if len(*members) != 2 { t.Fatal("should only be 2 users") } members, resp = Client.GetChannelMembers(th.BasicChannel.Id, 1, 1, "") CheckNoError(t, resp) if len(*members) != 1 { t.Fatal("should only be 1 user") } members, resp = Client.GetChannelMembers(th.BasicChannel.Id, 1000, 100000, "") CheckNoError(t, resp) if len(*members) != 0 { t.Fatal("should be 0 users") } _, resp = Client.GetChannelMembers("", 0, 60, "") CheckBadRequestStatus(t, resp) _, resp = Client.GetChannelMembers("junk", 0, 60, "") CheckBadRequestStatus(t, resp) _, resp = Client.GetChannelMembers(model.NewId(), 0, 60, "") CheckForbiddenStatus(t, resp) Client.Logout() _, resp = Client.GetChannelMembers(th.BasicChannel.Id, 0, 60, "") CheckUnauthorizedStatus(t, resp) user := th.CreateUser() Client.Login(user.Email, user.Password) _, resp = Client.GetChannelMembers(th.BasicChannel.Id, 0, 60, "") CheckForbiddenStatus(t, resp) _, resp = th.SystemAdminClient.GetChannelMembers(th.BasicChannel.Id, 0, 60, "") CheckNoError(t, resp) }
explode_data.jsonl/65657
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 534 }
[ 2830, 3393, 1949, 9629, 24371, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 2109, 7062, 11, 9039, 1669, 8423, 2234, 9629...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestChangefeedSchemaChangeAllowBackfill(t *testing.T) { defer leaktest.AfterTest(t)() testFn := func(t *testing.T, db *gosql.DB, f testfeedFactory) { sqlDB := sqlutils.MakeSQLRunner(db) t.Run(`add column with default`, func(t *testing.T) { sqlDB.Exec(t, `CREATE TABLE add_column_def (a INT PRIMARY KEY)`) sqlDB.Exec(t, `INSERT INTO add_column_def VALUES (1)`) sqlDB.Exec(t, `INSERT INTO add_column_def VALUES (2)`) addColumnDef := f.Feed(t, `CREATE CHANGEFEED FOR add_column_def`) defer addColumnDef.Close(t) assertPayloads(t, addColumnDef, []string{ `add_column_def: [1]->{"a": 1}`, `add_column_def: [2]->{"a": 2}`, }) sqlDB.Exec(t, `ALTER TABLE add_column_def ADD COLUMN b STRING DEFAULT 'd'`) assertPayloads(t, addColumnDef, []string{ // TODO(dan): Track duplicates more precisely in sinklessFeed/tableFeed. // `add_column_def: [1]->{"a": 1}`, // `add_column_def: [2]->{"a": 2}`, `add_column_def: [1]->{"a": 1, "b": "d"}`, `add_column_def: [2]->{"a": 2, "b": "d"}`, }) }) t.Run(`add column computed`, func(t *testing.T) { sqlDB.Exec(t, `CREATE TABLE add_col_comp (a INT PRIMARY KEY, b INT AS (a + 5) STORED)`) sqlDB.Exec(t, `INSERT INTO add_col_comp VALUES (1)`) sqlDB.Exec(t, `INSERT INTO add_col_comp (a) VALUES (2)`) addColComp := f.Feed(t, `CREATE CHANGEFEED FOR add_col_comp`) defer addColComp.Close(t) assertPayloads(t, addColComp, []string{ `add_col_comp: [1]->{"a": 1, "b": 6}`, `add_col_comp: [2]->{"a": 2, "b": 7}`, }) sqlDB.Exec(t, `ALTER TABLE add_col_comp ADD COLUMN c INT AS (a + 10) STORED`) assertPayloads(t, addColComp, []string{ // TODO(dan): Track duplicates more precisely in sinklessFeed/tableFeed. 
// `add_col_comp: [1]->{"a": 1, "b": 6}`, // `add_col_comp: [2]->{"a": 2, "b": 7}`, `add_col_comp: [1]->{"a": 1, "b": 6, "c": 11}`, `add_col_comp: [2]->{"a": 2, "b": 7, "c": 12}`, }) }) t.Run(`drop column`, func(t *testing.T) { sqlDB.Exec(t, `CREATE TABLE drop_column (a INT PRIMARY KEY, b STRING)`) sqlDB.Exec(t, `INSERT INTO drop_column VALUES (1, '1')`) sqlDB.Exec(t, `INSERT INTO drop_column VALUES (2, '2')`) dropColumn := f.Feed(t, `CREATE CHANGEFEED FOR drop_column`) defer dropColumn.Close(t) assertPayloads(t, dropColumn, []string{ `drop_column: [1]->{"a": 1, "b": "1"}`, `drop_column: [2]->{"a": 2, "b": "2"}`, }) sqlDB.Exec(t, `ALTER TABLE drop_column DROP COLUMN b`) sqlDB.Exec(t, `INSERT INTO drop_column VALUES (3)`) // Dropped columns are immediately invisible. assertPayloads(t, dropColumn, []string{ `drop_column: [1]->{"a": 1}`, `drop_column: [2]->{"a": 2}`, `drop_column: [3]->{"a": 3}`, // TODO(dan): Track duplicates more precisely in sinklessFeed/tableFeed. // `drop_column: [1]->{"a": 1}`, // `drop_column: [2]->{"a": 2}`, }) }) t.Run(`multiple alters`, func(t *testing.T) { sqlDB.Exec(t, `CREATE TABLE multiple_alters (a INT PRIMARY KEY, b STRING)`) sqlDB.Exec(t, `INSERT INTO multiple_alters VALUES (1, '1')`) sqlDB.Exec(t, `INSERT INTO multiple_alters VALUES (2, '2')`) // Set up a hook to pause the changfeed on the next emit. var wg sync.WaitGroup waitSinkHook := func() error { wg.Wait() return nil } knobs := f.Server().(*server.TestServer).Cfg.TestingKnobs. DistSQL.(*distsqlrun.TestingKnobs). Changefeed.(*TestingKnobs) knobs.BeforeEmitRow = waitSinkHook multipleAlters := f.Feed(t, `CREATE CHANGEFEED FOR multiple_alters`) defer multipleAlters.Close(t) assertPayloads(t, multipleAlters, []string{ `multiple_alters: [1]->{"a": 1, "b": "1"}`, `multiple_alters: [2]->{"a": 2, "b": "2"}`, }) // Wait on the next emit, queue up three ALTERs. The next poll process // will see all of them at once. 
wg.Add(1) waitForSchemaChange(t, sqlDB, `ALTER TABLE multiple_alters DROP COLUMN b`) waitForSchemaChange(t, sqlDB, `ALTER TABLE multiple_alters ADD COLUMN c STRING DEFAULT 'cee'`) waitForSchemaChange(t, sqlDB, `ALTER TABLE multiple_alters ADD COLUMN d STRING DEFAULT 'dee'`) wg.Done() assertPayloads(t, multipleAlters, []string{ // Backfill no-ops for DROP. Dropped columns are immediately invisible. `multiple_alters: [1]->{"a": 1}`, `multiple_alters: [2]->{"a": 2}`, // Scan output for DROP // TODO(dan): Track duplicates more precisely in sinklessFeed/tableFeed. // `multiple_alters: [1]->{"a": 1}`, // `multiple_alters: [2]->{"a": 2}`, // Backfill no-ops for column C // TODO(dan): Track duplicates more precisely in sinklessFeed/tableFeed. // `multiple_alters: [1]->{"a": 1}`, // `multiple_alters: [2]->{"a": 2}`, // Scan output for column C `multiple_alters: [1]->{"a": 1, "c": "cee"}`, `multiple_alters: [2]->{"a": 2, "c": "cee"}`, // Backfill no-ops for column D (C schema change is complete) // TODO(dan): Track duplicates more precisely in sinklessFeed/tableFeed. // `multiple_alters: [1]->{"a": 1, "c": "cee"}`, // `multiple_alters: [2]->{"a": 2, "c": "cee"}`, // Scan output for column C `multiple_alters: [1]->{"a": 1, "c": "cee", "d": "dee"}`, `multiple_alters: [2]->{"a": 2, "c": "cee", "d": "dee"}`, }) }) } t.Run(`sinkless`, sinklessTest(testFn)) t.Run(`enterprise`, enterpriseTest(testFn)) t.Run(`rangefeed`, rangefeedTest(sinklessTest, testFn)) }
explode_data.jsonl/21280
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2448 }
[ 2830, 3393, 1143, 524, 823, 12051, 8632, 4072, 18605, 3707, 7559, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 18185, 24911, 1669, 2915, 1155, 353, 8840, 836, 11, 2927, 353, 34073, 1470, 22537, 11, 282, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPutGet_Integration(t *testing.T) { t.Parallel() sethost() repo := New() for _, tc := range []struct { name string id string body *models.Thing want *models.Thing }{ { name: "happy path", id: "1", body: &models.Thing{ID: "1", Name: "test"}, want: &models.Thing{ID: "1", Name: "test"}, }, } { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() if err := repo.Put(context.Background(), tc.body); err != nil { t.Fatal(err) } thing, err := repo.Get(context.Background(), tc.id) if err != nil { t.Fatal(err) } if tc.want != nil { if !reflect.DeepEqual(thing, tc.want) { t.Fatal(pretty.Compare(thing, tc.want)) } } }) } }
explode_data.jsonl/5542
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 348 }
[ 2830, 3393, 19103, 1949, 32054, 17376, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 8196, 3790, 741, 17200, 5368, 1669, 1532, 741, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 914, 198, 197, 15710, 256, 914, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestScreenshot(t *testing.T) { ctx := context.Background() d, err := CreateSession(ctx, wdAddress(), 3, nil) if err != nil { t.Fatal(err) } defer d.Quit(ctx) img, err := d.Screenshot(ctx) if err != nil { t.Fatal(err) } if img == nil { t.Fatal("got nil, expected an image.Image") } }
explode_data.jsonl/68736
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 62522, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 2698, 11, 1848, 1669, 4230, 5283, 7502, 11, 45404, 4286, 1507, 220, 18, 11, 2092, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEntry_FocusWithPopUp(t *testing.T) { entry, window := setupImageTest(false) defer teardownImageTest(window) c := window.Canvas() test.TapSecondaryAt(entry, fyne.NewPos(1, 1)) test.AssertImageMatches(t, "entry/focus_with_popup_initial.png", c.Capture()) test.TapCanvas(c, fyne.NewPos(20, 20)) test.AssertImageMatches(t, "entry/focus_with_popup_entry_selected.png", c.Capture()) test.TapSecondaryAt(entry, fyne.NewPos(1, 1)) test.AssertImageMatches(t, "entry/focus_with_popup_initial.png", c.Capture()) test.TapCanvas(c, fyne.NewPos(5, 5)) test.AssertImageMatches(t, "entry/focus_with_popup_dismissed.png", c.Capture()) }
explode_data.jsonl/57285
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 5874, 1400, 3384, 2354, 11598, 2324, 1155, 353, 8840, 836, 8, 341, 48344, 11, 3241, 1669, 6505, 1906, 2271, 3576, 340, 16867, 49304, 1906, 2271, 15906, 340, 1444, 1669, 3241, 54121, 2822, 18185, 836, 391, 48963, 1655, 18238, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateTargetContainer(t *testing.T) { testSandboxID := "test-sandbox-uid" // The existing container that will be targeted. testTargetContainerID := "test-target-container" testTargetContainerPID := uint32(4567) // A container that has finished running and cannot be targeted. testStoppedContainerID := "stopped-target-container" testStoppedContainerPID := uint32(6789) // A container from another pod. testOtherContainerSandboxID := "other-sandbox-uid" testOtherContainerID := "other-target-container" testOtherContainerPID := uint32(7890) // Container create/start/stop times. createdAt := time.Now().Add(-15 * time.Second).UnixNano() startedAt := time.Now().Add(-10 * time.Second).UnixNano() finishedAt := time.Now().Add(-5 * time.Second).UnixNano() c := newTestCRIService() // Create a target container. err := addContainer(c, testTargetContainerID, testSandboxID, testTargetContainerPID, createdAt, startedAt, 0) require.NoError(t, err, "error creating test target container") // Create a stopped container. err = addContainer(c, testStoppedContainerID, testSandboxID, testStoppedContainerPID, createdAt, startedAt, finishedAt) require.NoError(t, err, "error creating test stopped container") // Create a container in another pod. 
err = addContainer(c, testOtherContainerID, testOtherContainerSandboxID, testOtherContainerPID, createdAt, startedAt, 0) require.NoError(t, err, "error creating test container in other pod") for desc, test := range map[string]struct { targetContainerID string expectError bool }{ "target container in pod": { targetContainerID: testTargetContainerID, expectError: false, }, "target stopped container in pod": { targetContainerID: testStoppedContainerID, expectError: true, }, "target container does not exist": { targetContainerID: "no-container-with-this-id", expectError: true, }, "target container in other pod": { targetContainerID: testOtherContainerID, expectError: true, }, } { t.Run(desc, func(t *testing.T) { targetContainer, err := c.validateTargetContainer(testSandboxID, test.targetContainerID) if test.expectError { require.Error(t, err, "target should have been invalid but no error") return } require.NoErrorf(t, err, "target should have been valid but got error") assert.Equal(t, test.targetContainerID, targetContainer.ID, "returned target container does not have expected ID") }) } }
explode_data.jsonl/8833
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 818 }
[ 2830, 3393, 17926, 6397, 4502, 1155, 353, 8840, 836, 8, 341, 18185, 50, 31536, 915, 1669, 330, 1944, 1331, 31536, 12, 2423, 1837, 197, 322, 576, 6350, 5476, 429, 686, 387, 17112, 624, 18185, 6397, 4502, 915, 1669, 330, 1944, 18489, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDiscoveryMechanismTypeMarshalJSON(t *testing.T) { tests := []struct { name string typ DiscoveryMechanismType want string }{ { name: "eds", typ: DiscoveryMechanismTypeEDS, want: `"EDS"`, }, { name: "dns", typ: DiscoveryMechanismTypeLogicalDNS, want: `"LOGICAL_DNS"`, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, err := json.Marshal(tt.typ); err != nil || string(got) != tt.want { t.Fatalf("DiscoveryMechanismTypeEDS.MarshalJSON() = (%v, %v), want (%s, nil)", string(got), err, tt.want) } }) } }
explode_data.jsonl/52606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 67400, 57067, 67813, 929, 55438, 5370, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 25314, 220, 38093, 57067, 67813, 929, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDefaultConfig_env(t *testing.T) { t.Parallel() url := "http://1.2.3.4:5678" auth := []string{"nomaduser", "12345"} region := "test" namespace := "dev" token := "foobar" os.Setenv("NOMAD_ADDR", url) defer os.Setenv("NOMAD_ADDR", "") os.Setenv("NOMAD_REGION", region) defer os.Setenv("NOMAD_REGION", "") os.Setenv("NOMAD_NAMESPACE", namespace) defer os.Setenv("NOMAD_NAMESPACE", "") os.Setenv("NOMAD_HTTP_AUTH", strings.Join(auth, ":")) defer os.Setenv("NOMAD_HTTP_AUTH", "") os.Setenv("NOMAD_TOKEN", token) defer os.Setenv("NOMAD_TOKEN", "") config := DefaultConfig() if config.Address != url { t.Errorf("expected %q to be %q", config.Address, url) } if config.Region != region { t.Errorf("expected %q to be %q", config.Region, region) } if config.Namespace != namespace { t.Errorf("expected %q to be %q", config.Namespace, namespace) } if config.HttpAuth.Username != auth[0] { t.Errorf("expected %q to be %q", config.HttpAuth.Username, auth[0]) } if config.HttpAuth.Password != auth[1] { t.Errorf("expected %q to be %q", config.HttpAuth.Password, auth[1]) } if config.SecretID != token { t.Errorf("Expected %q to be %q", config.SecretID, token) } }
explode_data.jsonl/65017
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 3675, 2648, 15879, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 19320, 1669, 330, 1254, 1110, 16, 13, 17, 13, 18, 13, 19, 25, 20, 21, 22, 23, 698, 78011, 1669, 3056, 917, 4913, 16687, 329, 872, 497, 330, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestWriterReadFrom(t *testing.T) { ws := []func(io.Writer) io.Writer{ func(w io.Writer) io.Writer { return onlyWriter{w} }, func(w io.Writer) io.Writer { return w }, } rs := []func(io.Reader) io.Reader{ iotest.DataErrReader, func(r io.Reader) io.Reader { return r }, } for ri, rfunc := range rs { for wi, wfunc := range ws { input := createTestInput(8192) b := new(bytes.Buffer) w := NewWriter(wfunc(b)) r := rfunc(bytes.NewReader(input)) if n, err := w.ReadFrom(r); err != nil || n != int64(len(input)) { t.Errorf("ws[%d],rs[%d]: w.ReadFrom(r) = %d, %v, want %d, nil", wi, ri, n, err, len(input)) continue } if err := w.Flush(); err != nil { t.Errorf("Flush returned %v", err) continue } if got, want := b.String(), string(input); got != want { t.Errorf("ws[%d], rs[%d]:\ngot %q\nwant %q\n", wi, ri, got, want) } } } }
explode_data.jsonl/22876
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 412 }
[ 2830, 3393, 6492, 4418, 3830, 1155, 353, 8840, 836, 8, 341, 6692, 82, 1669, 3056, 2830, 37258, 47838, 8, 6399, 47838, 515, 197, 29244, 3622, 6399, 47838, 8, 6399, 47838, 314, 470, 1172, 6492, 90, 86, 92, 1153, 197, 29244, 3622, 6399, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuilder_BuildTCPFilter(t *testing.T) { service := newService("foo.a.svc.cluster.local", nil, t) testCases := []struct { name string policies []*model.Config wantRules bool wantRuleWithPolicies bool wantShadowRules bool }{ { name: "HTTP rule", policies: []*model.Config{ policy.SimpleClusterRbacConfig(), policy.SimpleRole("role-1", "a", "foo"), policy.SimpleBinding("binding-1", "a", "role-1"), }, wantRules: true, wantRuleWithPolicies: false, }, { name: "normal rule", policies: []*model.Config{ policy.SimpleClusterRbacConfig(), }, wantRules: true, }, { name: "normal shadow rule", policies: []*model.Config{ simpleGlobalPermissiveMode(), }, wantShadowRules: true, }, } for _, tc := range testCases { p := policy.NewAuthzPolicies(tc.policies, t) b := NewBuilder(trustdomain.NewTrustDomainBundle("", nil), service, nil, "a", p) t.Run(tc.name, func(t *testing.T) { filters := b.BuildTCPFilters() if len(filters) != 1 { t.Fatalf("want 1 filter but got %d", len(filters)) } got := filters[0] if got.Name != authzModel.RBACTCPFilterName { t.Errorf("got filter name %q but want %q", got.Name, authzModel.RBACTCPFilterName) } rbacConfig := &envoyRbacTcpPb.RBAC{} if err := ptypes.UnmarshalAny(got.GetTypedConfig(), rbacConfig); err != nil { t.Fatalf("failed to unmarshal config: %s", err) } if rbacConfig.StatPrefix != authzModel.RBACTCPFilterStatPrefix { t.Errorf("got filter stat prefix %q but want %q", rbacConfig.StatPrefix, authzModel.RBACTCPFilterStatPrefix) } if len(rbacConfig.GetRules().GetPolicies()) > 0 != tc.wantRuleWithPolicies { t.Errorf("got rules with policies %v but want %v", len(rbacConfig.GetRules().GetPolicies()) > 0, tc.wantRuleWithPolicies) } if (rbacConfig.GetRules() != nil) != tc.wantRules { t.Errorf("got rules %v but want %v", rbacConfig.GetRules() != nil, tc.wantRules) } if (rbacConfig.GetShadowRules() != nil) != tc.wantShadowRules { t.Errorf("got shadow rules %v but want %v", rbacConfig.GetShadowRules() != nil, tc.wantShadowRules) } }) } }
explode_data.jsonl/21238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 993 }
[ 2830, 3393, 3297, 96686, 49896, 5632, 1155, 353, 8840, 836, 8, 341, 52934, 1669, 501, 1860, 445, 7975, 5849, 514, 7362, 40501, 11033, 497, 2092, 11, 259, 692, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 338, 914, 198, 197, 3223, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestResolutionErrorUnwrap(t *testing.T) { originalError := &TestError{} resolutionError := NewError("", originalError) if !errors.Is(resolutionError, &TestError{}) { t.Errorf("resolution error expected to unwrap successfully") } }
explode_data.jsonl/8324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 38106, 1454, 1806, 10097, 1155, 353, 8840, 836, 8, 341, 197, 9889, 1454, 1669, 609, 2271, 1454, 16094, 10202, 3214, 1454, 1669, 1532, 1454, 19814, 4024, 1454, 340, 743, 753, 7650, 4506, 4590, 3214, 1454, 11, 609, 2271, 1454,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestLoggingMetricsExporterNoErrors(t *testing.T) { const exporterName = "test_metrics_exporter" lme, err := NewMetricsExporter(exporterName, zap.NewNop()) if err != nil { t.Fatalf("Wanted nil got %v", err) } md := consumerdata.MetricsData{ Metrics: make([]*metricspb.Metric, 7), } if err := lme.ConsumeMetricsData(context.Background(), md); err != nil { t.Fatalf("Wanted nil got %v", err) } if lme.Name() != exporterName { t.Errorf("Wanted %q got %q", exporterName, lme.Name()) } }
explode_data.jsonl/38413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 206 }
[ 2830, 3393, 34575, 27328, 88025, 2753, 13877, 1155, 353, 8840, 836, 8, 341, 4777, 57378, 675, 284, 330, 1944, 37686, 27114, 261, 698, 8810, 2660, 11, 1848, 1669, 1532, 27328, 88025, 7, 1533, 261, 675, 11, 32978, 7121, 45, 453, 2398, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAbsCollection_Last(t *testing.T) { intColl := NewIntCollection([]int{1, 2, 3, 4, 3, 2}) last, err := intColl.Last().ToInt() if err != nil { t.Fatal("last get error") } if last != 2 { t.Fatal("last 获取错误") } last, err = intColl.Last(func(item interface{}, key int) bool { i := item.(int) return i > 2 }).ToInt() if err != nil { t.Fatal("last get error") } if last != 3 { t.Fatal("last 获取错误") } }
explode_data.jsonl/66435
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 27778, 6482, 84672, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 18, 11, 220, 17, 3518, 33096, 11, 1848, 1669, 526, 15265, 24682...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSubmitAndUpdateMeasurementWithClosedReport(t *testing.T) { if testing.Short() { t.Skip("skip test in short mode") } sess := newSessionForTesting(t) defer sess.Close() builder, err := sess.NewExperimentBuilder("example") if err != nil { t.Fatal(err) } exp := builder.NewExperiment() m := new(model.Measurement) err = exp.SubmitAndUpdateMeasurement(m) if err == nil { t.Fatal("expected an error here") } }
explode_data.jsonl/26324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 8890, 56365, 76548, 2354, 26884, 10361, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 20599, 1273, 304, 2805, 3856, 1138, 197, 532, 1903, 433, 1669, 501, 5283, 2461, 16451, 1155, 340, 16867...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestProtocol_HandleUnstake drives the staking protocol's unstake handler
// through a table of scenarios: insufficient balance, unauthorized caller,
// bad bucket index, missing candidate, unstaking before maturity, unstaking
// an auto-stake bucket, and the success path. On success it additionally
// verifies bucket indices, the bucket contents, the candidate's votes (both
// from state and from the in-memory candidate manager), and that the staker's
// balance plus gas costs reconciles with the initial balance.
func TestProtocol_HandleUnstake(t *testing.T) {
	require := require.New(t)
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	sm, p, candidate, candidate2, cc := initAll(t, ctrl)
	initCreateStake(t, sm, identityset.Address(2), 100, big.NewInt(unit.Qev), 10000, 1, 1, time.Now(), 10000, p, candidate2, "100000000000000000000", false)
	callerAddr := identityset.Address(1)

	tests := []struct {
		// creat stake fields
		caller       address.Address
		amount       string
		autoStake    bool
		afterUnstake string
		initBalance  int64
		selfstaking  bool
		// action fields
		index    uint64
		gasPrice *big.Int
		gasLimit uint64
		nonce    uint64
		// block context
		blkHeight    uint64
		blkTimestamp time.Time
		ctxTimestamp time.Time
		blkGasLimit  uint64
		// clear flag for inMemCandidates
		clear bool
		// need new p
		newProtocol bool
		err         error
		// expected result
		status iotextypes.ReceiptStatus
	}{
		// fetchCaller ErrNotEnoughBalance
		{
			callerAddr, "100990000000000000000", false, "", 101, false,
			0, big.NewInt(unit.Qev), 10000, 1,
			1, time.Now(), time.Now(), 10000,
			false, true, nil,
			iotextypes.ReceiptStatus_ErrNotEnoughBalance,
		},
		// fetchBucket, bucket.Owner is not equal to actionCtx.Caller
		{
			identityset.Address(12), "100000000000000000000", false, "", 101, false,
			0, big.NewInt(unit.Qev), 10000, 1,
			1, time.Now(), time.Now(), 10000,
			false, false, nil,
			iotextypes.ReceiptStatus_ErrUnauthorizedOperator,
		},
		// fetchBucket,ReceiptStatus_ErrInvalidBucketType cannot happen,because allowSelfStaking is true
		// fetchBucket and updateBucket call getbucket, ReceiptStatus_ErrInvalidBucketIndex
		{
			identityset.Address(33), "100000000000000000000", false, "", 101, false,
			1, big.NewInt(unit.Qev), 10000, 1,
			1, time.Now(), time.Now(), 10000,
			false, true, nil,
			iotextypes.ReceiptStatus_ErrInvalidBucketIndex,
		},
		// inMemCandidates.GetByOwner,ErrInvalidOwner
		{
			callerAddr, "100000000000000000000", false, "", 101, false,
			0, big.NewInt(unit.Qev), 10000, 1,
			1, time.Now(), time.Now(), 10000,
			true, true, nil,
			iotextypes.ReceiptStatus_ErrCandidateNotExist,
		},
		// unstake before maturity
		{
			callerAddr, "100000000000000000000", false, "", 101, false,
			0, big.NewInt(unit.Qev), 10000, 1,
			1, time.Now(), time.Now(), 10000,
			false, true, nil,
			iotextypes.ReceiptStatus_ErrUnstakeBeforeMaturity,
		},
		// unstake with autoStake bucket
		{
			callerAddr, "100000000000000000000", true, "0", 101, false,
			0, big.NewInt(unit.Qev), 10000, 1,
			1, time.Now(), time.Now().Add(time.Duration(1) * 24 * time.Hour), 10000,
			false, true, nil,
			iotextypes.ReceiptStatus_ErrInvalidBucketType,
		},
		// Upsert error cannot happen,because collision cannot happen
		// success
		{
			callerAddr, "100000000000000000000", false, "0", 101, false,
			0, big.NewInt(unit.Qev), 10000, 1,
			1, time.Now(), time.Now().Add(time.Duration(1) * 24 * time.Hour), 10000,
			false, true, nil,
			iotextypes.ReceiptStatus_Success,
		},
	}

	for _, test := range tests {
		// Rebuild protocol state when the case demands a fresh protocol;
		// otherwise reuse the secondary candidate created in initAll.
		if test.newProtocol {
			sm, p, candidate, _, cc = initAll(t, ctrl)
		} else {
			candidate = candidate2
		}
		ctx, createCost := initCreateStake(t, sm, test.caller, test.initBalance, big.NewInt(unit.Qev), test.gasLimit, test.nonce, test.blkHeight, test.blkTimestamp, test.blkGasLimit, p, candidate, test.amount, test.autoStake)
		act, err := action.NewUnstake(test.nonce, test.index, nil, test.gasLimit, test.gasPrice)
		require.NoError(err)
		// Override the block timestamp when the case needs a different
		// execution-time context (e.g. to pass/fail the maturity check).
		if test.blkTimestamp != test.ctxTimestamp {
			blkCtx := protocol.MustGetBlockCtx(ctx)
			blkCtx.BlockTimeStamp = test.ctxTimestamp
			ctx = protocol.WithBlockCtx(ctx, blkCtx)
		}
		var r *action.Receipt
		if test.clear {
			// Simulate a candidate missing from the in-memory candidate
			// center, then invoke the lower-level handle directly.
			csm, err := NewCandidateStateManager(sm, cc)
			require.NoError(err)
			center, ok := cc.(*candCenter)
			require.True(ok)
			center.deleteForTestOnly(test.caller)
			require.False(csm.ContainsOwner(test.caller))
			r, err = p.handle(ctx, act, csm)
			require.Equal(test.err, errors.Cause(err))
		} else {
			r, err = p.Handle(ctx, act, sm)
			require.Equal(test.err, errors.Cause(err))
		}
		if r != nil {
			require.Equal(uint64(test.status), r.Status)
		} else {
			require.Equal(test.status, iotextypes.ReceiptStatus_Success)
		}

		if test.err == nil && test.status == iotextypes.ReceiptStatus_Success {
			// test bucket index and bucket
			bucketIndices, err := getCandBucketIndices(sm, candidate.Owner)
			require.NoError(err)
			require.Equal(1, len(*bucketIndices))
			bucketIndices, err = getVoterBucketIndices(sm, candidate.Owner)
			require.NoError(err)
			require.Equal(1, len(*bucketIndices))
			indices := *bucketIndices
			bucket, err := getBucket(sm, indices[0])
			require.NoError(err)
			require.Equal(candidate.Owner.String(), bucket.Candidate.String())
			require.Equal(test.caller.String(), bucket.Owner.String())
			require.Equal(test.amount, bucket.StakedAmount.String())

			// test candidate
			candidate, err = getCandidate(sm, candidate.Owner)
			require.NoError(err)
			require.Equal(test.afterUnstake, candidate.Votes.String())
			csm, err := NewCandidateStateManager(sm, cc)
			require.NoError(err)
			candidate = csm.GetByOwner(candidate.Owner)
			require.NotNil(candidate)
			require.Equal(test.afterUnstake, candidate.Votes.String())

			// test staker's account
			caller, err := accountutil.LoadAccount(sm, hash.BytesToHash160(test.caller.Bytes()))
			require.NoError(err)
			actCost, err := act.Cost()
			require.NoError(err)
			require.Equal(test.nonce, caller.Nonce)
			// initBalance must equal remaining balance + unstake gas cost +
			// original create-stake cost.
			total := big.NewInt(0)
			require.Equal(unit.ConvertIotxToRau(test.initBalance), total.Add(total, caller.Balance).Add(total, actCost).Add(total, createCost))
		}
	}
}
explode_data.jsonl/64532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2677 }
[ 2830, 3393, 20689, 42714, 1806, 267, 726, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 72023, 11, 281, 11, 9144, 11, 9144, 17, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestEnvironmentVariableSliceArgumentString(t *testing.T) { var args struct { Foo []string `arg:"env"` } setenv(t, "FOO", `bar,"baz, qux"`) MustParse(&args) assert.Equal(t, []string{"bar", "baz, qux"}, args.Foo) }
explode_data.jsonl/13040
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 12723, 7827, 33236, 9171, 703, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 2036, 341, 197, 12727, 2624, 3056, 917, 1565, 858, 2974, 3160, 8805, 197, 532, 8196, 3160, 1155, 11, 330, 3788, 46, 497, 1565, 2257, 1335, 42573, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestValid parses every statement in the validSQL table, checks the
// re-serialized tree matches the expected output (defaulting to the input),
// asserts IsFullyParsed for DDL statements, and walks the tree.
func TestValid(t *testing.T) {
	for _, tcase := range validSQL {
		t.Run(tcase.input, func(t *testing.T) {
			// An empty expected output means the statement round-trips
			// to exactly its input text.
			if tcase.output == "" {
				tcase.output = tcase.input
			}
			tree, err := Parse(tcase.input)
			require.NoError(t, err, tcase.input)
			out := String(tree)
			if tcase.output != out {
				t.Errorf("Parsing failed. \nExpected/Got:\n%s\n%s", tcase.output, out)
			}
			// Some statements currently only have 5.7 specifications.
			// For mysql 8.0 syntax, the query is not entirely parsed.
			// Add more structs as we go on adding full parsing support for DDL constructs for 5.7 syntax.
			switch x := tree.(type) {
			case DBDDLStatement:
				assert.Equal(t, !tcase.partialDDL, x.IsFullyParsed())
			case DDLStatement:
				assert.Equal(t, !tcase.partialDDL, x.IsFullyParsed())
			}
			// This test just exercises the tree walking functionality.
			// There's no way automated way to verify that a node calls
			// all its children. But we can examine code coverage and
			// ensure that all walkSubtree functions were called.
			Walk(func(node SQLNode) (bool, error) {
				return true, nil
			}, tree)
		})
	}
}
explode_data.jsonl/27178
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 4088, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 259, 5638, 1669, 2088, 2697, 6688, 341, 197, 3244, 16708, 1155, 5638, 10046, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 743, 259, 5638, 13413, 621, 1591, 341, 571, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestShutdown(t *testing.T) { c, err := NewContainer(ContainerName) if err != nil { t.Errorf(err.Error()) } if err := c.Shutdown(30 * time.Second); err != nil { t.Errorf(err.Error()) } c.Wait(STOPPED, 30*time.Second) if c.Running() { t.Errorf("Shutting down the container failed...") } }
explode_data.jsonl/2810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 62004, 1155, 353, 8840, 836, 8, 341, 1444, 11, 1848, 1669, 1532, 4502, 75145, 675, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 3964, 6141, 2398, 197, 630, 743, 1848, 1669, 272, 10849, 18452, 7, 18, 15, 353, 882, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestStdErrorHander scans a source full of illegal '@' tokens spread across
// several //line-redirected positions and verifies the ErrorList: the raw
// error count, that Sort preserves the count, and that RemoveMultiples keeps
// one error per distinct line.
// NOTE(review): "Hander" looks like a typo for "Handler", but renaming the
// exported test would change the externally visible name — left as is.
func TestStdErrorHander(t *testing.T) {
	const src = "@\n" + // illegal character, cause an error
		"@ @\n" + // two errors on the same line
		"//line File2:20\n" +
		"@\n" + // different file, but same line
		"//line File2:1\n" +
		"@ @\n" + // same file, decreasing line number
		"//line File1:1\n" +
		"@ @ @" // original file, line 1 again

	var list ErrorList
	eh := func(pos token.Position, msg string) { list.Add(pos, msg) }

	var s Scanner
	s.Init(fset.AddFile("File1", fset.Base(), len(src)), []byte(src), eh, dontInsertSemis)
	// Drain the scanner; each '@' invokes eh and grows the list.
	for {
		if _, tok, _ := s.Scan(); tok == token.EOF {
			break
		}
	}

	if len(list) != s.ErrorCount {
		t.Errorf("found %d errors, expected %d", len(list), s.ErrorCount)
	}

	if len(list) != 9 {
		t.Errorf("found %d raw errors, expected 9", len(list))
		PrintError(os.Stderr, list)
	}

	list.Sort()
	if len(list) != 9 {
		t.Errorf("found %d sorted errors, expected 9", len(list))
		PrintError(os.Stderr, list)
	}

	list.RemoveMultiples()
	if len(list) != 4 {
		t.Errorf("found %d one-per-line errors, expected 4", len(list))
		PrintError(os.Stderr, list)
	}
}
explode_data.jsonl/1842
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 456 }
[ 2830, 3393, 22748, 1454, 2314, 261, 1155, 353, 8840, 836, 8, 341, 4777, 2286, 284, 8428, 59, 77, 1, 488, 442, 11816, 3668, 11, 5240, 458, 1465, 198, 197, 197, 96270, 569, 59, 77, 1, 488, 442, 1378, 5975, 389, 279, 1852, 1555, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConnectionMissingPort(t *testing.T) { irccon := IRC("go-eventirc", "go-eventirc") err := irccon.Connect("chat.freenode.net:") if err == nil { t.Fatal("missing port not detected") } }
explode_data.jsonl/63344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 4526, 25080, 7084, 1155, 353, 8840, 836, 8, 341, 197, 2437, 443, 1669, 59328, 445, 3346, 39687, 2437, 497, 330, 3346, 39687, 2437, 1138, 9859, 1669, 79923, 443, 43851, 445, 9686, 833, 4442, 534, 5071, 34403, 743, 1848, 621, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestPeerIDRandomPeerID(t *testing.T) { p1 := RandomPeerID() p2 := RandomPeerID() if bytes.Compare(p1, p2) == 0 { panic("first time") } var err error _, err = PeerIDFromBytes(p1) _, err = PeerIDFromBytes(p2) if err != nil { t.Fatal(err) } }
explode_data.jsonl/54175
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 30888, 915, 13999, 30888, 915, 1155, 353, 8840, 836, 8, 341, 3223, 16, 1669, 10612, 30888, 915, 741, 3223, 17, 1669, 10612, 30888, 915, 2822, 743, 5820, 32377, 1295, 16, 11, 281, 17, 8, 621, 220, 15, 341, 197, 30764, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestBrokerGetCondition exercises BrokerStatus.GetCondition against status
// sets containing one or several conditions, and verifies that querying an
// unknown condition type returns nil.
func TestBrokerGetCondition(t *testing.T) {
	tests := []struct {
		name      string
		bs        *BrokerStatus
		condQuery apis.ConditionType
		want      *apis.Condition
	}{{
		name: "single condition",
		bs: &BrokerStatus{
			Status: duckv1.Status{
				Conditions: []apis.Condition{
					brokerConditionReady,
				},
			},
		},
		condQuery: apis.ConditionReady,
		want:      &brokerConditionReady,
	}, {
		name: "multiple conditions",
		bs: &BrokerStatus{
			Status: duckv1.Status{
				Conditions: []apis.Condition{
					brokerConditionIngress,
					brokerConditionTriggerChannel,
					brokerConditionFilter,
				},
			},
		},
		condQuery: BrokerConditionFilter,
		want:      &brokerConditionFilter,
	}, {
		name: "multiple conditions, condition false",
		bs: &BrokerStatus{
			Status: duckv1.Status{
				Conditions: []apis.Condition{
					brokerConditionTriggerChannel,
					brokerConditionFilter,
					brokerConditionAddressable,
				},
			},
		},
		condQuery: BrokerConditionAddressable,
		want:      &brokerConditionAddressable,
	}, {
		name: "unknown condition",
		bs: &BrokerStatus{
			Status: duckv1.Status{
				Conditions: []apis.Condition{
					brokerConditionAddressable,
					brokerConditionReady,
				},
			},
		},
		condQuery: apis.ConditionType("foo"),
		want:      nil,
	}}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			got := test.bs.GetCondition(test.condQuery)
			if diff := cmp.Diff(test.want, got); diff != "" {
				t.Error("unexpected condition (-want, +got) =", diff)
			}
		})
	}
}
explode_data.jsonl/46024
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 652 }
[ 2830, 3393, 65545, 1949, 10547, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 93801, 286, 353, 65545, 2522, 198, 197, 197, 1297, 2859, 97723, 75134, 929, 198, 197, 50780, 414, 353, 13725, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestPodSpecForCSIWithCustomPortworxImage verifies how the CSI registrar's
// kubelet registration socket path is chosen depending on where the Portworx
// version comes from: the PX_IMAGE env var (takes precedence over spec.image),
// the px-version annotation, or — when neither yields a valid version — the
// assumed-latest fallback, which switches to the new CSI driver name.
func TestPodSpecForCSIWithCustomPortworxImage(t *testing.T) {
	fakeClient := fakek8sclient.NewSimpleClientset()
	coreops.SetInstance(coreops.New(fakeClient))
	fakeClient.Discovery().(*fakediscovery.FakeDiscovery).FakedServerVersion = &version.Info{
		GitVersion: "v1.13.0",
	}

	// PX_IMAGE env var gets precedence over spec.image.
	// We verify that by checking that CSI registrar is using old driver name.
	cluster := &corev1.StorageCluster{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "px-cluster",
			Namespace: "kube-system",
		},
		Spec: corev1.StorageClusterSpec{
			Image: "portworx/oci-monitor:2.2",
			FeatureGates: map[string]string{
				string(pxutil.FeatureCSI): "true",
			},
			CommonConfig: corev1.CommonConfig{
				Env: []v1.EnvVar{
					{
						Name:  "PX_IMAGE",
						Value: "portworx/oci-monitor:2.1.1-rc1",
					},
				},
			},
		},
		Status: corev1.StorageClusterStatus{
			DesiredImages: &corev1.ComponentImages{
				CSINodeDriverRegistrar: "quay.io/k8scsi/csi-node-driver-registrar:v1.1.0",
			},
		},
	}
	nodeName := "testNode"

	driver := portworx{}
	actual, err := driver.GetStoragePodSpec(cluster, nodeName)
	assert.NoError(t, err, "Unexpected error on GetStoragePodSpec")
	assert.Equal(t, actual.Containers[1].Args[2],
		"--kubelet-registration-path=/var/lib/kubelet/csi-plugins/com.openstorage.pxd/csi.sock",
	)

	// If version cannot be found from the Portworx image tag, then check the annotation
	// for version. This is useful in testing when your image tag does not have version.
	cluster.Spec.Image = "portworx/oci-monitor:custom_oci_tag"
	cluster.Spec.Env[0].Value = "portworx/oci-monitor:custom_px_tag"
	cluster.Annotations = map[string]string{
		pxutil.AnnotationPXVersion: "2.1",
	}

	actual, err = driver.GetStoragePodSpec(cluster, nodeName)
	assert.NoError(t, err, "Unexpected error on GetStoragePodSpec")
	assert.Equal(t, actual.Containers[1].Args[2],
		"--kubelet-registration-path=/var/lib/kubelet/csi-plugins/com.openstorage.pxd/csi.sock",
	)

	// If valid version is not found from the image or the annotation, then assume latest
	// Portworx version. Verify this by checking the new CSI driver name in registrar.
	cluster.Annotations = map[string]string{
		pxutil.AnnotationPXVersion: "portworx/oci-monitor:invalid",
	}

	actual, err = driver.GetStoragePodSpec(cluster, nodeName)
	assert.NoError(t, err, "Unexpected error on GetStoragePodSpec")
	assert.Equal(t, actual.Containers[1].Args[2],
		"--kubelet-registration-path=/var/lib/kubelet/csi-plugins/pxd.portworx.com/csi.sock",
	)
}
explode_data.jsonl/55462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1011 }
[ 2830, 3393, 23527, 8327, 2461, 48407, 2354, 10268, 7084, 49710, 87, 1906, 1155, 353, 8840, 836, 8, 341, 1166, 726, 2959, 1669, 12418, 74, 23, 82, 2972, 7121, 16374, 2959, 746, 741, 71882, 3721, 4202, 2523, 47867, 3721, 7121, 74138, 2959...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateRecord(t *testing.T) { setupDataFileForTest() router := setupAPI() var jsonStr = []byte(`{"title":"new record"}`) w := httptest.NewRecorder() req, _ := http.NewRequest("POST", "/notes", bytes.NewBuffer(jsonStr)) req.Header.Set("Content-Type", "application/json") router.ServeHTTP(w, req) if w.Code != 201 { t.Errorf("Response was incorrect, got: %d, want: 201.", w.Code) } }
explode_data.jsonl/81912
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 4021, 6471, 1155, 353, 8840, 836, 8, 341, 84571, 1043, 1703, 2461, 2271, 741, 67009, 1669, 6505, 7082, 2822, 2405, 2951, 2580, 284, 3056, 3782, 5809, 4913, 2102, 3252, 931, 3255, 1, 5541, 692, 6692, 1669, 54320, 70334, 7121,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestPrincipalRoleBadRole builds an MSPRole principal carrying a nonexistent
// role value (1234) and checks that SatisfiesPrincipal rejects it with an
// "invalid MSP role type" error.
func TestPrincipalRoleBadRole(t *testing.T) {
	msp1, err := setup("testdata/idemix/MSP1OU1", "MSP1OU1")
	assert.NoError(t, err)
	id1, err := getDefaultSigner(msp1)
	assert.NoError(t, err)

	// Make principal for nonexisting role 1234
	principalBytes, err := proto.Marshal(&msp.MSPRole{Role: 1234, MspIdentifier: id1.GetMSPIdentifier()})
	assert.NoError(t, err)

	principal := &msp.MSPPrincipal{
		PrincipalClassification: msp.MSPPrincipal_ROLE,
		Principal:               principalBytes}

	err = id1.SatisfiesPrincipal(principal)
	assert.Error(t, err, "Role MSP principal should have failed for a bad Role")
	assert.Contains(t, err.Error(), "invalid MSP role type")
}
explode_data.jsonl/46051
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 31771, 9030, 17082, 9030, 1155, 353, 8840, 836, 8, 341, 47691, 79, 16, 11, 1848, 1669, 6505, 445, 92425, 38146, 336, 941, 10270, 4592, 16, 11922, 16, 497, 330, 44, 4592, 16, 11922, 16, 1138, 6948, 35699, 1155, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPodSpecPatch verifies that podSpecPatch entries — template-level,
// workflow-level JSON, and workflow-level YAML — are merged into the pod the
// controller creates (checked via the container's CPU/memory limits), and
// that an invalid patch format yields a descriptive merge error.
func TestPodSpecPatch(t *testing.T) {
	// Template-level patch.
	wf := wfv1.MustUnmarshalWorkflow(helloWorldWfWithPatch)
	ctx := context.Background()
	woc := newWoc(*wf)
	mainCtr := woc.execWf.Spec.Templates[0].Container
	pod, _ := woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{})
	assert.Equal(t, "0.800", pod.Spec.Containers[1].Resources.Limits.Cpu().AsDec().String())

	// Workflow-level JSON patch.
	wf = wfv1.MustUnmarshalWorkflow(helloWorldWfWithWFPatch)
	woc = newWoc(*wf)
	mainCtr = woc.execWf.Spec.Templates[0].Container
	pod, _ = woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{})
	assert.Equal(t, "0.800", pod.Spec.Containers[1].Resources.Limits.Cpu().AsDec().String())

	// Workflow-level YAML patch: CPU and memory limits both applied.
	wf = wfv1.MustUnmarshalWorkflow(helloWorldWfWithWFYAMLPatch)
	woc = newWoc(*wf)
	mainCtr = woc.execWf.Spec.Templates[0].Container
	pod, _ = woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{})
	assert.Equal(t, "0.800", pod.Spec.Containers[1].Resources.Limits.Cpu().AsDec().String())
	assert.Equal(t, "104857600", pod.Spec.Containers[1].Resources.Limits.Memory().AsDec().String())

	// Malformed patch: pod creation must fail with the merge error.
	wf = wfv1.MustUnmarshalWorkflow(helloWorldWfWithInvalidPatchFormat)
	woc = newWoc(*wf)
	mainCtr = woc.execWf.Spec.Templates[0].Container
	_, err := woc.createWorkflowPod(ctx, wf.Name, []apiv1.Container{*mainCtr}, &wf.Spec.Templates[0], &createWorkflowPodOpts{})
	assert.EqualError(t, err, "Failed to merge the workflow PodSpecPatch with the template PodSpecPatch due to invalid format")
}
explode_data.jsonl/75395
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 652 }
[ 2830, 3393, 23527, 8327, 43622, 1155, 353, 8840, 836, 8, 341, 6692, 69, 1669, 289, 27890, 16, 50463, 1806, 27121, 62768, 3203, 4791, 10134, 54, 69, 2354, 43622, 340, 20985, 1669, 2266, 19047, 741, 6692, 509, 1669, 501, 54, 509, 4071, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPersistingPropertyAndRemovedTagWithSameName(t *testing.T) { dimension := types.Dimension{ Properties: map[string]string{ "shared_name": "property_value", }, Tags: map[string]bool{ "shared_name": false, }, } metadataUpdate := dimensionToMetadataUpdate(dimension) expectedMetadataToUpdate := map[string]string{ "shared_name": "property_value", } assert.Equal(t, expectedMetadataToUpdate, metadataUpdate.MetadataToUpdate) assert.Empty(t, metadataUpdate.MetadataToAdd) expectedMetadataToRemove := map[string]string{ "shared_name": "", } assert.Equal(t, expectedMetadataToRemove, metadataUpdate.MetadataToRemove) }
explode_data.jsonl/6187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 61267, 287, 3052, 3036, 42642, 5668, 2354, 19198, 675, 1155, 353, 8840, 836, 8, 341, 2698, 18161, 1669, 4494, 53234, 515, 197, 197, 7903, 25, 2415, 14032, 30953, 515, 298, 197, 1, 6100, 1269, 788, 330, 3699, 3142, 756, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLRUOverflow(t *testing.T) { c := NewLRUExpireCache(4) c.Add("elem1", "1", 10*time.Hour) c.Add("elem2", "2", 10*time.Hour) c.Add("elem3", "3", 10*time.Hour) c.Add("elem4", "4", 10*time.Hour) c.Add("elem5", "5", 10*time.Hour) expectNotEntry(t, c, "elem1") expectEntry(t, c, "elem2", "2") expectEntry(t, c, "elem3", "3") expectEntry(t, c, "elem4", "4") expectEntry(t, c, "elem5", "5") }
explode_data.jsonl/44520
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 20117, 52, 42124, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 20117, 52, 8033, 554, 8233, 7, 19, 340, 1444, 1904, 445, 18871, 16, 497, 330, 16, 497, 220, 16, 15, 77053, 73550, 340, 1444, 1904, 445, 18871, 17, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_dkTags_getAttributeVal looks up several well-known OTel resource keys
// in the shared allTag fixture map and checks the values returned by
// dkTags.getAttributeVal.
func Test_dkTags_getAttributeVal(t *testing.T) {
	type fields struct {
		tags        map[string]string
		replaceTags map[string]string
	}
	type args struct {
		keyName string
	}
	tests := []struct {
		name   string
		fields fields
		args   args
		want   string
	}{
		{
			name:   "case1",
			fields: fields{tags: allTag},
			args:   args{keyName: otelResourceServiceKey},
			want:   "service",
		},
		{
			name:   "case2",
			fields: fields{tags: allTag},
			args:   args{keyName: otelResourceHTTPMethodKey},
			want:   "POST",
		},
		{
			name:   "case3",
			fields: fields{tags: allTag},
			args:   args{keyName: otelResourceContainerNameKey},
			want:   "hostName",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			dt := &dkTags{
				tags:        tt.fields.tags,
				replaceTags: tt.fields.replaceTags,
			}
			if got := dt.getAttributeVal(tt.args.keyName); got != tt.want {
				t.Errorf("getAttributeVal() = %v, want %v", got, tt.want)
			}
		})
	}
}
explode_data.jsonl/14407
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 814, 74, 15930, 3062, 3907, 2208, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 3244, 2032, 286, 2415, 14032, 30953, 198, 197, 197, 8156, 15930, 2415, 14032, 30953, 198, 197, 532, 13158, 2827, 2036, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestFieldExcludes covers the "fieldexcludes" validator: a field fails when
// it contains the referenced field's value, passes otherwise, works through
// VarWithValue in both directions, and passes when the referenced field is
// absent from the struct.
func TestFieldExcludes(t *testing.T) {
	validate := New()

	type StringTest struct {
		Foo string `validate:"fieldexcludes=Bar"`
		Bar string
	}

	// Foo ("foobar") contains Bar ("bar") -> must fail.
	stringTest := &StringTest{
		Foo: "foobar",
		Bar: "bar",
	}

	errs := validate.Struct(stringTest)
	NotEqual(t, errs, nil)
	AssertError(t, errs, "StringTest.Foo", "StringTest.Foo", "Foo", "Foo", "fieldexcludes")

	// Foo ("foo") does not contain Bar ("bar") -> must pass.
	stringTest = &StringTest{
		Foo: "foo",
		Bar: "bar",
	}

	errs = validate.Struct(stringTest)
	Equal(t, errs, nil)

	errs = validate.VarWithValue("foo", "bar", "fieldexcludes")
	Equal(t, errs, nil)

	errs = validate.VarWithValue("bar", "foobarfoo", "fieldexcludes")
	Equal(t, errs, nil)

	// Field value contains the compared value -> must fail.
	errs = validate.VarWithValue("foobarfoo", "bar", "fieldexcludes")
	NotEqual(t, errs, nil)
	AssertError(t, errs, "", "", "", "", "fieldexcludes")

	// Referenced field missing entirely -> validation passes.
	type StringTestMissingField struct {
		Foo string `validate:"fieldexcludes=Bar"`
	}

	stringTestMissingField := &StringTestMissingField{
		Foo: "foo",
	}

	errs = validate.Struct(stringTestMissingField)
	Equal(t, errs, nil)
}
explode_data.jsonl/77300
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 408 }
[ 2830, 3393, 1877, 840, 7396, 1155, 353, 8840, 836, 8, 341, 197, 7067, 1669, 1532, 2822, 13158, 923, 2271, 2036, 341, 197, 12727, 2624, 914, 1565, 7067, 2974, 2566, 327, 7396, 28, 3428, 8805, 197, 197, 3428, 914, 198, 197, 630, 11357, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMysqlGetDSNTag(t *testing.T) { tests := []struct { input string output string }{ { "", "127.0.0.1:3306", }, { "localhost", "127.0.0.1:3306", }, { "127.0.0.1", "127.0.0.1:3306", }, { "tcp(192.168.1.1:3306)/", "192.168.1.1:3306", }, { "tcp(localhost)/", "localhost:3306", }, { "root:passwd@tcp(192.168.1.1:3306)/?tls=false", "192.168.1.1:3306", }, { "root@tcp(127.0.0.1:3306)/?tls=false", "127.0.0.1:3306", }, { "root:passwd@tcp(localhost:3036)/dbname?allowOldPasswords=1", "localhost:3036", }, { "root:foo@bar@tcp(192.1.1.1:3306)/?tls=false", "192.1.1.1:3306", }, { "root:f00@b4r@tcp(192.1.1.1:3306)/?tls=false", "192.1.1.1:3306", }, { "root:fl!p11@tcp(192.1.1.1:3306)/?tls=false", "192.1.1.1:3306", }, } for _, test := range tests { output := getDSNTag(test.input) if output != test.output { t.Errorf("Input: %s Expected %s, got %s\n", test.input, test.output, output) } } }
explode_data.jsonl/71239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 609 }
[ 2830, 3393, 44, 14869, 1949, 5936, 45, 5668, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 22427, 220, 914, 198, 197, 21170, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 39680, 298, 197, 1, 16, 17, 22, 13, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestValueBinaryMarshaling round-trips every value kind — null, bool,
// integer, double, text, blob, deeply nested arrays, and nested documents —
// through MarshalBinary/UnmarshalBinary and requires the decoded value to
// equal the original.
func TestValueBinaryMarshaling(t *testing.T) {
	tests := []struct {
		name string
		v    Value
	}{
		{"null", NewNullValue()},
		{"bool", NewBoolValue(true)},
		{"integer", NewIntegerValue(-10)},
		{"double", NewDoubleValue(-3.14)},
		{"text", NewTextValue("foo")},
		{"blob", NewBlobValue([]byte("bar"))},
		{"array", NewArrayValue(NewValueBuffer(
			NewBoolValue(true),
			NewIntegerValue(55),
			NewIntegerValue(56),
			NewIntegerValue(57),
			NewDoubleValue(789.58),
			NewDoubleValue(790.58),
			NewDoubleValue(791.58),
			NewArrayValue(NewValueBuffer(
				NewBoolValue(false),
				NewIntegerValue(100),
				NewTextValue("baz"),
			)),
			NewArrayValue(NewValueBuffer(
				NewBoolValue(true),
				NewIntegerValue(101),
				NewTextValue("bax"),
			)),
			NewBlobValue([]byte("coc")),
			NewBlobValue([]byte("ori")),
			NewBlobValue([]byte("co!")),
			NewDocumentValue(
				NewFieldBuffer().
					Add("foo1", NewBoolValue(true)).
					Add("foo2", NewIntegerValue(55)).
					Add("foo3", NewArrayValue(NewValueBuffer(
						NewBoolValue(false),
						NewIntegerValue(100),
						NewTextValue("baz"),
					))),
			),
		))},
		{"document", NewDocumentValue(
			NewFieldBuffer().
				Add("foo1", NewBoolValue(true)).
				Add("foo2", NewIntegerValue(55)).
				Add("foo3", NewArrayValue(NewValueBuffer(
					NewBoolValue(false),
					NewIntegerValue(100),
					NewTextValue("baz"),
				))).
				Add("foo4", NewDocumentValue(
					NewFieldBuffer().
						Add("foo1", NewBoolValue(true)).
						Add("foo2", NewIntegerValue(55)).
						Add("foo3", NewArrayValue(NewValueBuffer(
							NewBoolValue(false),
							NewIntegerValue(100),
							NewTextValue("baz"),
						))),
				)),
		)},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			b, err := test.v.MarshalBinary()
			require.NoError(t, err)

			// Decode into a value of the same type and compare.
			got := Value{Type: test.v.Type}
			err = got.UnmarshalBinary(b)
			require.NoError(t, err)
			require.Equal(t, test.v, got)
		})
	}
}
explode_data.jsonl/59748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 888 }
[ 2830, 3393, 1130, 21338, 79712, 6132, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 5195, 262, 5162, 198, 197, 59403, 197, 197, 4913, 2921, 497, 1532, 3280, 1130, 78108, 197, 197, 4913, 2641, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestModule(t *testing.T) { var stashedValue lua.LValue api := map[string]lua.LGFunction{ "add": WrapLuaFunction(func(state *lua.LState) error { state.Push(state.CheckNumber(1) + state.CheckNumber(2)) return nil }), "stash": WrapLuaFunction(func(state *lua.LState) error { stashedValue = state.CheckAny(1) return nil }), } ctx := context.Background() sandbox, err := newService(&observation.TestContext).CreateSandbox(ctx, CreateOptions{ Modules: map[string]lua.LGFunction{ "testmod": CreateModule(api), }, }) if err != nil { t.Fatalf("unexpected error creating sandbox: %s", err) } defer sandbox.Close() script := ` local testmod = require("testmod") testmod.stash(testmod.add(3, testmod.add(6, 9))) return testmod.add(38, 4) ` retValue, err := sandbox.RunScript(ctx, RunOptions{}, script) if err != nil { t.Fatalf("unexpected error running script: %s", err) } if lua.LVAsNumber(retValue) != 42 { t.Errorf("unexpected return value. want=%d have=%v", 42, retValue) } if lua.LVAsNumber(stashedValue) != 18 { t.Errorf("unexpected stashed value. want=%d have=%d", 18, stashedValue) } }
explode_data.jsonl/21500
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 453 }
[ 2830, 3393, 3332, 1155, 353, 8840, 836, 8, 341, 2405, 357, 13571, 1130, 20357, 1214, 1130, 271, 54299, 1669, 2415, 14032, 60, 27623, 1214, 38, 5152, 515, 197, 197, 1, 718, 788, 42187, 58020, 5152, 18552, 8390, 353, 27623, 1214, 1397, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeviceCodeReturnsErrorIfSendingFails(t *testing.T) { sender := mocks.NewSender() sender.SetError(fmt.Errorf("this is an error")) _, err := InitiateDeviceAuth(sender, TestOAuthConfig, TestClientID, TestResource) if err == nil || !strings.Contains(err.Error(), errCodeSendingFails) { t.Fatalf("adal: failed to get correct error expected(%s) actual(%s)", errCodeSendingFails, err.Error()) } }
explode_data.jsonl/27497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 6985, 2078, 16446, 1454, 2679, 49282, 37, 6209, 1155, 353, 8840, 836, 8, 341, 1903, 1659, 1669, 68909, 7121, 20381, 741, 1903, 1659, 4202, 1454, 28197, 13080, 445, 574, 374, 458, 1465, 28075, 197, 6878, 1848, 1669, 15690, 64...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStoreGetCachedReceipts(t *testing.T) { logger.SetTestMode(t) block, expect := fakeReceipts() store := cachedStore() store.SetRawReceipts(block, expect) got, _ := store.GetRawReceipts(block) assert.EqualValues(t, expect, got) }
explode_data.jsonl/51277
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 6093, 1949, 70293, 67461, 82, 1155, 353, 8840, 836, 8, 341, 17060, 4202, 2271, 3636, 1155, 692, 47996, 11, 1720, 1669, 12418, 67461, 82, 741, 57279, 1669, 20579, 6093, 741, 57279, 4202, 20015, 67461, 82, 18682, 11, 1720, 692...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestWERDialogue(t *testing.T) { if os.Getenv("TESTING_WER_DIALOGUE") == "1" { defer os.Exit(0) *runtime.TestingWER = true const EXCEPTION_NONCONTINUABLE = 1 mod := syscall.MustLoadDLL("kernel32.dll") proc := mod.MustFindProc("RaiseException") proc.Call(0xbad, EXCEPTION_NONCONTINUABLE, 0, 0) println("RaiseException should not return") return } cmd := exec.Command(os.Args[0], "-test.run=TestWERDialogue") cmd.Env = []string{"TESTING_WER_DIALOGUE=1"} // Child process should not open WER dialogue, but return immediately instead. cmd.CombinedOutput() }
explode_data.jsonl/54670
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 39351, 82890, 1155, 353, 8840, 836, 8, 341, 743, 2643, 64883, 445, 10033, 1718, 2763, 640, 42802, 2230, 899, 621, 330, 16, 1, 341, 197, 16867, 2643, 34358, 7, 15, 692, 197, 197, 9, 22255, 8787, 287, 39351, 284, 830, 198,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_Mock_AssertCalled_WithAnythingOfTypeArgument(t *testing.T) { var mockedService = new(TestExampleImplementation) mockedService. On("Test_Mock_AssertCalled_WithAnythingOfTypeArgument", Anything, Anything, Anything). Return() mockedService.Called(1, "two", []uint8("three")) assert.True(t, mockedService.AssertCalled(t, "Test_Mock_AssertCalled_WithAnythingOfTypeArgument", AnythingOfType("int"), AnythingOfType("string"), AnythingOfType("[]uint8"))) }
explode_data.jsonl/8608
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 1245, 1176, 62222, 529, 20960, 62, 2354, 77303, 34696, 9171, 1155, 353, 8840, 836, 8, 8022, 2405, 46149, 1860, 284, 501, 31159, 13314, 36850, 7229, 2109, 67385, 1860, 3224, 197, 86391, 445, 2271, 1245, 1176, 62222, 529, 20960,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntArray_Merge(t *testing.T) { gtest.C(t, func(t *gtest.T) { func1 := func(v1, v2 interface{}) int { if gconv.Int(v1) < gconv.Int(v2) { return 0 } return 1 } n1 := []int{0, 1, 2, 3} n2 := []int{4, 5, 6, 7} i1 := []interface{}{"1", "2"} s1 := []string{"a", "b", "c"} s2 := []string{"e", "f"} a1 := garray.NewIntArrayFrom(n1) a2 := garray.NewIntArrayFrom(n2) a3 := garray.NewArrayFrom(i1) a4 := garray.NewStrArrayFrom(s1) a5 := garray.NewSortedStrArrayFrom(s2) a6 := garray.NewSortedIntArrayFrom([]int{1, 2, 3}) a7 := garray.NewSortedStrArrayFrom(s1) a8 := garray.NewSortedArrayFrom([]interface{}{4, 5}, func1) t.Assert(a1.Merge(a2).Slice(), []int{0, 1, 2, 3, 4, 5, 6, 7}) t.Assert(a1.Merge(a3).Len(), 10) t.Assert(a1.Merge(a4).Len(), 13) t.Assert(a1.Merge(a5).Len(), 15) t.Assert(a1.Merge(a6).Len(), 18) t.Assert(a1.Merge(a7).Len(), 21) t.Assert(a1.Merge(a8).Len(), 23) }) }
explode_data.jsonl/47600
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 95338, 1245, 10080, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 29244, 16, 1669, 2915, 3747, 16, 11, 348, 17, 3749, 28875, 526, 341, 298, 743, 342, 12027, 7371, 3747...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBuildDirFromRepoRoot(t *testing.T) { testCases := []struct { name string instance *Instance isBazel bool expectedBuildDir string }{ { name: "empty repoRoot", instance: &Instance{ opts: &Options{ Version: "fakeVersion", }, }, expectedBuildDir: "_output", }, { name: "non-empty repoRoot, bazel", instance: &Instance{ opts: &Options{ Version: "fakeVersion", RepoRoot: "/fake/repo/root", }, }, isBazel: true, expectedBuildDir: "/fake/repo/root/bazel-bin/build", }, } for _, tc := range testCases { tc := tc t.Run(tc.name, func(t *testing.T) { t.Parallel() setupBuildDir(tc.instance, tc.isBazel) if tc.instance.opts.BuildDir != tc.expectedBuildDir { t.Errorf("buildDir mismatched, got: %v, want: %v", tc.instance.opts.BuildDir, tc.expectedBuildDir) } }) } }
explode_data.jsonl/79725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 427 }
[ 2830, 3393, 11066, 6184, 3830, 25243, 8439, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 1797, 914, 198, 197, 56256, 260, 353, 2523, 198, 197, 19907, 33, 68326, 688, 1807, 198, 197, 42400, 11066, 6184, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewPropertyV(t *testing.T) { // GIVEN graphName := "mygraph" g := NewGraph(graphName) require.NotNil(t, g) v := NewVertexG(g) require.NotNil(t, v) // WHEN p := NewPropertyV(v) // THEN assert.NotNil(t, p) assert.Equal(t, graphName, p.String()) }
explode_data.jsonl/38213
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 3564, 3052, 53, 1155, 353, 8840, 836, 8, 341, 197, 322, 89836, 198, 66616, 675, 1669, 330, 2408, 4439, 698, 3174, 1669, 1532, 11212, 24312, 675, 340, 17957, 93882, 1155, 11, 342, 340, 5195, 1669, 1532, 8320, 38, 3268, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsDNS1123Label(t *testing.T) { goodValues := []string{ "a", "ab", "abc", "a1", "a-1", "a--1--2--b", "0", "01", "012", "1a", "1-a", "1--a--b--2", strings.Repeat("a", 63), } for _, val := range goodValues { if msgs := IsDNS1123Label(val); len(msgs) != 0 { t.Errorf("expected true for '%s': %v", val, msgs) } } badValues := []string{ "", "A", "ABC", "aBc", "A1", "A-1", "1-A", "-", "a-", "-a", "1-", "-1", "_", "a_", "_a", "a_b", "1_", "_1", "1_2", ".", "a.", ".a", "a.b", "1.", ".1", "1.2", " ", "a ", " a", "a b", "1 ", " 1", "1 2", strings.Repeat("a", 64), } for _, val := range badValues { if msgs := IsDNS1123Label(val); len(msgs) == 0 { t.Errorf("expected false for '%s'", val) } } }
explode_data.jsonl/11820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 380 }
[ 2830, 3393, 3872, 61088, 16, 16, 17, 18, 2476, 1155, 353, 8840, 836, 8, 341, 3174, 1386, 6227, 1669, 3056, 917, 515, 197, 197, 56693, 497, 330, 370, 497, 330, 13683, 497, 330, 64, 16, 497, 330, 64, 12, 16, 497, 330, 64, 313, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestClientBasicAuth(t *testing.T) { ts := createAuthServer(t) defer ts.Close() c := dc() c.SetBasicAuth("myuser", "basicauth"). SetHostURL(ts.URL). SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true}) resp, err := c.R(). SetResult(&AuthSuccess{}). Post("/login") assertError(t, err) assertEqual(t, http.StatusOK, resp.StatusCode()) t.Logf("Result Success: %q", resp.Result().(*AuthSuccess)) logResponse(t, resp) }
explode_data.jsonl/39332
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 2959, 15944, 5087, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 1855, 5087, 5475, 1155, 340, 16867, 10591, 10421, 2822, 1444, 1669, 19402, 741, 1444, 4202, 15944, 5087, 445, 2408, 872, 497, 330, 17797, 3001, 940, 38609, 197, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResumption(t *testing.T) { t.Run("TLSv12", func(t *testing.T) { testResumption(t, VersionTLS12) }) t.Run("TLSv13", func(t *testing.T) { testResumption(t, VersionTLS13) }) }
explode_data.jsonl/27715
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 1061, 60574, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 45439, 85, 16, 17, 497, 2915, 1155, 353, 8840, 836, 8, 314, 1273, 1061, 60574, 1155, 11, 6079, 45439, 16, 17, 8, 2751, 3244, 16708, 445, 45439, 85, 16, 18, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestAESEncrypt(t *testing.T) { encrypted := AESEncrypt("The quick brown fox jumps over the lazy dog", AESCipherKey) assert.Equal(t, "3781dU72kqM+ulqyVv7aQlEoowO5jjGkTIjNNPKILa06LZ61DrAl7bhFFR20Ioao", encrypted) }
explode_data.jsonl/7647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 13669, 925, 1016, 3571, 1155, 353, 8840, 836, 8, 341, 262, 24455, 1669, 42108, 925, 1016, 3571, 445, 785, 3974, 13876, 38835, 34208, 916, 279, 15678, 5562, 497, 38841, 79460, 1592, 340, 262, 2060, 12808, 1155, 11, 330, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParsersDockerLogsFiltering(t *testing.T) { env := newInputTestingEnvironment(t) testlogName := "test.log" inp := env.mustCreateInput(map[string]interface{}{ "paths": []string{env.abspath(testlogName)}, "prospector.scanner.check_interval": "1ms", "parsers": []map[string]interface{}{ map[string]interface{}{ "ndjson": map[string]interface{}{ "message_key": "log", "target": "", }, }, }, "exclude_lines": []string{"main"}, }) testline := []byte(`{"log":"Fetching main repository github.com/elastic/beats...\n","stream":"stdout","time":"2016-03-02T22:58:51.338462311Z"} {"log":"Fetching dependencies...\n","stream":"stdout","time":"2016-03-02T22:59:04.609292428Z"} {"log":"Execute /scripts/packetbeat_before_build.sh\n","stream":"stdout","time":"2016-03-02T22:59:04.617434682Z"} `) env.mustWriteLinesToFile(testlogName, testline) ctx, cancelInput := context.WithCancel(context.Background()) env.startInput(ctx, inp) env.waitUntilEventCount(2) env.requireOffsetInRegistry(testlogName, len(testline)) env.requireEventContents(0, "time", "2016-03-02T22:59:04.609292428Z") env.requireEventContents(0, "stream", "stdout") cancelInput() env.waitUntilInputStops() }
explode_data.jsonl/14913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 518 }
[ 2830, 3393, 47, 40488, 35, 13659, 51053, 5632, 287, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 501, 2505, 16451, 12723, 1155, 692, 18185, 839, 675, 1669, 330, 1944, 1665, 698, 17430, 79, 1669, 6105, 69419, 4021, 2505, 9147, 14032, 31344...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_DeleteServer_validation(t *testing.T) { var err error err = testClient.DeleteServer(&DeleteServerInput{ ServiceID: "", }) if err != ErrMissingServiceID { t.Errorf("bad error: %s", err) } err = testClient.DeleteServer(&DeleteServerInput{ ServiceID: "foo", PoolID: "", }) if err != ErrMissingPool { t.Errorf("bad error: %q", err) } err = testClient.DeleteServer(&DeleteServerInput{ ServiceID: "foo", PoolID: "bar", Server: "", }) if err != ErrMissingServer { t.Errorf("bad error: %q", err) } }
explode_data.jsonl/8323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 2959, 57418, 5475, 19416, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 9859, 284, 1273, 2959, 18872, 5475, 2099, 6435, 5475, 2505, 515, 197, 91619, 915, 25, 8324, 197, 3518, 743, 1848, 961, 15495, 25080, 1860, 915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestClient_GetCurrentWeather(t *testing.T) { mux := setupMux(t) s := httptest.NewServer(mux) defer s.Close() expected := &Weather{ Location: "fakecity", Description: "Partly cloudy", Temperature: -3, WindSpeed: 4, } c := getClient(t, s) actual, err := c.GetCurrentWeather("fakecity") if assert.Nil(t, err, "Client_GetCurrentWeather: unexpected fetch error %s", err) { assert.Equal(t, expected, actual, "ParseWeather: incorrectly parsed weather") } }
explode_data.jsonl/63363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 2959, 13614, 5405, 28981, 1155, 353, 8840, 836, 8, 341, 2109, 2200, 1669, 6505, 44, 2200, 1155, 692, 1903, 1669, 54320, 70334, 7121, 5475, 1255, 2200, 340, 16867, 274, 10421, 2822, 42400, 1669, 609, 28981, 515, 197, 197, 470...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInc(t *testing.T) { j := julian.CalendarGregorianToJD(2065, 6, 24) var e pe.Elements pe.Mean(pe.Mercury, j, &e) if i := pe.Inc(pe.Mercury, j); i != e.Inc { t.Fatal(i, "!=", e.Inc) } }
explode_data.jsonl/32578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 39245, 1155, 353, 8840, 836, 8, 341, 12428, 1669, 40538, 1103, 45122, 43512, 22090, 1249, 49915, 7, 17, 15, 21, 20, 11, 220, 21, 11, 220, 17, 19, 340, 2405, 384, 1051, 68777, 198, 197, 375, 1321, 5307, 65079, 1321, 261, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_SinglePrefixMatcher_IsIncluded(t *testing.T) { matcher := NewSimplePrefixMatcher("folder-1/b/a") assert.True(t, matcher.IsIncluded("folder-1/b/a")) assert.True(t, matcher.IsIncluded("folder-1/b/ab")) assert.True(t, matcher.IsIncluded("folder-1/b/a/345")) assert.False(t, matcher.IsIncluded("folder-1/b")) }
explode_data.jsonl/45895
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 1098, 2173, 14335, 37554, 31879, 84610, 1155, 353, 8840, 836, 8, 341, 2109, 28058, 1669, 1532, 16374, 14335, 37554, 445, 17668, 12, 16, 3470, 14186, 5130, 6948, 32443, 1155, 11, 36052, 4506, 84610, 445, 17668, 12, 16, 3470, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSwiftRuleStorage(t *testing.T) { s, err := e2e.NewScenario(networkName) require.NoError(t, err) defer s.Close() swift := e2edb.NewSwiftStorage() require.NoError(t, s.StartAndWaitReady(swift)) store, err := ruler.NewRuleStorage(ruler.RuleStoreConfig{ Type: "swift", Swift: swiftConfig(swift), }, nil) require.NoError(t, err) ctx := context.Background() // Add 2 rule group. r1 := newRule(userID, "1") err = store.SetRuleGroup(ctx, userID, "foo", r1) require.NoError(t, err) r2 := newRule(userID, "2") err = store.SetRuleGroup(ctx, userID, "bar", r2) require.NoError(t, err) // Get rules back. rls, err := store.LoadAllRuleGroups(ctx) require.NoError(t, err) require.Equal(t, 2, len(rls[userID])) userRules := rls[userID] sort.Slice(userRules, func(i, j int) bool { return userRules[i].Name < userRules[j].Name }) require.Equal(t, r1, userRules[0]) require.Equal(t, r2, userRules[1]) // Delete the first rule group err = store.DeleteRuleGroup(ctx, userID, "foo", r1.Name) require.NoError(t, err) //Verify we only have the second rule group rls, err = store.LoadAllRuleGroups(ctx) require.NoError(t, err) require.Equal(t, 1, len(rls[userID])) require.Equal(t, r2, rls[userID][0]) }
explode_data.jsonl/66730
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 507 }
[ 2830, 3393, 55336, 11337, 5793, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 384, 17, 68, 7121, 54031, 46542, 675, 340, 17957, 35699, 1155, 11, 1848, 340, 16867, 274, 10421, 741, 77295, 2085, 1669, 384, 17, 93727, 7121, 55336, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTLSClientCertFromFileAndKeyFromPem(t *testing.T) { clientTLSOverride := endpoint.MutualTLSConfig{} clientTLSOverride.Client.Cert.Path = pathvar.Subst(certPath) clientTLSOverride.Client.Key.Path = "" clientTLSOverride.Client.Cert.Pem = "" clientTLSOverride.Client.Key.Pem = `-----BEGIN EC PRIVATE KEY----- MIGkAgEBBDByldj7VTpqTQESGgJpR9PFW9b6YTTde2WN6/IiBo2nW+CIDmwQgmAl c/EOc9wmgu+gBwYFK4EEACKhZANiAAT6I1CGNrkchIAEmeJGo53XhDsoJwRiohBv 2PotEEGuO6rMyaOupulj2VOj+YtgWw4ZtU49g4Nv6rq1QlKwRYyMwwRJSAZHIUMh YZjcDi7YEOZ3Fs1hxKmIxR+TTR2vf9I= -----END EC PRIVATE KEY-----` backends, err := overrideClientTLSInBackend(configBackend, &clientTLSOverride) assert.Nil(t, err) config, err := ConfigFromBackend(backends...) assert.Nil(t, err) certs := config.TLSClientCerts() assert.Equal(t, 1, len(certs), "Expected only one tls cert struct") if reflect.DeepEqual(certs[0], tls.Certificate{}) { t.Fatal("Actual cert is empty") } }
explode_data.jsonl/34095
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 438 }
[ 2830, 3393, 45439, 2959, 36934, 43633, 3036, 1592, 3830, 47, 336, 1155, 353, 8840, 836, 8, 1476, 25291, 45439, 2177, 1669, 14887, 1321, 332, 928, 45439, 2648, 16094, 25291, 45439, 2177, 11716, 727, 529, 17474, 284, 1815, 947, 12391, 267, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestScannerEmptyRangeSet(t *testing.T) { defer leaktest.AfterTest(t)() ranges := newTestRangeSet(0, t) q := &testQueue{} mc := hlc.NewManualClock(123) clock := hlc.NewClock(mc.UnixNano, time.Nanosecond) s := newReplicaScanner(makeAmbCtx(), clock, time.Hour, 0, 0, ranges) s.AddQueues(q) stopper := stop.NewStopper() defer stopper.Stop(context.TODO()) s.Start(stopper) time.Sleep(time.Millisecond) // give it some time to (not) busy loop if count := s.scanCount(); count > 1 { t.Errorf("expected at most one loop, but got %d", count) } }
explode_data.jsonl/78122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 31002, 3522, 6046, 1649, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 7000, 5520, 1669, 501, 2271, 6046, 1649, 7, 15, 11, 259, 340, 18534, 1669, 609, 1944, 7554, 16094, 97662, 1669, 305, 172...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFrist(t *testing.T) { for _, test := range allTests { if len(test.out) == 0 { continue } if res := Frist(test.n, test.s); res != test.out[0] { t.Fatalf("Frist(%d, %s) = %s, want %s.", test.n, test.s, res, test.out[0]) } } }
explode_data.jsonl/76258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 37, 2819, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 678, 18200, 341, 197, 743, 2422, 8623, 2532, 8, 621, 220, 15, 341, 298, 11664, 198, 197, 197, 532, 197, 743, 592, 1669, 434, 2819, 8623, 1253, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestWorkerShouldWorkSequentiallyWithOnlyOneGoroutine(t *testing.T) { resultCh := make(chan string) worker := work.NewWorker(1, func(p work.Payload) interface{} { return fmt.Sprintf("%s.", p.Data) }, true) go func() { var result string for v := range worker.Completions() { result += v.Output.(string) } resultCh <- result }() for i := 0; i < 100; i++ { err := worker.Dispatch(work.Payload{Data: strconv.Itoa(i)}) if err != nil { t.Fail() return } } worker.Quit() assert.Equal(t, "0.1.2.3.4.5.6.7.8.9.10.11.12.13.14.15.16.17.18.19.20.21.22.23.24.25.26.27.28.29.30.31.32.33.34.35."+ "36.37.38.39.40.41.42.43.44.45.46.47.48.49.50.51.52.53.54.55.56.57.58.59.60.61.62.63.64.65.66.67.68.69.70.71.72.73.74.75.76.77.78."+ "79.80.81.82.83.84.85.86.87.88.89.90.91.92.93.94.95.96.97.98.99.", <-resultCh, "Jobs were completed in wrong order or incompletely") }
explode_data.jsonl/11921
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 442 }
[ 2830, 3393, 21936, 14996, 6776, 22046, 398, 2354, 7308, 3966, 38, 269, 14159, 1155, 353, 8840, 836, 8, 341, 9559, 1143, 1669, 1281, 35190, 914, 340, 197, 21462, 1669, 975, 7121, 21936, 7, 16, 11, 2915, 1295, 975, 86432, 8, 3749, 6257,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLocation_UniqueKey(t *testing.T) { m1 := &Location{ LocationID: 1, BookNumber: sql.NullInt32{Int32: 2, Valid: true}, ChapterNumber: sql.NullInt32{Int32: 3, Valid: true}, DocumentID: sql.NullInt32{Int32: 4, Valid: true}, Track: sql.NullInt32{Int32: 5, Valid: true}, IssueTagNumber: 6, KeySymbol: sql.NullString{String: "nwtsty", Valid: true}, MepsLanguage: 7, LocationType: 8, Title: sql.NullString{String: "ThisTitleShouldNotBeInUniqueKey", Valid: true}, } m2 := &Location{ LocationID: 1, BookNumber: sql.NullInt32{}, ChapterNumber: sql.NullInt32{}, DocumentID: sql.NullInt32{}, Track: sql.NullInt32{}, IssueTagNumber: 6, KeySymbol: sql.NullString{}, MepsLanguage: 7, LocationType: 8, Title: sql.NullString{String: "ThisOTitleShouldNotBeInUniqueKeyEither", Valid: true}, } assert.Equal(t, "2_3_4_5_6_nwtsty_7_8", m1.UniqueKey()) assert.Equal(t, "0_0_0_0_6__7_8", m2.UniqueKey()) }
explode_data.jsonl/60843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 474 }
[ 2830, 3393, 4707, 62, 22811, 1592, 1155, 353, 8840, 836, 8, 341, 2109, 16, 1669, 609, 4707, 515, 197, 197, 4707, 915, 25, 257, 220, 16, 345, 197, 197, 7134, 2833, 25, 257, 5704, 23979, 1072, 18, 17, 90, 1072, 18, 17, 25, 220, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWithServerOption(t *testing.T) { opt := WithServerOption(grpc.WriteBufferSize(128 * 1024)) cmp := newCmp(t, opt) assert.Equal(t, 3, len(cmp.config.serverOptions)) }
explode_data.jsonl/68380
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 2354, 5475, 5341, 1155, 353, 8840, 836, 8, 341, 64838, 1669, 3085, 5475, 5341, 35963, 3992, 4073, 52661, 7, 16, 17, 23, 353, 220, 16, 15, 17, 19, 1171, 1444, 1307, 1669, 501, 34, 1307, 1155, 11, 3387, 340, 6948, 12808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBasicUsage(t *testing.T) { store := core.NewMemoryRws() subject, err := core.NewPageStorage(store, noOpSync) assertNoError(t, err) err = subject.InitialiseDb() assertNoError(t, err) // Write a stream, bind to a doc and a path inputRdr := bytes.NewReader([]byte(inputData)) pageId, err := subject.WriteStream(inputRdr) assertNoError(t, err) assertValidPage(t, pageId) newDoc, err := support.NewRandomId() assertNoError(t, err) expiredPageId, err := subject.BindIndex(newDoc, pageId) assertNoError(t, err) assertInvalidPage(t, expiredPageId) previousDocId, err := subject.BindPath("/my/path/to/poem", newDoc) assertNoError(t, err) assertNil(t, previousDocId) // Make sure we can see the path and ids headPageId, err := subject.GetDocumentHead(newDoc) assertNoError(t, err) assertPagesEqual(t, headPageId, pageId) docId, err := subject.GetDocumentIdByPath("/my/path/to/poem") assertNoError(t, err) assertSameId(t, docId, newDoc) paths, err := subject.GetPathsForDocument(docId) assertNoError(t, err) assertContains(t, paths, "/my/path/to/poem") // Now see if we can recover the stream pageStream := subject.GetStream(headPageId) data, err := io.ReadAll(pageStream) assertNoError(t,err) assertSameString(t, string(data), inputData) }
explode_data.jsonl/28106
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 472 }
[ 2830, 3393, 15944, 14783, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 6200, 7121, 10642, 49, 8915, 741, 28624, 583, 11, 1848, 1669, 6200, 7121, 2665, 5793, 31200, 11, 902, 7125, 12154, 340, 6948, 2753, 1454, 1155, 11, 1848, 692, 9859, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImportEquals(t *testing.T) { imp1 := &Import{Path: VarTerm("foo"), Alias: Var("bar")} imp11 := &Import{Path: VarTerm("foo"), Alias: Var("bar")} imp2 := &Import{Path: VarTerm("foo")} imp3 := &Import{Path: RefTerm(VarTerm("bar"), VarTerm("baz"), VarTerm("qux")), Alias: Var("corge")} imp33 := &Import{Path: RefTerm(VarTerm("bar"), VarTerm("baz"), VarTerm("qux")), Alias: Var("corge")} imp4 := &Import{Path: RefTerm(VarTerm("bar"), VarTerm("baz"), VarTerm("qux"))} assertImportsEqual(t, imp1, imp1) assertImportsEqual(t, imp1, imp11) assertImportsEqual(t, imp3, imp3) assertImportsEqual(t, imp3, imp33) imps := []*Import{imp1, imp2, imp3, imp4} for i := range imps { for j := range imps { if i != j { assertImportsNotEqual(t, imps[i], imps[j]) } } } }
explode_data.jsonl/65300
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 11511, 4315, 1155, 353, 8840, 836, 8, 341, 197, 6664, 16, 1669, 609, 11511, 90, 1820, 25, 8735, 17249, 445, 7975, 3975, 58040, 25, 8735, 445, 2257, 42132, 197, 6664, 16, 16, 1669, 609, 11511, 90, 1820, 25, 8735, 17249, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestService_ReleaseDetails(t *testing.T) { tcs := []struct { svc Service expectedRes *release.Release expectedErr error }{ { // TC#1 svc: Service{ storage: &storage.Fake{ GetErr: errFake, }, }, expectedErr: errFake, }, { // TC#2 svc: Service{ storage: &storage.Fake{ Item: []byte("{}"), }, newHelmProxyFn: func(kube *model.Kube) (proxy.Interface, error) { return nil, errFake }, }, expectedErr: errFake, }, { // TC#3 svc: Service{ storage: &storage.Fake{ Item: []byte("{}"), }, newHelmProxyFn: func(kube *model.Kube) (proxy.Interface, error) { return &fakeHelmProxy{ err: errFake, }, nil }, }, expectedErr: errFake, }, { // TC#4 svc: Service{ storage: &storage.Fake{ Item: []byte("{}"), }, newHelmProxyFn: func(kube *model.Kube) (proxy.Interface, error) { return &fakeHelmProxy{ getReleaseResp: &services.GetReleaseContentResponse{ Release: fakeRls, }, }, nil }, }, expectedRes: fakeRls, }, } for i, tc := range tcs { rls, err := tc.svc.ReleaseDetails(context.Background(), "testCluster", "") require.Equalf(t, tc.expectedErr, errors.Cause(err), "TC#%d: check errors", i+1) if err == nil { require.Equalf(t, tc.expectedRes, rls, "TC#%d: check results", i+1) } } }
explode_data.jsonl/1998
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 661 }
[ 2830, 3393, 1860, 85573, 7799, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 1903, 7362, 5362, 271, 197, 42400, 1061, 353, 22998, 58693, 198, 197, 42400, 7747, 1465, 198, 197, 59403, 197, 197, 90, 442, 24591, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestControllerUnPublishVolume(t *testing.T) { dataDisk := make([]compute.DataDisk, 0) dataDisk = append(dataDisk, compute.DataDisk{Name: to.StringPtr(testVolumeName)}) vm := NewFakeVm(dataDisk) d, err := NewFakeDriver(t) require.NoError(t, err) testCloud := d.GetCloud() mockVMsClient := testCloud.VirtualMachinesClient.(*mockvmclient.MockInterface) mockVMsClient.EXPECT().Get(gomock.Any(), testCloud.ResourceGroup, fakeNode, gomock.Any()).Return(*vm, nil).AnyTimes() mockVMsClient.EXPECT().UpdateAsync(gomock.Any(), testCloud.ResourceGroup, gomock.Any(), gomock.Any(), gomock.Any()).Return(&azure.Future{}, nil).AnyTimes() mockVMsClient.EXPECT().UpdateAsync(gomock.Any(), testCloud.ResourceGroup, gomock.Any(), gomock.Any(), gomock.Any()).Return(nil, nil).AnyTimes() mockVMsClient.EXPECT().WaitForUpdateResult(gomock.Any(), gomock.Any(), testCloud.ResourceGroup, gomock.Any()).Return(nil).AnyTimes() req := &csi.ControllerUnpublishVolumeRequest{ VolumeId: testVolumeName, NodeId: fakeNode, } _, err = d.ControllerUnpublishVolume(context.Background(), req) require.NoError(t, err) }
explode_data.jsonl/49371
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 401 }
[ 2830, 3393, 2051, 1806, 50145, 18902, 1155, 353, 8840, 836, 8, 341, 8924, 47583, 1669, 1281, 10556, 27706, 3336, 47583, 11, 220, 15, 340, 8924, 47583, 284, 8737, 2592, 47583, 11, 12564, 3336, 47583, 63121, 25, 311, 6431, 5348, 8623, 189...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateFolder(t *testing.T) { client := NewMockClient(createdFolderData) api := &wrike.API{Config: mockAPIConfig, HTTPClient: client} params := NewCreateFolderParams() folders, err := api.CreateFolder("IEAAAAAQI4AB5BGU", params) if err != nil { fmt.Println(err.Error()) } // Check request object assert.Equal(t, client.Request.Method, http.MethodPost) assert.Equal(t, client.Request.URL.String(), "https://app-eu.wrike.com/api/v4/folders/IEAAAAAQI4AB5BGU/folders") body, _ := ioutil.ReadAll(client.Request.Body) data, _ := url.QueryUnescape(string(body)) assert.Equal(t, data, "customFields=[{\"id\":\"IEAAAAAQJUAAAAAX\",\"value\":\"testValue\"}]&description=Test description&metadata=[{\"key\":\"testMetaKey\",\"value\":\"testMetaValue\"}]&project={\"ownerIds\":[\"KUAAAAAQ\"],\"status\":\"Green\",\"startDate\":\"2019-02-18\",\"endDate\":\"2019-02-25\"}&shareds=[\"KUAAAAAQ\"]&title=Test folder") SharedRequestTests(t, client.Request) assert.Equal(t, string(folders.Data[0].ID), "IEAAAAAQI4AB5BGV") }
explode_data.jsonl/64306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 397 }
[ 2830, 3393, 4021, 13682, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 1532, 11571, 2959, 70963, 13682, 1043, 340, 54299, 1669, 609, 86, 40652, 24922, 90, 2648, 25, 7860, 7082, 2648, 11, 10130, 2959, 25, 2943, 532, 25856, 1669, 1532, 4021,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStmtLists(t *testing.T) { for _, src := range stmts { file, err := parser.ParseFile(fset, "", "package p; func _() {"+src+"}", parser.ParseComments) if err != nil { panic(err) // error in test } var buf bytes.Buffer err = Fprint(&buf, fset, file.Decls[0].(*ast.FuncDecl).Body.List) // only print statements if err != nil { panic(err) // error in test } out := buf.String() if out != src { t.Errorf("\ngot : %q\nwant: %q\n", out, src) } } }
explode_data.jsonl/64602
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 31063, 37848, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2286, 1669, 2088, 20020, 82, 341, 197, 17661, 11, 1848, 1669, 6729, 8937, 1703, 955, 746, 11, 7342, 330, 1722, 281, 26, 2915, 716, 368, 5212, 10, 3548, 5172, 9545, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestWriteRAWJSONMarshalError(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { writeRawJSON(http.StatusOK, &marshalError{errors.New("Undecodable")}, w) })) client := http.Client{} resp, err := client.Get(server.URL) if err != nil { t.Errorf("unexpected error: %v", err) } if resp.StatusCode != http.StatusInternalServerError { t.Errorf("unexpected status code %d", resp.StatusCode) } }
explode_data.jsonl/71506
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 172 }
[ 2830, 3393, 7985, 21550, 5370, 55438, 1454, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 4232, 353, 1254, 9659, 8, 341, 197, 24945, 20015, 5370, 19886, 52989, 11, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConfigSetLogLevel(t *testing.T) { f, err := ioutil.TempDir("/tmp", "newrelic") assert.NoError(t, err) defer os.RemoveAll(f) // Initialize the new configuration directory c, err := LoadConfig(f) assert.NoError(t, err) assert.Equal(t, c.configDir, f) // Set the valid log levels for _, l := range []string{ "ERROR", "WARN", "INFO", "DEBUG", "TRACE", } { err = c.Set("logLevel", l) assert.NoError(t, err) assert.Equal(t, l, c.LogLevel) } err = c.Set("logLevel", "INVALID_VALUE") assert.Error(t, err) }
explode_data.jsonl/68830
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 2648, 1649, 72676, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 43144, 65009, 6184, 4283, 5173, 497, 330, 931, 265, 415, 1138, 6948, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 955, 692, 197, 322, 9008, 279, 501...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMapStringByte_ForEach(t *testing.T) { Convey("TestMapStringByte.ForEach", t, func() { var k string = "b6993a57-1529-4fe1-bd6b-7fe97290933d" var v byte = 254 hits := 0 test := omap.NewMapStringByte(1) So(test.Put(k, v), ShouldPointTo, test) So(test.Len(), ShouldEqual, 1) So(test.ForEach(func(kk string, vv byte) { So(kk, ShouldEqual, k) So(vv, ShouldEqual, v) hits++ }), ShouldPointTo, test) So(hits, ShouldEqual, 1) }) }
explode_data.jsonl/5060
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 2227, 703, 7153, 84368, 4854, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 2271, 2227, 703, 7153, 67743, 497, 259, 11, 2915, 368, 341, 197, 2405, 595, 914, 284, 330, 65, 21, 24, 24, 18, 64, 20, 22, 12, 16, 20, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuspendWithDeadline(t *testing.T) { cancel, controller := newController() defer cancel() wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("") // operate the workflow. it should become in a suspended state after ctx := context.Background() wf := unmarshalWF(suspendTemplateWithDeadline) wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{}) assert.Nil(t, err) woc := newWorkflowOperationCtx(wf, controller) woc.operate(ctx) wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{}) assert.Nil(t, err) assert.True(t, util.IsWorkflowSuspended(wf)) // operate again and verify no pods were scheduled woc = newWorkflowOperationCtx(wf, controller) woc.operate(ctx) updatedWf, err := wfcset.Get(ctx, wf.Name, metav1.GetOptions{}) assert.Nil(t, err) found := false for _, node := range updatedWf.Status.Nodes { if node.Type == wfv1.NodeTypeSuspend { assert.Equal(t, node.Phase, wfv1.NodeFailed) assert.Contains(t, node.Message, "Step exceeded its deadline") found = true } } assert.True(t, found) }
explode_data.jsonl/70967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 415 }
[ 2830, 3393, 50, 12758, 2354, 83593, 1155, 353, 8840, 836, 8, 341, 84441, 11, 6461, 1669, 501, 2051, 741, 16867, 9121, 741, 6692, 8316, 746, 1669, 6461, 1418, 69, 2972, 746, 18979, 45926, 73, 53, 16, 7141, 16, 1005, 6776, 38140, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIoBufferMaxBufferReadOnce(t *testing.T) { b := newIoBuffer(1) s := randString(MaxBufferLength + 1) input := make([]byte, 0, 1024) reader := bytes.NewReader([]byte(s)) countbytes := 0 for { n, _, err := b.ReadOnce(reader) if err != nil { if err == io.EOF { break } t.Fatal(err) } countbytes = countbytes + int(n) if countbytes >= MaxBufferLength { input = append(input, b.Peek(int(countbytes))...) b.Drain(int(countbytes)) countbytes = 0 } } if countbytes > 0 { input = append(input, b.Peek(int(countbytes))...) b.Drain(int(countbytes)) } if !bytes.Equal(input, []byte(s)) { t.Errorf("Expect got %s but got %s", s, string(input)) } if b.Cap() > MaxBufferLength { t.Errorf("Expect got length %d", b.Cap()) } }
explode_data.jsonl/24062
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 338 }
[ 2830, 3393, 42799, 4095, 5974, 4095, 4418, 12522, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 501, 42799, 4095, 7, 16, 340, 1903, 1669, 10382, 703, 90332, 4095, 4373, 488, 220, 16, 340, 22427, 1669, 1281, 10556, 3782, 11, 220, 15, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestClips(t *testing.T) { Convey("get Clips", t, func() { _, _, _, err := dao.Clips(ctx(), 27515258, 1, 20) err = nil So(err, ShouldBeNil) }) }
explode_data.jsonl/51612
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 5066, 3077, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 455, 83373, 497, 259, 11, 2915, 368, 341, 197, 197, 6878, 8358, 8358, 1848, 1669, 24775, 21610, 3077, 7502, 1507, 220, 17, 22, 20, 16, 20, 17, 20, 23, 11, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidArgsFuncCmdContext(t *testing.T) { validArgsFunc := func(cmd *Command, args []string, toComplete string) ([]string, ShellCompDirective) { ctx := cmd.Context() if ctx == nil { t.Error("Received nil context in completion func") } else if ctx.Value("testKey") != "123" { t.Error("Received invalid context") } return nil, ShellCompDirectiveDefault } rootCmd := &Command{ Use: "root", Run: emptyRun, } childCmd := &Command{ Use: "childCmd", ValidArgsFunction: validArgsFunc, Run: emptyRun, } rootCmd.AddCommand(childCmd) //nolint:golint,staticcheck // We can safely use a basic type as key in tests. ctx := context.WithValue(context.Background(), "testKey", "123") // Test completing an empty string on the childCmd _, output, err := executeCommandWithContextC(ctx, rootCmd, ShellCompNoDescRequestCmd, "childCmd", "") if err != nil { t.Errorf("Unexpected error: %v", err) } expected := strings.Join([]string{ ":0", "Completion ended with directive: ShellCompDirectiveDefault", ""}, "\n") if output != expected { t.Errorf("expected: %q, got: %q", expected, output) } }
explode_data.jsonl/43746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 426 }
[ 2830, 3393, 4088, 4117, 9626, 15613, 1972, 1155, 353, 8840, 836, 8, 341, 56322, 4117, 9626, 1669, 2915, 14160, 353, 4062, 11, 2827, 3056, 917, 11, 311, 12548, 914, 8, 34923, 917, 11, 29402, 13552, 62076, 8, 341, 197, 20985, 1669, 5439...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestProtobufNilInput(t *testing.T) { var marshaler transports.Marshaler marshaler = transports.ProtobufMarshaler{} _, err := marshaler.Marshal(nil) if strings.Index(err.Error(), transports.MarshalerNilTypeError) < 0 { t.Fatal("Nil type doesn't break the Protobuf marshaler") } }
explode_data.jsonl/71163
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 12423, 18464, 19064, 2505, 1155, 353, 8840, 836, 8, 341, 2405, 60771, 261, 68069, 37271, 261, 198, 2109, 28423, 261, 284, 68069, 42825, 18464, 55438, 261, 16094, 197, 6878, 1848, 1669, 60771, 261, 37271, 27907, 692, 743, 9069,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHelpMatch(t *testing.T) { plug := Help{} plug.Setup(make(chan IRCMessage), PluginConf{}) tests := []struct { msg string matched bool }{ {"Laala, would you please help me", true}, {"Laala, please help me", true}, {"Laala, help me", true}, {"Laala: help me", true}, {"Laala~ help me", true}, {"Laala help me", true}, {"Laala, help me!", true}, {"Laala, help me?", true}, {"Laala, help me.", true}, {"Laala, help me,", true}, {"Laala, help me~", true}, {"Laala, help me~!.,-", true}, {"Laala, me help", false}, {"Laala, you please me help would", false}, {"Laala, you please me help", false}, {"Laala, you me help", false}, {"Laala, I hate you", false}, {"Laala, tell me about yourself", true}, {"Laala, please, tell me about yourself", true}, {"Laala, tell me about yourself, please", true}, {"Laala, please, tell me about yourself, please", true}, {"Laala, how do I search for anime", true}, {"Laala, how do I search for anime???? ", true}, {"Laala, how do I search for manga", true}, {"Laala, how do I search for manga?!", true}, } for _, test := range tests { result := plug.match.MatchString(test.msg) if result != test.matched { t.Error(test.msg, "expected", test.matched, "but got", result) } } }
explode_data.jsonl/25012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 482 }
[ 2830, 3393, 12689, 8331, 1155, 353, 8840, 836, 8, 341, 197, 47474, 1669, 11479, 16094, 197, 47474, 39820, 36944, 35190, 59328, 2052, 701, 21245, 15578, 6257, 692, 78216, 1669, 3056, 1235, 341, 197, 21169, 257, 914, 198, 197, 2109, 34244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFileRename(t *testing.T) { t.Run("CacheModeOff", func(t *testing.T) { testFileRename(t, vfscommon.CacheModeOff) }) t.Run("CacheModeFull", func(t *testing.T) { testFileRename(t, vfscommon.CacheModeFull) }) }
explode_data.jsonl/9739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 1703, 88757, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 8233, 3636, 4596, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18185, 1703, 88757, 1155, 11, 92941, 5464, 46130, 3636, 4596, 340, 197, 3518, 3244, 16708, 445, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVolumeProvisionerWithZeroCapacity(t *testing.T) { plugMgr, tmpDir := newPluginMgr(t, makeScaleIOSecret(testSecret, testns)) defer os.RemoveAll(tmpDir) plug, err := plugMgr.FindPluginByName(sioPluginName) if err != nil { t.Fatalf("Can't find the plugin %v", sioPluginName) } sioPlug, ok := plug.(*sioPlugin) if !ok { t.Fatal("Cannot assert plugin to be type sioPlugin") } options := volume.VolumeOptions{ ClusterName: "testcluster", PVName: "pvc-sio-dynamic-vol", PVC: volumetest.CreateTestPVC("0Mi", []api.PersistentVolumeAccessMode{api.ReadWriteOnce}), PersistentVolumeReclaimPolicy: api.PersistentVolumeReclaimDelete, } options.PVC.Namespace = testns options.PVC.Spec.AccessModes = []api.PersistentVolumeAccessMode{ api.ReadWriteOnce, } options.Parameters = map[string]string{ confKey.gateway: "http://test.scaleio:11111", confKey.system: "sio", confKey.protectionDomain: testSioPD, confKey.storagePool: "default", confKey.secretName: "sio-secret", } provisioner, _ := sioPlug.NewProvisioner(options) sio := newFakeSio() sioVol := provisioner.(*sioVolume) if err := sioVol.setSioMgrFromConfig(); err != nil { t.Fatalf("failed to create scaleio mgr from config: %v", err) } sioVol.sioMgr.client = sio _, err = provisioner.Provision(nil, nil) if err == nil { t.Fatalf("call to Provision() should fail with invalid capacity") } }
explode_data.jsonl/29497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 633 }
[ 2830, 3393, 18902, 1336, 13013, 261, 2354, 17999, 29392, 1155, 353, 8840, 836, 8, 341, 197, 47474, 25567, 11, 4174, 6184, 1669, 501, 11546, 25567, 1155, 11, 1281, 6947, 28136, 50856, 8623, 19773, 11, 1273, 4412, 1171, 16867, 2643, 84427, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestErrWrapDefVal(t *testing.T) { gopClTest(t, ` import "strconv" func addSafe(x, y string) int { return strconv.Atoi(x)?:0 + strconv.Atoi(y)?:0 } `, `package main import strconv "strconv" func addSafe(x string, y string) int { return func() (_gop_ret int) { var _gop_err error _gop_ret, _gop_err = strconv.Atoi(x) if _gop_err != nil { return 0 } return }() + func() (_gop_ret int) { var _gop_err error _gop_ret, _gop_err = strconv.Atoi(y) if _gop_err != nil { return 0 } return }() } `) }
explode_data.jsonl/73611
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 7747, 26787, 2620, 2208, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 474, 330, 51848, 1837, 2830, 912, 25663, 2075, 11, 379, 914, 8, 526, 341, 853, 33317, 67107, 2075, 8, 4820, 15, 488, 33317, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServer_Rejects_HeadersNoEnd_Then_Ping(t *testing.T) { testServerRejectsConn(t, func(st *serverTester) { st.writeHeaders(HeadersFrameParam{ StreamID: 1, BlockFragment: st.encodeHeader(), EndStream: true, EndHeaders: false, }) if err := st.fr.WritePing(false, [8]byte{}); err != nil { t.Fatal(err) } }) }
explode_data.jsonl/71652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 5475, 50693, 583, 82, 62, 10574, 2753, 3727, 62, 12209, 1088, 287, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 78413, 82, 9701, 1155, 11, 2915, 5895, 353, 4030, 58699, 8, 341, 197, 18388, 3836, 10574, 7, 10574, 4369, 2001, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFalseRequire(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` (require => require('/test.txt'))() `, "/test.txt": `This is a test.`, }, entryPaths: []string{"/entry.js"}, options: config.Options{ Mode: config.ModeBundle, AbsOutputFile: "/out.js", }, }) }
explode_data.jsonl/38480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 4049, 17959, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 197, 23482, 589, 1373, 3396, 1944, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetSecurityToNoneByDefault(t *testing.T) { jaeger := v1.NewJaeger(types.NamespacedName{Name: "my-instance"}) normalize(context.Background(), jaeger) assert.Equal(t, v1.IngressSecurityNoneExplicit, jaeger.Spec.Ingress.Security) }
explode_data.jsonl/21852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 1649, 15352, 1249, 4064, 1359, 3675, 1155, 353, 8840, 836, 8, 341, 197, 5580, 1878, 1669, 348, 16, 7121, 52445, 1878, 52613, 98932, 68552, 675, 63121, 25, 330, 2408, 73655, 23625, 197, 30590, 5378, 19047, 1507, 11937, 1878, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBuildUncleanTarFilenames(t *testing.T) { skip.If(t, versions.LessThan(testEnv.DaemonAPIVersion(), "1.37"), "broken in earlier versions") skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME") ctx := context.TODO() defer setupTest(t)() dockerfile := `FROM scratch COPY foo / FROM scratch COPY bar /` buf := bytes.NewBuffer(nil) w := tar.NewWriter(buf) writeTarRecord(t, w, "Dockerfile", dockerfile) writeTarRecord(t, w, "../foo", "foocontents0") writeTarRecord(t, w, "/bar", "barcontents0") err := w.Close() assert.NilError(t, err) apiclient := testEnv.APIClient() resp, err := apiclient.ImageBuild(ctx, buf, types.ImageBuildOptions{ Remove: true, ForceRemove: true, }) out := bytes.NewBuffer(nil) assert.NilError(t, err) _, err = io.Copy(out, resp.Body) resp.Body.Close() assert.NilError(t, err) // repeat with changed data should not cause cache hits buf = bytes.NewBuffer(nil) w = tar.NewWriter(buf) writeTarRecord(t, w, "Dockerfile", dockerfile) writeTarRecord(t, w, "../foo", "foocontents1") writeTarRecord(t, w, "/bar", "barcontents1") err = w.Close() assert.NilError(t, err) resp, err = apiclient.ImageBuild(ctx, buf, types.ImageBuildOptions{ Remove: true, ForceRemove: true, }) out = bytes.NewBuffer(nil) assert.NilError(t, err) _, err = io.Copy(out, resp.Body) resp.Body.Close() assert.NilError(t, err) assert.Assert(t, !strings.Contains(out.String(), "Using cache")) }
explode_data.jsonl/82585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 607 }
[ 2830, 3393, 11066, 63718, 2675, 62733, 37, 52768, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 32901, 1155, 11, 10795, 1214, 433, 26067, 8623, 14359, 909, 64, 7291, 7082, 5637, 1507, 330, 16, 13, 18, 22, 3975, 330, 48909, 304, 6788, 107...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestToErrorE(t *testing.T) { type args struct { s string } tests := []struct { name string args args want string want1 error }{ { name: "", args: args{ s: "error", }, want: "error", want1: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, got1 := ToErrorE(tt.args.s) if got.Error() != tt.want { t.Errorf("ToErrorE() got = %v, want %v", got.Error(), tt.want) } if !reflect.DeepEqual(got1, tt.want1) { t.Errorf("ToErrorE() got1 = %v, want %v", got1, tt.want1) } }) } }
explode_data.jsonl/5483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 293 }
[ 2830, 3393, 1249, 1454, 36, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 31215, 220, 2827, 198, 197, 50780, 220, 914, 198, 197, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRetriesNewWrappedErrors_Is(t *testing.T) { err := NewRetriesResult(NewResult(500, "this is a wrapped error, %w", io.ErrUnexpectedEOF), 0, time.Now(), nil) if !protocol.ResultIs(err, io.ErrUnexpectedEOF) { t.Error("Result expected to be a wrapped ErrUnexpectedEOF but was not") } }
explode_data.jsonl/66498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 12020, 4019, 3564, 67795, 13877, 31879, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 1532, 12020, 4019, 2077, 35063, 2077, 7, 20, 15, 15, 11, 330, 574, 374, 264, 19472, 1465, 11, 1018, 86, 497, 6399, 27862, 29430, 23483, 701,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEcPointAddSimple(t *testing.T) { curve := btcec.S256() num := big.NewInt(1) p1, _ := NewScalarBaseMult(curve, num) p2, _ := NewScalarBaseMult(curve, num) p3, err := p1.Add(p2) if err != nil { t.Errorf("EcPoint.Add failed: %v", err) } num = big.NewInt(2) ep, _ := NewScalarBaseMult(curve, num) if !bytes.Equal(ep.Bytes(), p3.Bytes()) { t.Errorf("EcPoint.Add failed: should equal %v, found: %v", ep, p3) } }
explode_data.jsonl/75664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 50730, 2609, 2212, 16374, 1155, 353, 8840, 836, 8, 341, 33209, 586, 1669, 19592, 68955, 808, 17, 20, 21, 741, 22431, 1669, 2409, 7121, 1072, 7, 16, 340, 3223, 16, 11, 716, 1669, 1532, 20639, 3978, 40404, 17591, 586, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPeerGroupResolverPolicyNoAvailablePeers(t *testing.T) { signedBy, identities, err := GetPolicies(org1) if err != nil { panic(err) } sigPolicyEnv := &common.SignaturePolicyEnvelope{ Version: 0, Rule: signedBy[o1], Identities: identities, } expected := []PeerGroup{} testPeerGroupResolver( t, sigPolicyEnv, nil, expected, nil) }
explode_data.jsonl/21572
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 30888, 2808, 18190, 13825, 2753, 16485, 10197, 388, 1155, 353, 8840, 836, 8, 341, 1903, 1542, 1359, 11, 39421, 11, 1848, 1669, 2126, 47, 42038, 36246, 16, 340, 743, 1848, 961, 2092, 341, 197, 30764, 3964, 340, 197, 630, 84...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestServer_Response_LargeWrite_FlowControlled(t *testing.T) { // Make these reads. Before each read, the client adds exactly enough // flow-control to satisfy the read. Numbers chosen arbitrarily. reads := []int{123, 1, 13, 127} size := 0 for _, n := range reads { size += n } testServerResponse(t, func(w http.ResponseWriter, r *http.Request) error { w.(http.Flusher).Flush() n, err := w.Write(bytes.Repeat([]byte("a"), size)) if err != nil { return fmt.Errorf("Write error: %v", err) } if n != size { return fmt.Errorf("wrong size %d from Write", n) } return nil }, func(st *serverTester) { // Set the window size to something explicit for this test. // It's also how much initial data we expect. if err := st.fr.WriteSettings(Setting{SettingInitialWindowSize, uint32(reads[0])}); err != nil { t.Fatal(err) } st.wantSettingsAck() getSlash(st) // make the single request hf := st.wantHeaders() if hf.StreamEnded() { t.Fatal("unexpected END_STREAM flag") } if !hf.HeadersEnded() { t.Fatal("want END_HEADERS flag") } df := st.wantData() if got := len(df.Data()); got != reads[0] { t.Fatalf("Initial window size = %d but got DATA with %d bytes", reads[0], got) } for _, quota := range reads[1:] { if err := st.fr.WriteWindowUpdate(1, uint32(quota)); err != nil { t.Fatal(err) } df := st.wantData() if int(quota) != len(df.Data()) { t.Fatalf("read %d bytes after giving %d quota", len(df.Data()), quota) } } }) }
explode_data.jsonl/71670
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 598 }
[ 2830, 3393, 5475, 65873, 2351, 2744, 7985, 1400, 10303, 3273, 832, 1155, 353, 8840, 836, 8, 341, 197, 322, 7405, 1493, 15804, 13, 13235, 1817, 1349, 11, 279, 2943, 11367, 6896, 3322, 198, 197, 322, 6396, 4465, 311, 26553, 279, 1349, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAssignmentRestore(t *testing.T) { testCases := []NodeRestoreTestCase{ {"a=1", "`a`=1"}, {"b=1+2", "`b`=1+2"}, } extractNodeFunc := func(node Node) Node { return node.(*UpdateStmt).List[0] } runNodeRestoreTest(t, testCases, "UPDATE t1 SET %s", extractNodeFunc) }
explode_data.jsonl/27581
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 41613, 56284, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1955, 56284, 16458, 515, 197, 197, 4913, 64, 28, 16, 497, 35973, 64, 90260, 16, 7115, 197, 197, 4913, 65, 28, 16, 10, 17, 497, 35973, 65, 90260, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClusteredIndexCorCol(t *testing.T) { // For issue 23076 store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t1, t2;") tk.MustExec("create table t1 (c_int int, c_str varchar(40), primary key (c_int, c_str) clustered, key(c_int) );") tk.MustExec("create table t2 like t1 ;") tk.MustExec("insert into t1 values (1, 'crazy lumiere'), (10, 'goofy mestorf');") tk.MustExec("insert into t2 select * from t1 ;") tk.MustQuery("select (select t2.c_str from t2 where t2.c_str = t1.c_str and t2.c_int = 10 order by t2.c_str limit 1) x from t1;").Check(testkit.Rows("<nil>", "goofy mestorf")) }
explode_data.jsonl/65574
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 28678, 291, 1552, 10580, 6127, 1155, 353, 8840, 836, 8, 341, 197, 322, 1752, 4265, 220, 17, 18, 15, 22, 21, 198, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBoundedQuantilesResultSetsStateCorrectly(t *testing.T) { lower, upper := -5.0, 5.0 bq := getNoiselessBQ(t, lower, upper) _, err := bq.Result(0.5) if err != nil { t.Fatalf("Couldn't compute dp result for rank=0.5: %v", err) } if bq.state != resultReturned { t.Errorf("BoundedQuantiles should have its state set to ResultReturned, got %v, want ResultReturned", bq.state) } }
explode_data.jsonl/80641
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 33, 13082, 44220, 3658, 2077, 30175, 1397, 33092, 398, 1155, 353, 8840, 836, 8, 341, 8810, 1202, 11, 8416, 1669, 481, 20, 13, 15, 11, 220, 20, 13, 15, 198, 2233, 80, 1669, 633, 61819, 1717, 33, 48, 1155, 11, 4722, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBasicAuthAddAuthorizationDataToTheRequest(t *testing.T) { req, err := http.NewRequest("GET", "http://www.google.com", nil) if err != nil { t.Errorf("Error creating request: %v", err) return } credentials.BasicAuth("username", "password").Sign(req) if req.Header.Get("Authorization") != "Basic dXNlcm5hbWU6cGFzc3dvcmQ=" { t.Errorf("Expected Basic dXNlcm5hbWU6cGFzc3dvcmQ=, got %v", req.Header.Get("Authorization")) } }
explode_data.jsonl/48266
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 15944, 5087, 2212, 18124, 1043, 1249, 785, 1900, 1155, 353, 8840, 836, 8, 341, 24395, 11, 1848, 1669, 1758, 75274, 445, 3806, 497, 330, 1254, 1110, 2136, 5713, 905, 497, 2092, 340, 743, 1848, 961, 2092, 341, 197, 3244, 130...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestFoo exercises the lease-based tracker end to end: the callback only
// fires while a matching reference is tracked, tracking lapses after the
// lease duration, and bad inputs are handled without panicking.
//
// NOTE(review): subtests share the `calls` counter and run in order, so they
// must not be parallelized.
func TestFoo(t *testing.T) {
	calls := 0
	f := func(key string) {
		calls = calls + 1
	}
	// 10ms lease so expiry can be observed with a short sleep below.
	trk := New(f, 10*time.Millisecond)
	objRef := corev1.ObjectReference{
		APIVersion: "ref.knative.dev/v1alpha1",
		Kind:       "Thing1",
		Namespace:  "ns",
		Name:       "foo",
	}
	// thing1 matches objRef exactly, so OnChanged(thing1) should invoke the
	// callback whenever the reference is actively tracked.
	thing1 := &Resource{
		TypeMeta: metav1.TypeMeta{
			APIVersion: objRef.APIVersion,
			Kind:       objRef.Kind,
		},
		ObjectMeta: metav1.ObjectMeta{
			Namespace: objRef.Namespace,
			Name:      objRef.Name,
		},
	}
	// thing2 is the observer object on whose behalf objRef is tracked.
	thing2 := &Resource{
		TypeMeta: metav1.TypeMeta{
			APIVersion: "reffer.knative.dev/v1alpha1",
			Kind:       "Thing2",
		},
		ObjectMeta: metav1.ObjectMeta{
			Namespace: "default",
			Name:      "bar",
		},
	}
	t.Run("Not tracked yet", func(t *testing.T) {
		trk.OnChanged(thing1)
		if got, want := calls, 0; got != want {
			t.Errorf("OnChanged() = %v, wanted %v", got, want)
		}
	})
	t.Run("Tracked gets called", func(t *testing.T) {
		if err := trk.Track(objRef, thing2); err != nil {
			t.Errorf("Track() = %v", err)
		}
		trk.OnChanged(thing1)
		if got, want := calls, 1; got != want {
			t.Errorf("OnChanged() = %v, wanted %v", got, want)
		}
	})
	t.Run("Still gets called", func(t *testing.T) {
		trk.OnChanged(thing1)
		if got, want := calls, 2; got != want {
			t.Errorf("OnChanged() = %v, wanted %v", got, want)
		}
	})
	// Check that after the sleep duration, we stop getting called.
	time.Sleep(20 * time.Millisecond)
	t.Run("Stops getting called", func(t *testing.T) {
		trk.OnChanged(thing1)
		if got, want := calls, 2; got != want {
			t.Errorf("OnChanged() = %v, wanted %v", got, want)
		}
	})
	t.Run("Starts getting called again", func(t *testing.T) {
		// Re-tracking renews the lease, so the callback fires again.
		if err := trk.Track(objRef, thing2); err != nil {
			t.Errorf("Track() = %v", err)
		}
		trk.OnChanged(thing1)
		if got, want := calls, 3; got != want {
			t.Errorf("OnChanged() = %v, wanted %v", got, want)
		}
	})
	t.Run("OnChanged non-accessor", func(t *testing.T) {
		// Check that passing in a resource that doesn't implement
		// accessor won't panic.
		trk.OnChanged("not an accessor")
		if got, want := calls, 3; got != want {
			t.Errorf("OnChanged() = %v, wanted %v", got, want)
		}
	})
	t.Run("Track bad object", func(t *testing.T) {
		if err := trk.Track(objRef, struct{}{}); err == nil {
			t.Error("Track() = nil, wanted error")
		}
	})
}
explode_data.jsonl/18872
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1026 }
[ 2830, 3393, 40923, 1155, 353, 8840, 836, 8, 341, 1444, 5583, 1669, 220, 15, 198, 1166, 1669, 2915, 4857, 914, 8, 341, 197, 1444, 5583, 284, 6738, 488, 220, 16, 198, 197, 630, 25583, 74, 1669, 1532, 955, 11, 220, 16, 15, 77053, 714...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestClientServer runs the jsonrpc2 client/server round-trip over two
// transports: a raw TCP stream with a varint-framed codec, and a WebSocket
// object stream behind an httptest server.
func TestClientServer(t *testing.T) {
	t.Run("tcp", func(t *testing.T) {
		ctx := context.Background()
		done := make(chan struct{})
		lis, err := net.Listen("tcp", "127.0.0.1:0") // any available address
		if err != nil {
			t.Fatal("Listen:", err)
		}
		defer func() {
			if lis == nil {
				return // already closed
			}
			// Closing twice yields "use of closed network connection";
			// that is expected here and not a test failure.
			if err := lis.Close(); err != nil {
				if !strings.HasSuffix(err.Error(), "use of closed network connection") {
					t.Fatal(err)
				}
			}
		}()
		ha := testHandlerA{t: t}
		// Server goroutine; done is closed when serve returns so the test
		// can wait for (and surface) its error.
		go func() {
			if err := serve(ctx, lis, &ha); err != nil {
				if !strings.HasSuffix(err.Error(), "use of closed network connection") {
					t.Error(err)
				}
			}
			close(done)
		}()
		conn, err := net.Dial("tcp", lis.Addr().String())
		if err != nil {
			t.Fatal("Dial:", err)
		}
		testClientServer(ctx, t, jsonrpc2.NewBufferedStream(conn, jsonrpc2.VarintObjectCodec{}))
		lis.Close()
		<-done // ensure Serve's error return (if any) is caught by this test
	})
	t.Run("websocket", func(t *testing.T) {
		ctx := context.Background()
		done := make(chan struct{})
		ha := testHandlerA{t: t}
		upgrader := websocket.Upgrader{ReadBufferSize: 1024, WriteBufferSize: 1024}
		// HTTP handler upgrades to WebSocket and serves jsonrpc2 over it;
		// done is closed once the connection disconnects.
		s := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
			c, err := upgrader.Upgrade(w, r, nil)
			if err != nil {
				t.Fatal(err)
			}
			defer c.Close()
			jc := jsonrpc2.NewConn(r.Context(), websocketjsonrpc2.NewObjectStream(c), &ha)
			<-jc.DisconnectNotify()
			close(done)
		}))
		defer s.Close()
		// Rewrite the test server's http:// URL into a ws:// URL.
		c, _, err := websocket.DefaultDialer.Dial(strings.Replace(s.URL, "http:", "ws:", 1), nil)
		if err != nil {
			t.Fatal(err)
		}
		defer c.Close()
		testClientServer(ctx, t, websocketjsonrpc2.NewObjectStream(c))
		<-done // keep the test running until the WebSocket disconnects (to avoid missing errors)
	})
}
explode_data.jsonl/50181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 784 }
[ 2830, 3393, 2959, 5475, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 27161, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 20985, 1669, 2266, 19047, 741, 197, 40495, 1669, 1281, 35190, 2036, 6257, 692, 197, 8810, 285, 11, 1848, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCompsUnderPoint(t *testing.T) { r := algebra.NewRay(0, 0, -5, 0, 0, 1) shape := primitives.NewGlassSphere(algebra.TranslationMatrix(0, 0, 1), 1.5) i := primitives.NewIntersection(shape, 5) xs := primitives.NewIntersections() xs.GetHits().Push(i) comps := PrepareComputations(i, r, xs) EPSILON := 0.00001 if !(comps.UnderPoint.Get()[2] > EPSILON/2) { // z coord t.Errorf("Unexpected UnderPoint z-coordinate: %f", comps.UnderPoint.Get()[2]) } if !(comps.Point.Get()[2] < comps.UnderPoint.Get()[2]) { t.Errorf("Unexpected relative positioning of z-intersect with z-underpoint intersect: %f versus %f", comps.Point.Get()[2], comps.UnderPoint.Get()[2]) } }
explode_data.jsonl/27655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 1092, 1690, 16250, 2609, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 46876, 7121, 29187, 7, 15, 11, 220, 15, 11, 481, 20, 11, 220, 15, 11, 220, 15, 11, 220, 16, 340, 197, 12231, 1669, 71194, 7121, 84003, 42959, 17643, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSetOperations(t *testing.T) { tests := []struct { a string b string c string op string }{ {`{1,2,3,4}`, `{1,3,5}`, `{2,4}`, "-"}, {`{1,3,5}`, `{1,2,3,4}`, `{5,}`, "-"}, {`{1,2,3,4}`, `{1,3,5}`, `{1,3}`, "&"}, {`{1,3,5}`, `{1,2,3,4}`, `{1,3}`, "&"}, {`{1,2,3,4}`, `{1,3,5}`, `{1,2,3,4,5}`, "|"}, {`{1,3,5}`, `{1,2,3,4}`, `{1,2,3,4,5}`, "|"}, } for _, tc := range tests { s1 := MustParseTerm(tc.a).Value.(Set) s2 := MustParseTerm(tc.b).Value.(Set) s3 := MustParseTerm(tc.c).Value.(Set) var result Set if tc.op == "-" { result = s1.Diff(s2) } else if tc.op == "&" { result = s1.Intersect(s2) } else if tc.op == "|" { result = s1.Union(s2) } else { panic("bad operation") } if result.Compare(s3) != 0 { t.Errorf("Expected %v for %v %v %v but got: %v", s3, tc.a, tc.op, tc.b, result) } } }
explode_data.jsonl/2928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 1649, 35120, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11323, 220, 914, 198, 197, 2233, 220, 914, 198, 197, 1444, 220, 914, 198, 197, 39703, 914, 198, 197, 59403, 197, 197, 90, 63, 90, 16, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestSingleJSONV1BatchToOCProto(t *testing.T) { blob, err := ioutil.ReadFile("./testdata/zipkin_v1_single_batch.json") if err != nil { t.Fatalf("failed to load test data: %v", err) } got, err := V1JSONBatchToOCProto(blob) if err != nil { t.Fatalf("failed to translate zipkinv1 to OC proto: %v", err) } want := ocBatchesFromZipkinV1 sortTraceByNodeName(want) sortTraceByNodeName(got) if !reflect.DeepEqual(got, want) { t.Fatalf("Unsuccessful conversion\nGot:\n\t%v\nWant:\n\t%v", got, want) } }
explode_data.jsonl/9086
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 10888, 5370, 53, 16, 21074, 1249, 7612, 31549, 1155, 353, 8840, 836, 8, 341, 2233, 1684, 11, 1848, 1669, 43144, 78976, 13988, 92425, 14, 9964, 7989, 2273, 16, 19487, 14534, 4323, 1138, 743, 1848, 961, 2092, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestExt runs filepath.Ext over every entry of the shared exttests table.
func TestExt(t *testing.T) {
	for _, tc := range exttests {
		got := filepath.Ext(tc.path)
		if got != tc.ext {
			t.Errorf("Ext(%q) = %q, want %q", tc.path, got, tc.ext)
		}
	}
}
explode_data.jsonl/1658
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 6756, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 1303, 23841, 341, 197, 743, 856, 1669, 26054, 16146, 8623, 3875, 1215, 856, 961, 1273, 9220, 341, 298, 3244, 13080, 445, 6756, 15238, 80, 8, 284, 1018, 80, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
// TestExecutionsList creates a workflow and one execution of it, then checks
// that ListExecutions filtered by that execution's own attributes returns
// exactly one result.
func TestExecutionsList(t *testing.T) {
	client, err := clients.NewWorkflowV2Client()
	th.AssertNoErr(t, err)
	// Fixture workflow; deleted when the test ends.
	workflow, err := CreateWorkflow(t, client)
	th.AssertNoErr(t, err)
	defer DeleteWorkflow(t, client, workflow)
	// Fixture execution of that workflow; also deleted on exit.
	execution, err := CreateExecution(t, client, workflow)
	th.AssertNoErr(t, err)
	defer DeleteExecution(t, client, execution)
	// Filter on the fixture's description, creation time (>=), and input so
	// only the fixture itself should match.
	list, err := ListExecutions(t, client, &executions.ListOpts{
		Description: &executions.ListFilter{
			Value: execution.Description,
		},
		CreatedAt: &executions.ListDateFilter{
			Filter: executions.FilterGTE,
			Value:  execution.CreatedAt,
		},
		Input: execution.Input,
	})
	th.AssertNoErr(t, err)
	th.AssertEquals(t, 1, len(list))
	tools.PrintResource(t, list)
}
explode_data.jsonl/29468
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 10216, 3977, 852, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 8239, 7121, 62768, 53, 17, 2959, 741, 70479, 11711, 2753, 7747, 1155, 11, 1848, 692, 197, 56249, 11, 1848, 1669, 4230, 62768, 1155, 11, 2943, 340, 7047...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWrapPanic checks that a wrapped panic is logged as a single line at
// error level with a stacks=[...] field listing the panicking call chain.
//
// NOTE(review): the expected prefix and stack entries embed exact source line
// numbers (std_test.go:57 etc.) of the file this test originally lives in, so
// the test is sensitive to that file's layout — do not reflow that file.
func TestWrapPanic(t *testing.T) {
	buf := bytes.NewBuffer(nil)
	DefaultLogger = NewLogger(buf, "", log.Lshortfile, LvlTrace)
	testHandleBusiness()
	results := strings.Split(buf.String(), "\n")
	// Expect exactly one log line plus the trailing newline's empty split.
	if len(results) != 2 {
		t.Errorf("expect %d line logs, but got %d", 2, len(results))
		return
	}
	prefix := "std_test.go:57: wrap a panic; level=error; stacks=["
	if !strings.HasPrefix(results[0], prefix) {
		t.Errorf("unexpected line: %s", results[0])
		return
	}
	// Strip the prefix and the closing ']' to isolate the stack list.
	stack := results[0][len(prefix):]
	if index := strings.IndexByte(stack, ']'); index > -1 {
		stack = stack[:index]
	}
	expects := []string{
		"github.com/xgfone/go-apiserver/log/std_test.go:func1:57",
		"github.com/xgfone/go-apiserver/log/std_test.go:handleBusiness:58",
		"github.com/xgfone/go-apiserver/log/std_test.go:testHandleBusiness:63",
		"github.com/xgfone/go-apiserver/log/std_test.go:TestWrapPanic:70",
	}
	stacks := strings.Fields(stack)
	for i, stack := range stacks {
		// Remove the testing.go.
		if strings.HasPrefix(stack, "testing/testing.go:") {
			stacks = stacks[:i]
			break
		}
	}
	if len(expects) != len(stacks) {
		t.Errorf("expect %d stacks, but got %d", len(expects), len(stacks))
	} else {
		for i, line := range expects {
			if stacks[i] != line {
				t.Errorf("%d: expect stack '%s', but got '%s'", i, line, stacks[i])
			}
		}
	}
}
explode_data.jsonl/75128
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 565 }
[ 2830, 3393, 26787, 47, 31270, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 5820, 7121, 4095, 27907, 340, 91084, 7395, 284, 1532, 7395, 10731, 11, 7342, 1487, 1214, 8676, 1192, 11, 444, 14536, 6550, 692, 18185, 6999, 22727, 2822, 55497, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
// TestDiscoverySubscribe spins up a local DNS server backed by an in-memory
// zone map and verifies that SubscribeServiceAddresses keeps invoking its
// callback, retries while the callback errors, and delivers new addresses
// when the zone data changes.
func TestDiscoverySubscribe(t *testing.T) {
	zones := map[string]string{
		"example.com.": "example.com. 1 IN A 1.2.3.4",
	}
	// Start DNS server
	dns.HandleFunc(".", func(w dns.ResponseWriter, req *dns.Msg) {
		msg := dns.Msg{}
		msg.SetReply(req)
		msg.Authoritative = true
		domain := msg.Question[0].Name
		zoneStr, ok := zones[domain]
		if ok {
			parser := dns.NewZoneParser(strings.NewReader(zoneStr), domain, "")
			for {
				rr, ok := parser.Next()
				if !ok {
					break
				}
				// Only answer records matching the queried type.
				if rr.Header().Rrtype == req.Question[0].Qtype {
					msg.Answer = append(msg.Answer, rr)
				}
			}
		}
		w.WriteMsg(&msg)
	})
	defer dns.HandleRemove(".")
	s, addrstr, err := RunLocalUDPServer(":0")
	if err != nil {
		t.Fatalf("unable to run test server: %v", err)
	}
	defer s.Shutdown()
	_, port, err := net.SplitHostPort(addrstr)
	if err != nil {
		t.Fatal(err)
	}
	// Point the resolver at the local test server only.
	r := resolver.NewResolverFromConfig(&dns.ClientConfig{
		Servers: []string{"127.0.0.1"},
		Port:    port,
	})
	d := discovery{c: DefaultConfig, r: r}
	ctx := context.TODO()
	var cbAddrs ServiceAddresses
	var cbErr error
	cbCh := make(chan struct{})
	// cb records the addresses it was given and returns the configurable
	// cbErr; the non-blocking send signals each invocation to the test.
	cb := func(_ context.Context, addrs ServiceAddresses) error {
		select {
		case cbCh <- struct{}{}:
		default:
		}
		cbAddrs = addrs
		return cbErr
	}
	// cbWait blocks until the callback fires or a 2s deadline expires.
	cbWait := func() {
		select {
		case <-cbCh:
		case <-time.After(time.Second * 2):
			t.Fatalf("CB failed")
		}
	}
	// Do a subscribe
	if err := d.SubscribeServiceAddresses(ctx, "example.com", cb); err != nil {
		t.Fatalf("Error doing initial subscribe: %v", err)
	}
	// wait a second, ensure that we got another
	cbWait()
	// set an error, ensure that we get some more retires
	cbErr = fmt.Errorf("some error")
	prevAddrs := cbAddrs
	for i := 0; i < 3; i++ {
		cbWait()
	}
	// Clear error ensure that we get an update
	cbErr = nil
	cbWait()
	if cbAddrs.Equal(prevAddrs) {
		t.Fatalf("no update!")
	}
	// test that an update is seen immediately
	prevAddrs = cbAddrs
	cbWait()
	// Update immediately
	zones["example.com."] = "example.com. 1 IN A 1.2.3.4\nexample.com. 1 IN A 1.2.3.5"
	// Wait, and see if we get updates
	for i := 0; i < 3; i++ {
		cbWait()
		if len(prevAddrs) == len(cbAddrs) {
			t.Fatalf("%d callback missing update!", i)
		}
	}
}
explode_data.jsonl/9532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 943 }
[ 2830, 3393, 67400, 28573, 1155, 353, 8840, 836, 8, 341, 20832, 3154, 1669, 2415, 14032, 30953, 515, 197, 197, 1, 8687, 905, 13, 788, 330, 8687, 905, 13, 220, 16, 1964, 362, 220, 16, 13, 17, 13, 18, 13, 19, 756, 197, 630, 197, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestDMLScatter runs scatter (multi-shard) DML against a sharded table with
// a lookup vindex, checking table and lookup-index contents after inserts,
// non-lookup-column updates, lookup-column updates, and deletes.
func TestDMLScatter(t *testing.T) {
	ctx := context.Background()
	conn, err := mysql.Connect(ctx, &vtParams)
	require.NoError(t, err)
	defer conn.Close()
	/* Simple insert. after this dml, the tables will contain the following:
	t3 (id5, id6, id7):
	1 2 3
	2 2 3
	3 4 3
	4 5 4
	t3_id7_idx (id7, keyspace_id:id6):
	3 2
	3 2
	3 4
	4 5 */
	exec(t, conn, "begin")
	exec(t, conn, "insert into t3(id5, id6, id7) values(1, 2, 3), (2, 2, 3), (3, 4, 3), (4, 5, 4)")
	exec(t, conn, "commit")
	qr := exec(t, conn, "select id5, id6, id7 from t3 order by id5")
	if got, want := fmt.Sprintf("%v", qr.Rows), "[[INT64(1) INT64(2) INT64(3)] [INT64(2) INT64(2) INT64(3)] [INT64(3) INT64(4) INT64(3)] [INT64(4) INT64(5) INT64(4)]]"; got != want {
		t.Errorf("select:\n%v want\n%v", got, want)
	}
	/* Updating a non lookup column. after this dml, the tables will contain the following:
	t3 (id5, id6, id7):
	42 2 3
	2 2 3
	3 4 3
	4 5 4
	t3_id7_idx (id7, keyspace_id:id6):
	3 2
	3 2
	3 4
	4 5 */
	// The `ks[-]` qualifier targets the full keyspace range directly.
	exec(t, conn, "update `ks[-]`.t3 set id5 = 42 where id5 = 1")
	qr = exec(t, conn, "select id5, id6, id7 from t3 order by id5")
	if got, want := fmt.Sprintf("%v", qr.Rows), "[[INT64(2) INT64(2) INT64(3)] [INT64(3) INT64(4) INT64(3)] [INT64(4) INT64(5) INT64(4)] [INT64(42) INT64(2) INT64(3)]]"; got != want {
		t.Errorf("select:\n%v want\n%v", got, want)
	}
	/* Updating a lookup column. after this dml, the tables will contain the following:
	t3 (id5, id6, id7):
	42 2 42
	2 2 42
	3 4 3
	4 5 4
	t3_id7_idx (id7, keyspace_id:id6):
	42 2
	42 2
	3 4
	4 5 */
	exec(t, conn, "begin")
	exec(t, conn, "update t3 set id7 = 42 where id6 = 2")
	exec(t, conn, "commit")
	qr = exec(t, conn, "select id5, id6, id7 from t3 order by id5")
	if got, want := fmt.Sprintf("%v", qr.Rows), "[[INT64(2) INT64(2) INT64(42)] [INT64(3) INT64(4) INT64(3)] [INT64(4) INT64(5) INT64(4)] [INT64(42) INT64(2) INT64(42)]]"; got != want {
		t.Errorf("select:\n%v want\n%v", got, want)
	}
	/* delete one specific keyspace id.
	after this dml, the tables will contain the following:
	t3 (id5, id6, id7):
	3 4 3
	4 5 4
	t3_id7_idx (id7, keyspace_id:id6):
	3 4
	4 5 */
	exec(t, conn, "delete from t3 where id6 = 2")
	qr = exec(t, conn, "select * from t3 where id6 = 2")
	require.Empty(t, qr.Rows)
	// The lookup index rows for the deleted keyspace id must be gone too.
	qr = exec(t, conn, "select * from t3_id7_idx where id6 = 2")
	require.Empty(t, qr.Rows)
	// delete all the rows.
	exec(t, conn, "delete from `ks[-]`.t3")
	qr = exec(t, conn, "select * from t3")
	require.Empty(t, qr.Rows)
	qr = exec(t, conn, "select * from t3_id7_idx")
	require.Empty(t, qr.Rows)
}
explode_data.jsonl/52403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1212 }
[ 2830, 3393, 35, 2668, 3326, 1650, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 32917, 11, 1848, 1669, 10564, 43851, 7502, 11, 609, 9708, 4870, 340, 17957, 35699, 1155, 11, 1848, 340, 16867, 4534, 10421, 2822, 197, 1057, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4