text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestServerService(t *testing.T) { t.Parallel() ctx := context.Background() Convey("Given a grpc server without a client", t, func() { dialer := registerPinpointServer(t, &pinpointServer{}) Convey("When we connect to the Pinpoint service", func() { conn, err := grpc.DialContext(ctx, "bufnet", grpc.WithContextDialer(dialer), grpc.WithInsecure()) So(err, ShouldBeNil) t.Cleanup(func() { conn.Close() }) client := proto.NewPinpointClient(conn) Convey("Then requests to ScheduleJob will fail with 'misconfigured service'", func() { _, err := client.ScheduleJob(ctx, &proto.ScheduleJobRequest{}) So(err, ShouldNotBeNil) So(err.Error(), ShouldContainSubstring, "misconfigured service") }) }) }) Convey("Given a grpc server with a legacy client not behind the ESP", t, func() { ts := startFakeLegacyServer(t, nil) log.Printf("legacy service = %s", ts.URL) dialer := registerPinpointServer(t, &pinpointServer{legacyPinpointService: ts.URL, LegacyClient: &http.Client{}}) Convey("When we connect to the Pinpoint service", func() { conn, err := grpc.DialContext(ctx, "bufnet", grpc.WithContextDialer(dialer), grpc.WithInsecure()) So(err, ShouldBeNil) defer conn.Close() client := proto.NewPinpointClient(conn) Convey("Then requests to ScheduleJob will fail with 'missing required auth header'", func() { _, err := client.ScheduleJob(ctx, &proto.ScheduleJobRequest{}) So(err, ShouldBeStatusError, codes.PermissionDenied) So(err.Error(), ShouldContainSubstring, "missing required auth header") }) }) }) }
explode_data.jsonl/53292
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 568 }
[ 2830, 3393, 5475, 1860, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 2266, 19047, 741, 93070, 5617, 445, 22043, 264, 47900, 3538, 2041, 264, 2943, 497, 259, 11, 2915, 368, 341, 197, 2698, 530, 261, 1669, 4161, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestData(t *testing.T) { f := mbtest.NewEventFetcher(t, getConfig()) err := mbtest.WriteEvent(f, t) if err != nil { t.Fatal("write", err) } }
explode_data.jsonl/59295
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 93200, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 10016, 1944, 7121, 1556, 97492, 1155, 11, 66763, 2398, 9859, 1669, 10016, 1944, 4073, 1556, 955, 11, 259, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 445, 4934, 497, 1848, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestAggregate(t *testing.T) { t.Run("TestMaxTimeMSInGetMore", func(t *testing.T) { ctx := context.Background() monitor, started, succeeded, failed := setUpMonitor() dbName := "TestAggMaxTimeDB" collName := "TestAggMaxTimeColl" top := testutil.MonitoredTopology(t, dbName, monitor) clearChannels(started, succeeded, failed) skipIfBelow32(ctx, t, top) clearChannels(started, succeeded, failed) err := operation.NewInsert( bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "x", 1)), bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "x", 1)), bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "x", 1)), ).Collection(collName).Database(dbName). Deployment(top).ServerSelector(description.WriteSelector()).Execute(context.Background()) noerr(t, err) clearChannels(started, succeeded, failed) op := operation.NewAggregate(bsoncore.BuildDocumentFromElements(nil)). Collection(collName).Database(dbName).Deployment(top).ServerSelector(description.WriteSelector()). CommandMonitor(monitor).BatchSize(2) err = op.Execute(context.Background()) noerr(t, err) batchCursor, err := op.Result(driver.CursorOptions{MaxTimeMS: 10, BatchSize: 2, CommandMonitor: monitor}) noerr(t, err) var e *event.CommandStartedEvent select { case e = <-started: case <-time.After(2000 * time.Millisecond): t.Fatal("timed out waiting for aggregate") } require.Equal(t, "aggregate", e.CommandName) clearChannels(started, succeeded, failed) // first Next() should automatically return true require.True(t, batchCursor.Next(ctx), "expected true from first Next, got false") clearChannels(started, succeeded, failed) batchCursor.Next(ctx) // should do getMore select { case e = <-started: case <-time.After(200 * time.Millisecond): t.Fatal("timed out waiting for getMore") } require.Equal(t, "getMore", e.CommandName) _, err = e.Command.LookupErr("maxTimeMS") noerr(t, err) }) t.Run("Multiple Batches", func(t *testing.T) { ds := []bsoncore.Document{ bsoncore.BuildDocument(nil, 
bsoncore.AppendInt32Element(nil, "_id", 1)), bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "_id", 2)), bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "_id", 3)), bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "_id", 4)), bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "_id", 5)), } wc := writeconcern.New(writeconcern.WMajority()) testutil.AutoInsertDocs(t, wc, ds...) op := operation.NewAggregate(bsoncore.BuildArray(nil, bsoncore.BuildDocumentValue( bsoncore.BuildDocumentElement(nil, "$match", bsoncore.BuildDocumentElement(nil, "_id", bsoncore.AppendInt32Element(nil, "$gt", 2), ), ), ), bsoncore.BuildDocumentValue( bsoncore.BuildDocumentElement(nil, "$sort", bsoncore.AppendInt32Element(nil, "_id", 1), ), ), )).Collection(testutil.ColName(t)).Database(dbName).Deployment(testutil.Topology(t)). ServerSelector(description.WriteSelector()).BatchSize(2) err := op.Execute(context.Background()) noerr(t, err) cursor, err := op.Result(driver.CursorOptions{BatchSize: 2}) noerr(t, err) var got []bsoncore.Document for i := 0; i < 2; i++ { if !cursor.Next(context.Background()) { t.Error("Cursor should have results, but does not have a next result") } docs, err := cursor.Batch().Documents() noerr(t, err) got = append(got, docs...) } readers := ds[2:] for i, g := range got { if !bytes.Equal(g[:len(readers[i])], readers[i]) { t.Errorf("Did not get expected document. got %v; want %v", bson.Raw(g[:len(readers[i])]), readers[i]) } } if cursor.Next(context.Background()) { t.Error("Cursor should be exhausted but has more results") } }) t.Run("AllowDiskUse", func(t *testing.T) { ds := []bsoncore.Document{ bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "_id", 1)), bsoncore.BuildDocument(nil, bsoncore.AppendInt32Element(nil, "_id", 2)), } wc := writeconcern.New(writeconcern.WMajority()) testutil.AutoInsertDocs(t, wc, ds...) 
op := operation.NewAggregate(bsoncore.BuildArray(nil)).Collection(testutil.ColName(t)).Database(dbName). Deployment(testutil.Topology(t)).ServerSelector(description.WriteSelector()).AllowDiskUse(true) err := op.Execute(context.Background()) if err != nil { t.Errorf("Expected no error from allowing disk use, but got %v", err) } }) }
explode_data.jsonl/10314
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1734 }
[ 2830, 3393, 64580, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 2271, 5974, 1462, 4826, 641, 1949, 7661, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 20985, 1669, 2266, 19047, 741, 197, 197, 32225, 11, 3855, 11, 25331, 11, 4641, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestStructDeclConformsNonNullable(t *testing.T) { decl, ok := testSchema(t).lookupDeclByName("ExampleStruct", false) if !ok { t.Fatalf("lookupDeclByName failed") } structDecl := decl.(*StructDecl) checkConforms(t, context{}, structDecl, []conformTest{ conformOk{gidlir.Record{ Name: "ExampleStruct", Fields: []gidlir.Field{ {Key: gidlir.FieldKey{Name: "s"}, Value: "foo"}, }, }}, conformFail{gidlir.Record{ Name: "ExampleStruct", Fields: []gidlir.Field{ {Key: gidlir.FieldKey{Name: "DefinitelyNotS"}, Value: "foo"}, }, }, "field DefinitelyNotS: unknown"}, conformFail{gidlir.Record{ Name: "DefinitelyNotExampleStruct", Fields: []gidlir.Field{ {Key: gidlir.FieldKey{Name: "s"}, Value: "foo"}, }, }, "expecting struct test.mixer/ExampleStruct"}, conformFail{nil, "expecting non-null struct"}, conformFail{"foo", "expecting struct"}, conformFail{0, "expecting struct"}, }, ) }
explode_data.jsonl/21400
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 427 }
[ 2830, 3393, 9422, 21629, 1109, 9807, 8121, 15703, 1155, 353, 8840, 836, 8, 341, 197, 10005, 11, 5394, 1669, 1273, 8632, 1155, 568, 21020, 21629, 16898, 445, 13314, 9422, 497, 895, 340, 743, 753, 562, 341, 197, 3244, 30762, 445, 21020, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestKVDeleteRange(t *testing.T) { defer testutil.AfterTest(t) clus := integration.NewClusterV3(t, &integration.ClusterConfig{Size: 3}) defer clus.Terminate(t) kv := clus.RandClient() ctx := context.TODO() tests := []struct { key string opts []clientv3.OpOption wkeys []string }{ // [a, c) { key: "a", opts: []clientv3.OpOption{clientv3.WithRange("c")}, wkeys: []string{"c", "c/abc", "d"}, }, // >= c { key: "c", opts: []clientv3.OpOption{clientv3.WithFromKey()}, wkeys: []string{"a", "b"}, }, // c* { key: "c", opts: []clientv3.OpOption{clientv3.WithPrefix()}, wkeys: []string{"a", "b", "d"}, }, // * { key: "\x00", opts: []clientv3.OpOption{clientv3.WithFromKey()}, wkeys: []string{}, }, } for i, tt := range tests { keySet := []string{"a", "b", "c", "c/abc", "d"} for j, key := range keySet { if _, err := kv.Put(ctx, key, ""); err != nil { t.Fatalf("#%d: couldn't put %q (%v)", j, key, err) } } _, err := kv.Delete(ctx, tt.key, tt.opts...) if err != nil { t.Fatalf("#%d: couldn't delete range (%v)", i, err) } resp, err := kv.Get(ctx, "a", clientv3.WithFromKey()) if err != nil { t.Fatalf("#%d: couldn't get keys (%v)", i, err) } keys := []string{} for _, kv := range resp.Kvs { keys = append(keys, string(kv.Key)) } if !reflect.DeepEqual(tt.wkeys, keys) { t.Errorf("#%d: resp.Kvs got %v, expected %v", i, keys, tt.wkeys) } } }
explode_data.jsonl/16405
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 722 }
[ 2830, 3393, 82707, 6435, 6046, 1155, 353, 8840, 836, 8, 341, 16867, 1273, 1314, 36892, 2271, 1155, 692, 197, 4163, 1669, 17590, 7121, 28678, 53, 18, 1155, 11, 609, 60168, 72883, 2648, 90, 1695, 25, 220, 18, 3518, 16867, 1185, 355, 836...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestDataConnectorSuite(t *testing.T) { s := new(AdminDataSuite) s.ctx = &DBConnector{} require.NoError(t, db.ClearCollections(evergreen.ConfigCollection, task.Collection, task.OldCollection, build.Collection, model.VersionCollection, event.AllLogCollection), "Error clearing collections") b := &build.Build{ Id: "buildtest", Status: evergreen.BuildStarted, Version: "abc", } v := &model.Version{ Id: b.Version, Status: evergreen.VersionStarted, } testTask1 := &task.Task{ Id: "taskToRestart", Activated: false, BuildId: b.Id, Execution: 1, Project: "sample", StartTime: time.Date(2017, time.June, 12, 12, 0, 0, 0, time.Local), Status: evergreen.TaskFailed, Details: apimodels.TaskEndDetail{ Type: evergreen.CommandTypeTest, }, } testTask2 := &task.Task{ Id: "taskThatSucceeded", Activated: false, BuildId: b.Id, Execution: 1, Project: "sample", StartTime: time.Date(2017, time.June, 12, 12, 0, 0, 0, time.Local), Status: evergreen.TaskSucceeded, } testTask3 := &task.Task{ Id: "taskOutsideOfTimeRange", Activated: false, BuildId: b.Id, Execution: 1, Project: "sample", StartTime: time.Date(2017, time.June, 11, 12, 0, 0, 0, time.Local), Status: evergreen.TaskFailed, } p := &model.ProjectRef{ Identifier: "sample", } b.Tasks = []build.TaskCache{ { Id: testTask1.Id, }, { Id: testTask2.Id, }, { Id: testTask3.Id, }, } require.NoError(t, b.Insert(), "error inserting documents") require.NoError(t, v.Insert(), "error inserting documents") require.NoError(t, testTask1.Insert(), "error inserting documents") require.NoError(t, testTask2.Insert(), "error inserting documents") require.NoError(t, testTask3.Insert(), "error inserting documents") require.NoError(t, p.Insert(), "error inserting documents") suite.Run(t, s) }
explode_data.jsonl/14982
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 780 }
[ 2830, 93200, 35954, 28000, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 7, 7210, 1043, 28000, 340, 1903, 30608, 284, 609, 3506, 35954, 16094, 17957, 35699, 1155, 11, 2927, 13524, 52730, 2026, 423, 13250, 10753, 6482, 11, 3383, 28629, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewUser(t *testing.T) { t.Parallel() tests := []struct { email, pwd string wantError bool }{ {"good@email.com", "goodpassword", false}, {"notld@email", "goodpassword", false}, {"good@email.com", "badpd", true}, {"bademail", "goodpassword", true}, {"bad@", "goodpassword", true}, {"@email", "goodpassword", true}, {"good@email.com", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa50", false}, {"good@email.com", "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa51", true}, } for _, test := range tests { t.Run(test.email, func(t *testing.T) { user, err := models.NewUser(test.email, test.pwd) if test.wantError { assert.Error(t, err) } else { assert.NoError(t, err) assert.Equal(t, test.email, user.Email) assert.NotEmpty(t, user.HashedPassword) newHash, _ := utils.HashPassword(test.pwd) assert.NotEqual(t, newHash, user.HashedPassword, "Salt should prevent equality") } }) } }
explode_data.jsonl/31891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 3564, 1474, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 57549, 11, 32706, 914, 198, 197, 50780, 1454, 220, 1807, 198, 197, 59403, 197, 197, 4913, 18536, 71776, 905, 497, 330, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGenRSTNoHiddenParents(t *testing.T) { // We generate on a subcommand so we have both subcommands and parents for _, name := range []string{"rootflag", "strtwo"} { f := rootCmd.PersistentFlags().Lookup(name) f.Hidden = true defer func() { f.Hidden = false }() } buf := new(bytes.Buffer) if err := GenReST(echoCmd, buf); err != nil { t.Fatal(err) } output := buf.String() checkStringContains(t, output, echoCmd.Long) checkStringContains(t, output, echoCmd.Example) checkStringContains(t, output, "boolone") checkStringOmits(t, output, "rootflag") checkStringContains(t, output, rootCmd.Short) checkStringContains(t, output, echoSubCmd.Short) checkStringOmits(t, output, deprecatedCmd.Short) checkStringOmits(t, output, "Options inherited from parent commands") }
explode_data.jsonl/17805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 9967, 49, 784, 2753, 17506, 61769, 1155, 353, 8840, 836, 8, 341, 197, 322, 1205, 6923, 389, 264, 1186, 5631, 773, 582, 614, 2176, 1186, 24270, 323, 6562, 198, 2023, 8358, 829, 1669, 2088, 3056, 917, 4913, 2888, 9903, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLegalCommentsInline(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` import './a' import './b' import './c' `, "/a.js": `console.log('in a') //! Copyright notice 1`, "/b.js": `console.log('in b') //! Copyright notice 1`, "/c.js": `console.log('in c') //! Copyright notice 2`, }, entryPaths: []string{"/entry.js"}, options: config.Options{ Mode: config.ModeBundle, AbsOutputFile: "/out.js", LegalComments: config.LegalCommentsInline, }, }) }
explode_data.jsonl/38559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 52786, 17373, 25324, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 21918, 2756, 64, 1248, 571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEntry_SelectionHides(t *testing.T) { e, window := setupSelection(false) defer teardownImageTest(window) c := window.Canvas() test.AssertImageMatches(t, "entry/selection_initial.png", c.Capture()) c.Unfocus() test.AssertImageMatches(t, "entry/selection_focus_lost.png", c.Capture()) c.Focus(e) test.AssertImageMatches(t, "entry/selection_focus_gained.png", c.Capture()) }
explode_data.jsonl/57323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 5874, 88435, 39, 3341, 1155, 353, 8840, 836, 8, 341, 7727, 11, 3241, 1669, 6505, 11177, 3576, 340, 16867, 49304, 1906, 2271, 15906, 340, 1444, 1669, 3241, 54121, 2822, 18185, 11711, 1906, 42470, 1155, 11, 330, 4085, 14, 2361...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParse_ChargeInvoiceNotification(t *testing.T) { result := MustParseFile("testdata/charge_invoice_notification.xml") if n, ok := result.(*webhooks.ChargeInvoiceNotification); !ok { t.Fatalf("unexpected type: %T, result", n) } else if diff := cmp.Diff(n, &webhooks.ChargeInvoiceNotification{ Type: webhooks.NewChargeInvoice, Account: webhooks.Account{ XMLName: xml.Name{Local: "account"}, Code: "1234", }, Invoice: webhooks.ChargeInvoice{ XMLName: xml.Name{Local: "invoice"}, UUID: "42feb03ce368c0e1ead35d4bfa89b82e", State: recurly.ChargeInvoiceStatePending, Origin: recurly.ChargeInvoiceOriginRenewal, SubscriptionUUIDs: []string{"40b8f5e99df03b8684b99d4993b6e089"}, InvoiceNumber: 2405, Currency: "USD", BalanceInCents: 100000, TotalInCents: 100000, SubtotalInCents: 100000, SubTotalBeforeDiscountInCents: 100000, NetTerms: recurly.NewInt(30), CollectionMethod: recurly.CollectionMethodManual, CreatedAt: recurly.NewTime(MustParseTime("2018-02-13T16:00:04Z")), UpdatedAt: recurly.NewTime(MustParseTime("2018-02-13T16:00:04Z")), DueOn: recurly.NewTime(MustParseTime("2018-03-16T15:00:04Z")), CustomerNotes: "Thanks for your business!", TermsAndConditions: "Payment can be made out to Acme, Co.", }, }); diff != "" { t.Fatal(diff) } }
explode_data.jsonl/76099
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 879 }
[ 2830, 3393, 14463, 27588, 2744, 34674, 11196, 1155, 353, 8840, 836, 8, 341, 9559, 1669, 15465, 14463, 1703, 445, 92425, 14, 13891, 39225, 34296, 9028, 1138, 743, 308, 11, 5394, 1669, 1102, 41399, 2911, 38560, 6353, 2744, 34674, 11196, 121...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetNetworkResourceServicePrincipalTokenNegative(t *testing.T) { env := &azure.PublicCloud for _, config := range CrossTenantNetworkResourceNegativeConfig { _, err := GetNetworkResourceServicePrincipalToken(config, env) assert.Error(t, err) } }
explode_data.jsonl/30652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 1949, 12320, 4783, 1860, 31771, 3323, 38489, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 609, 39495, 49139, 16055, 198, 2023, 8358, 2193, 1669, 2088, 11261, 71252, 12320, 4783, 38489, 2648, 341, 197, 197, 6878, 1848, 1669, 2126...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestPrivateActivityYesInvisibleForPublic(t *testing.T) { defer prepareTestEnv(t)() testPrivateActivityDoSomethingForActionEntries(t) testPrivateActivityHelperEnablePrivateActivity(t) visible := testPrivateActivityHelperHasVisibleActivitiesFromPublic(t) assert.False(t, visible, "user should have no visible activities") }
explode_data.jsonl/51649
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 16787, 4052, 9454, 641, 12601, 2461, 12676, 1155, 353, 8840, 836, 8, 341, 16867, 10549, 2271, 14359, 1155, 8, 741, 18185, 16787, 4052, 5404, 23087, 2461, 2512, 24533, 1155, 340, 18185, 16787, 4052, 5511, 11084, 16787, 4052, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCommonSQLInjections(t *testing.T) { for _, sess := range testSession { reset(t, sess) for _, injectionAttempt := range strings.Split(injectionAttempts, "\n") { // Create a user with the attempted injection as the email address _, err := sess.InsertInto("dbr_people"). Pair("name", injectionAttempt). Exec() require.NoError(t, err) // SELECT the name back and ensure it's equal to the injection attempt var name string err = sess.Select("name").From("dbr_people").OrderDesc("id").Limit(1).LoadOne(&name) require.NoError(t, err) require.Equal(t, injectionAttempt, name) } } }
explode_data.jsonl/32489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 10839, 6688, 641, 24575, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 21875, 1669, 2088, 1273, 5283, 341, 197, 70343, 1155, 11, 21875, 692, 197, 2023, 8358, 25071, 47052, 1669, 2088, 9069, 19823, 5900, 7606, 81517, 11, 2917, 77...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTopicTrie_matchedClients_Qos(t *testing.T) { a := assert.New(t) for _, v := range topicMatchQosTest { trie := newTopicTrie() for _, tt := range v.topics { trie.subscribe("cid", tt) } rs := trie.getMatchedTopicFilter(v.matchTopic.name) a.Equal(v.matchTopic.qos, rs["cid"][0].Qos) } }
explode_data.jsonl/72944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 26406, 51, 7231, 93900, 47174, 13337, 436, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 340, 2023, 8358, 348, 1669, 2088, 8544, 8331, 48, 436, 2271, 341, 197, 197, 8927, 1669, 501, 26406, 51, 7231, 741, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInvalidHostOld(t *testing.T) { c := StartCapture() exit = func(r int) {} os.Args = []string{"dosa", "--host", "invalid-hostname.", "schema", "upsertable", "--scope", "bar", "--prefix", "foo", "../../testentity"} main() output := c.stop(true) assert.Contains(t, output, "invalid-hostname") assert.Contains(t, output, "no such host") }
explode_data.jsonl/16643
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 7928, 9296, 18284, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 5145, 27429, 741, 14519, 284, 2915, 2601, 526, 8, 5613, 25078, 51015, 284, 3056, 917, 4913, 67, 11983, 497, 14482, 3790, 497, 330, 11808, 38589, 606, 10465, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntervalStatsStatement(t *testing.T) { pullTimestamp := time.Now().UTC() testCases := map[string]struct { topMetricsQueryMaxRows int expectedSQL string expectedParams map[string]interface{} }{ "Statement with top metrics query max rows": {topMetricsQueryMaxRows, query + topMetricsQueryLimitCondition, map[string]interface{}{ topMetricsQueryLimitParameterName: topMetricsQueryMaxRows, pullTimestampParameterName: pullTimestamp, }}, "Statement without top metrics query max rows": {0, query, map[string]interface{}{pullTimestampParameterName: pullTimestamp}}, } for name, testCase := range testCases { t.Run(name, func(t *testing.T) { args := statementArgs{ query: query, topMetricsQueryMaxRows: testCase.topMetricsQueryMaxRows, pullTimestamp: pullTimestamp, stalenessRead: true, } stmt := intervalStatsStatement(args) assert.Equal(t, testCase.expectedSQL, stmt.statement.SQL) assert.Equal(t, testCase.expectedParams, stmt.statement.Params) assert.True(t, stmt.stalenessRead) }) } }
explode_data.jsonl/16157
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 437 }
[ 2830, 3393, 10256, 16635, 8636, 1155, 353, 8840, 836, 8, 341, 3223, 617, 20812, 1669, 882, 13244, 1005, 21183, 2822, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 42118, 27328, 2859, 5974, 9024, 526, 198, 197, 42400, 6688, 310, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestObserverEmitsEndpointsIntegration(t *testing.T) { c := container.New(t) image := "docker.io/library/nginx:1.17" cntr := c.StartImage(image, container.WithPortReady(80)) config := NewFactory().CreateDefaultConfig().(*Config) config.CacheSyncInterval = 1 * time.Second config.UseHostBindings = true config.UseHostnameIfPresent = true mn := &mockNotifier{endpointsMap: map[observer.EndpointID]observer.Endpoint{}} obvs := startObserverWithConfig(t, mn, config) defer stopObserver(t, obvs) require.Eventually(t, func() bool { return mn.AddCount() == 1 }, 3*time.Second, 10*time.Millisecond) endpoints := mn.EndpointsMap() require.Equal(t, len(endpoints), 1) for _, e := range endpoints { require.Equal(t, uint16(80), e.Details.Env()["alternate_port"]) require.Equal(t, string(cntr.ID), e.Details.Env()["container_id"]) require.Equal(t, image, e.Details.Env()["image"]) } }
explode_data.jsonl/32810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 17151, 2269, 1199, 80786, 52464, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 5476, 7121, 1155, 340, 31426, 1669, 330, 28648, 4245, 45446, 69261, 25, 16, 13, 16, 22, 698, 1444, 77, 376, 1669, 272, 12101, 1906, 10075, 11, 5476...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecrementVersion(t *testing.T) { cases := []struct { kubeVersion string succeed bool exp string }{ {kubeVersion: "1.13", succeed: true, exp: "1.12"}, {kubeVersion: "1.15", succeed: true, exp: "1.14"}, {kubeVersion: "1.11", succeed: true, exp: "1.10"}, {kubeVersion: "1.1", succeed: true, exp: ""}, {kubeVersion: "invalid", succeed: false, exp: ""}, } for _, c := range cases { rv := decrementVersion(c.kubeVersion) if c.succeed { if c.exp != rv { t.Fatalf("decrementVersion(%q) - Got %q expected %s", c.kubeVersion, rv, c.exp) } } else { if len(rv) > 0 { t.Fatalf("decrementVersion(%q) - Expected empty string but Got %s", c.kubeVersion, rv) } } } }
explode_data.jsonl/60204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 332 }
[ 2830, 3393, 4900, 54655, 5637, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 16463, 3760, 5637, 914, 198, 197, 1903, 29264, 257, 1807, 198, 197, 48558, 260, 914, 198, 197, 59403, 197, 197, 69094, 3760, 5637, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAllocIPAddressOnErr(t *testing.T) { ctrl, _, mockEC2 := setup(t) defer ctrl.Finish() mockEC2.EXPECT().AssignPrivateIpAddresses(gomock.Any()).Return(nil, errors.New("Error on AssignPrivateIpAddresses")) ins := &EC2InstanceMetadataCache{ec2SVC: mockEC2} err := ins.AllocIPAddress("eni-id") assert.Error(t, err) }
explode_data.jsonl/19299
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 25154, 87976, 1925, 7747, 1155, 353, 8840, 836, 8, 341, 84381, 11, 8358, 7860, 7498, 17, 1669, 6505, 1155, 340, 16867, 23743, 991, 18176, 2822, 77333, 7498, 17, 22402, 7285, 1005, 28933, 16787, 23378, 52290, 3268, 316, 1176, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileWriterFlush(t *testing.T) { fw := NewFileWriter(t.Name()+".log", time.Hour, 0) s := fmt.Sprintf("open file: %s\n", time.Now()) fw.Write([]byte(s)) fw.Write([]byte("flush\n")) fw.Flush() fw.Write([]byte("after flush\n")) }
explode_data.jsonl/74559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 1703, 6492, 46874, 1155, 353, 8840, 836, 8, 341, 1166, 86, 1669, 1532, 1703, 6492, 1155, 2967, 17140, 3263, 839, 497, 882, 73550, 11, 220, 15, 340, 1903, 1669, 8879, 17305, 445, 2508, 1034, 25, 1018, 82, 1699, 497, 882, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTrigger(t *testing.T) { controller := gomock.NewController(t) defer controller.Finish() checkBuild := func(_ context.Context, build *core.Build, stages []*core.Stage) { if diff := cmp.Diff(build, dummyBuild, ignoreBuildFields); diff != "" { t.Errorf(diff) } if diff := cmp.Diff(stages, dummyStages, ignoreStageFields); diff != "" { t.Errorf(diff) } } checkStatus := func(_ context.Context, _ *core.User, req *core.StatusInput) error { if diff := cmp.Diff(req.Build, dummyBuild, ignoreBuildFields); diff != "" { t.Errorf(diff) } if diff := cmp.Diff(req.Repo, dummyRepo, ignoreStageFields); diff != "" { t.Errorf(diff) } return nil } mockUsers := mock.NewMockUserStore(controller) mockUsers.EXPECT().Find(gomock.Any(), dummyRepo.UserID).Return(dummyUser, nil) mockRepos := mock.NewMockRepositoryStore(controller) mockRepos.EXPECT().Increment(gomock.Any(), dummyRepo).Return(dummyRepo, nil) mockConfigService := mock.NewMockConfigService(controller) mockConfigService.EXPECT().Find(gomock.Any(), gomock.Any()).Return(dummyYaml, nil) mockConvertService := mock.NewMockConvertService(controller) mockConvertService.EXPECT().Convert(gomock.Any(), gomock.Any()).Return(dummyYaml, nil) mockValidateService := mock.NewMockValidateService(controller) mockValidateService.EXPECT().Validate(gomock.Any(), gomock.Any()).Return(nil) mockStatus := mock.NewMockStatusService(controller) mockStatus.EXPECT().Send(gomock.Any(), gomock.Any(), gomock.Any()).Return(nil).Do(checkStatus) mockQueue := mock.NewMockScheduler(controller) mockQueue.EXPECT().Schedule(gomock.Any(), gomock.Any()).Return(nil) mockBuilds := mock.NewMockBuildStore(controller) mockBuilds.EXPECT().Create(gomock.Any(), gomock.Any(), gomock.Any()).Do(checkBuild).Return(nil) mockWebhooks := mock.NewMockWebhookSender(controller) mockWebhooks.EXPECT().Send(gomock.Any(), gomock.Any()).Return(nil) triggerer := New( nil, mockConfigService, mockConvertService, nil, mockStatus, mockBuilds, mockQueue, mockRepos, mockUsers, 
mockValidateService, mockWebhooks, ) build, err := triggerer.Trigger(noContext, dummyRepo, dummyHook) if err != nil { t.Error(err) return } if diff := cmp.Diff(build, dummyBuild, ignoreBuildFields); diff != "" { t.Errorf(diff) } }
explode_data.jsonl/26991
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 894 }
[ 2830, 3393, 17939, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 6461, 991, 18176, 2822, 25157, 11066, 1669, 2915, 2490, 2266, 9328, 11, 1936, 353, 2153, 25212, 11, 17628, 29838, 2153, 69873, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHTTPClientParallelCapped(t *testing.T) { var reqs int64 ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { if req := atomic.AddInt64(&reqs, 1); req > 5 { t.Errorf("Beyond parallelism cap: %v", req) } <-time.After(time.Millisecond * 10) w.Write([]byte("foobar")) atomic.AddInt64(&reqs, -1) })) defer ts.Close() conf := NewConfig() conf.HTTP.Config.URL = ts.URL + "/testpost" conf.HTTP.Parallel = true conf.HTTP.MaxParallel = 5 h, err := NewHTTP(conf, nil, log.Noop(), metrics.Noop()) if err != nil { t.Fatal(err) } msgs, res := h.ProcessMessage(message.New([][]byte{ []byte("foo"), []byte("bar"), []byte("baz"), []byte("qux"), []byte("quz"), []byte("foo2"), []byte("bar2"), []byte("baz2"), []byte("qux2"), []byte("quz2"), })) if res != nil { t.Error(res.Error()) } else if expC, actC := 10, msgs[0].Len(); actC != expC { t.Errorf("Wrong result count: %v != %v", actC, expC) } else if exp, act := "foobar", string(message.GetAllBytes(msgs[0])[0]); act != exp { t.Errorf("Wrong result: %v != %v", act, exp) } }
explode_data.jsonl/20222
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 489 }
[ 2830, 3393, 9230, 2959, 16547, 34, 5677, 1155, 353, 8840, 836, 8, 341, 2405, 4232, 82, 526, 21, 19, 198, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 743, 4232, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMapProxy_ReplaceIfSameWithNilOldValue(t *testing.T) { _, err := mp.ReplaceIfSame("test", nil, "test") AssertErrorNotNil(t, err, "replaceIfSame did not return an error for nil oldValue") mp.Clear() }
explode_data.jsonl/57003
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 2227, 16219, 62, 23107, 2679, 19198, 2354, 19064, 18284, 1130, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 20858, 2679, 19198, 445, 1944, 497, 2092, 11, 330, 1944, 1138, 18017, 1454, 96144, 1155, 11, 1848, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestAdmin_Pin(t *testing.T) { ts, _, teardown := startupT(t) defer teardown() c1 := store.Comment{Text: "test test #1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}} c2 := store.Comment{Text: "test test #2", ParentID: "p1", Locator: store.Locator{SiteID: "radio-t", URL: "https://radio-t.com/blah"}} id1 := addComment(t, c1, ts) addComment(t, c2, ts) pin := func(val int) int { client := http.Client{} req, err := http.NewRequest(http.MethodPut, fmt.Sprintf("%s/api/v1/admin/pin/%s?site=radio-t&url=https://radio-t.com/blah&pin=%d", ts.URL, id1, val), nil) assert.Nil(t, err) requireAdminOnly(t, req) req.SetBasicAuth("admin", "password") resp, err := client.Do(req) assert.Nil(t, err) return resp.StatusCode } code := pin(1) assert.Equal(t, 200, code) body, code := get(t, fmt.Sprintf("%s/api/v1/id/%s?site=radio-t&url=https://radio-t.com/blah", ts.URL, id1)) assert.Equal(t, 200, code) cr := store.Comment{} err := json.Unmarshal([]byte(body), &cr) assert.Nil(t, err) assert.True(t, cr.Pin) code = pin(-1) assert.Equal(t, 200, code) body, code = get(t, fmt.Sprintf("%s/api/v1/id/%s?site=radio-t&url=https://radio-t.com/blah", ts.URL, id1)) assert.Equal(t, 200, code) cr = store.Comment{} err = json.Unmarshal([]byte(body), &cr) assert.Nil(t, err) assert.False(t, cr.Pin) }
explode_data.jsonl/51825
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 7210, 35453, 1155, 353, 8840, 836, 8, 341, 57441, 11, 8358, 49304, 1669, 20567, 51, 1155, 340, 16867, 49304, 2822, 1444, 16, 1669, 3553, 56730, 90, 1178, 25, 330, 1944, 1273, 671, 16, 756, 197, 197, 33831, 25, 3553, 1214, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSignWithCertChain(t *testing.T) { rootCertFile := "../testdata/multilevelpki/root-cert.pem" certChainFile := "../testdata/multilevelpki/int-cert-chain.pem" signingCertFile := "../testdata/multilevelpki/int-cert.pem" signingKeyFile := "../testdata/multilevelpki/int-key.pem" rsaKeySize := 2048 defaultWorkloadCertTTL := 30 * time.Minute maxWorkloadCertTTL := time.Hour caopts, err := NewPluggedCertIstioCAOptions(certChainFile, signingCertFile, signingKeyFile, rootCertFile, defaultWorkloadCertTTL, maxWorkloadCertTTL, rsaKeySize) if err != nil { t.Fatalf("Failed to create a plugged-cert CA Options: %v", err) } ca, err := NewIstioCA(caopts) if err != nil { t.Errorf("Got error while creating plugged-cert CA: %v", err) } if ca == nil { t.Fatalf("Failed to create a plugged-cert CA.") } opts := util.CertOptions{ // This value is not used, instead, subjectID should be used in certificate. Host: "spiffe://different.com/test", RSAKeySize: 2048, IsCA: false, } csrPEM, privPEM, err := util.GenCSR(opts) if err != nil { t.Error(err) } certPEM, signErr := ca.SignWithCertChain(csrPEM, []string{"localhost"}, time.Hour, false) if signErr != nil { t.Error(err) } cert, err := tls.X509KeyPair(certPEM, privPEM) if err != nil { t.Error(err) } if len(cert.Certificate) != 3 { t.Errorf("Unexpected number of certificates returned: %d (expected 4)", len(cert.Certificate)) } }
explode_data.jsonl/15504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 576 }
[ 2830, 3393, 7264, 2354, 36934, 18837, 1155, 353, 8840, 836, 8, 341, 33698, 36934, 1703, 1669, 7005, 92425, 3183, 494, 457, 889, 79, 6642, 72074, 61034, 49373, 698, 1444, 529, 18837, 1703, 1669, 7005, 92425, 3183, 494, 457, 889, 79, 6642...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestReceiveTicket_InvalidRecipientRand_AlreadyRevealed(t *testing.T) { sender, b, v, ts, faceValue, winProb, sig := newRecipientFixtureOrFatal(t) r := newRecipientOrFatal(t, RandAddress(), b, v, ts, faceValue, winProb) params := r.TicketParams(sender) // Test invalid recipientRand revealed ticket := newTicket(sender, params, 0) // Config stub validator with valid winning tickets v.SetIsWinningTicket(true) _, _, err := r.ReceiveTicket(ticket, sig, params.Seed) if err != nil { t.Fatal(err) } // Redeem ticket to invalidate recipientRand if err := r.RedeemWinningTickets([]string{ticket.RecipientRandHash.Hex()}); err != nil { t.Fatal(err) } // New ticket with same invalid recipientRand, but updated senderNonce ticket = newTicket(sender, params, 1) _, _, err = r.ReceiveTicket(ticket, sig, params.Seed) if err == nil { t.Error("expected invalid recipientRand revealed error") } if err != nil && !strings.Contains(err.Error(), "invalid already revealed recipientRand") { t.Errorf("expected invalid recipientRand revealed error, got %v", err) } }
explode_data.jsonl/44759
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 14742, 34058, 62, 7928, 74432, 56124, 40812, 2307, 693, 586, 5838, 1155, 353, 8840, 836, 8, 341, 1903, 1659, 11, 293, 11, 348, 11, 10591, 11, 3579, 1130, 11, 3164, 36980, 11, 8366, 1669, 501, 74432, 18930, 2195, 62396, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestSummaryDataPoint_StartTimestamp(t *testing.T) { ms := NewSummaryDataPoint() assert.EqualValues(t, Timestamp(0), ms.StartTimestamp()) testValStartTimestamp := Timestamp(1234567890) ms.SetStartTimestamp(testValStartTimestamp) assert.EqualValues(t, testValStartTimestamp, ms.StartTimestamp()) }
explode_data.jsonl/32749
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 19237, 1043, 2609, 38056, 20812, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 19237, 1043, 2609, 741, 6948, 12808, 6227, 1155, 11, 32758, 7, 15, 701, 9829, 12101, 20812, 2398, 18185, 2208, 3479, 20812, 1669, 32758, 7, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateUserWithToken(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() t.Run("CreateWithTokenHappyPath", func(t *testing.T) { user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " " + model.SystemUserRoleId} token := model.NewToken( app.TokenTypeTeamInvitation, model.MapToJSON(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}), ) require.NoError(t, th.App.Srv().Store.Token().Save(token)) ruser, resp, err := th.Client.CreateUserWithToken(&user, token.Token) require.NoError(t, err) CheckCreatedStatus(t, resp) th.Client.Login(user.Email, user.Password) require.Equal(t, user.Nickname, ruser.Nickname) require.Equal(t, model.SystemUserRoleId, ruser.Roles, "should clear roles") CheckUserSanitization(t, ruser) _, err = th.App.Srv().Store.Token().GetByToken(token.Token) require.Error(t, err, "The token must be deleted after being used") teams, appErr := th.App.GetTeamsForUser(ruser.Id) require.Nil(t, appErr) require.NotEmpty(t, teams, "The user must have teams") require.Equal(t, th.BasicTeam.Id, teams[0].Id, "The user joined team must be the team provided.") }) th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) { user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " " + model.SystemUserRoleId} token := model.NewToken( app.TokenTypeTeamInvitation, model.MapToJSON(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}), ) require.NoError(t, th.App.Srv().Store.Token().Save(token)) ruser, resp, err := client.CreateUserWithToken(&user, token.Token) require.NoError(t, err) CheckCreatedStatus(t, resp) th.Client.Login(user.Email, user.Password) require.Equal(t, user.Nickname, ruser.Nickname) require.Equal(t, model.SystemUserRoleId, ruser.Roles, "should clear roles") CheckUserSanitization(t, 
ruser) _, err = th.App.Srv().Store.Token().GetByToken(token.Token) require.Error(t, err, "The token must be deleted after being used") teams, appErr := th.App.GetTeamsForUser(ruser.Id) require.Nil(t, appErr) require.NotEmpty(t, teams, "The user must have teams") require.Equal(t, th.BasicTeam.Id, teams[0].Id, "The user joined team must be the team provided.") }, "CreateWithTokenHappyPath") t.Run("NoToken", func(t *testing.T) { user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " " + model.SystemUserRoleId} token := model.NewToken( app.TokenTypeTeamInvitation, model.MapToJSON(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}), ) require.NoError(t, th.App.Srv().Store.Token().Save(token)) defer th.App.DeleteToken(token) _, _, err := th.Client.CreateUserWithToken(&user, "") require.Error(t, err) CheckErrorID(t, err, "api.user.create_user.missing_token.app_error") }) t.Run("TokenExpired", func(t *testing.T) { user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " " + model.SystemUserRoleId} timeNow := time.Now() past49Hours := timeNow.Add(-49*time.Hour).UnixNano() / int64(time.Millisecond) token := model.NewToken( app.TokenTypeTeamInvitation, model.MapToJSON(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}), ) token.CreateAt = past49Hours require.NoError(t, th.App.Srv().Store.Token().Save(token)) defer th.App.DeleteToken(token) _, resp, err := th.Client.CreateUserWithToken(&user, token.Token) require.Error(t, err) CheckBadRequestStatus(t, resp) CheckErrorID(t, err, "api.user.create_user.signup_link_expired.app_error") }) t.Run("WrongToken", func(t *testing.T) { user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " 
" + model.SystemUserRoleId} _, resp, err := th.Client.CreateUserWithToken(&user, "wrong") require.Error(t, err) CheckNotFoundStatus(t, resp) CheckErrorID(t, err, "api.user.create_user.signup_link_invalid.app_error") }) t.Run("EnableUserCreationDisable", func(t *testing.T) { enableUserCreation := th.App.Config().TeamSettings.EnableUserCreation defer func() { th.App.UpdateConfig(func(cfg *model.Config) { cfg.TeamSettings.EnableUserCreation = enableUserCreation }) }() user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " " + model.SystemUserRoleId} token := model.NewToken( app.TokenTypeTeamInvitation, model.MapToJSON(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}), ) require.NoError(t, th.App.Srv().Store.Token().Save(token)) defer th.App.DeleteToken(token) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableUserCreation = false }) _, resp, err := th.Client.CreateUserWithToken(&user, token.Token) require.Error(t, err) CheckNotImplementedStatus(t, resp) CheckErrorID(t, err, "api.user.create_user.signup_email_disabled.app_error") }) th.TestForSystemAdminAndLocal(t, func(t *testing.T, client *model.Client4) { enableUserCreation := th.App.Config().TeamSettings.EnableUserCreation defer th.App.UpdateConfig(func(cfg *model.Config) { cfg.TeamSettings.EnableUserCreation = enableUserCreation }) user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " " + model.SystemUserRoleId} token := model.NewToken( app.TokenTypeTeamInvitation, model.MapToJSON(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}), ) require.NoError(t, th.App.Srv().Store.Token().Save(token)) defer th.App.DeleteToken(token) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableUserCreation = false }) _, resp, err := 
client.CreateUserWithToken(&user, token.Token) require.Error(t, err) CheckNotImplementedStatus(t, resp) CheckErrorID(t, err, "api.user.create_user.signup_email_disabled.app_error") }, "EnableUserCreationDisable") t.Run("EnableOpenServerDisable", func(t *testing.T) { user := model.User{Email: th.GenerateTestEmail(), Nickname: "Corey Hulen", Password: "hello1", Username: GenerateTestUsername(), Roles: model.SystemAdminRoleId + " " + model.SystemUserRoleId} token := model.NewToken( app.TokenTypeTeamInvitation, model.MapToJSON(map[string]string{"teamId": th.BasicTeam.Id, "email": user.Email}), ) require.NoError(t, th.App.Srv().Store.Token().Save(token)) enableOpenServer := th.App.Config().TeamSettings.EnableOpenServer defer func() { th.App.UpdateConfig(func(cfg *model.Config) { cfg.TeamSettings.EnableOpenServer = enableOpenServer }) }() th.App.UpdateConfig(func(cfg *model.Config) { *cfg.TeamSettings.EnableOpenServer = false }) ruser, resp, err := th.Client.CreateUserWithToken(&user, token.Token) require.NoError(t, err) CheckCreatedStatus(t, resp) th.Client.Login(user.Email, user.Password) require.Equal(t, user.Nickname, ruser.Nickname) require.Equal(t, model.SystemUserRoleId, ruser.Roles, "should clear roles") CheckUserSanitization(t, ruser) _, err = th.App.Srv().Store.Token().GetByToken(token.Token) require.Error(t, err, "The token must be deleted after be used") }) }
explode_data.jsonl/47482
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2833 }
[ 2830, 3393, 4021, 1474, 2354, 3323, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 3244, 16708, 445, 4021, 2354, 3323, 32847, 1820, 497, 2915, 1155, 353, 8840, 836, 8, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInsertShardedIgnoreUnownedVerifyFail(t *testing.T) { invschema := &vschemapb.SrvVSchema{ Keyspaces: map[string]*vschemapb.Keyspace{ "sharded": { Sharded: true, Vindexes: map[string]*vschemapb.Vindex{ "hash": { Type: "hash", }, "onecol": { Type: "lookup", Params: map[string]string{ "table": "lkp1", "from": "from", "to": "toc", }, }, }, Tables: map[string]*vschemapb.Table{ "t1": { ColumnVindexes: []*vschemapb.ColumnVindex{{ Name: "hash", Columns: []string{"id"}, }, { Name: "onecol", Columns: []string{"c3"}, }}, }, }, }, }, } vs, err := vindexes.BuildVSchema(invschema) if err != nil { t.Fatal(err) } ks := vs.Keyspaces["sharded"] ins := NewInsert( InsertSharded, ks.Keyspace, []sqltypes.PlanValue{{ // colVindex columns: id Values: []sqltypes.PlanValue{{ // rows for id Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(1), }}, }}, }, { // colVindex columns: c3 Values: []sqltypes.PlanValue{{ // rows for c3 Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(2), }}, }}, }}, ks.Tables["t1"], "prefix", []string{" mid1", " mid2", " mid3"}, " suffix", ) vc := &loggingVCursor{ shards: []string{"-20", "20-"}, } _, err = ins.Execute(vc, map[string]*querypb.BindVariable{}, false) expectError(t, "Execute", err, "execInsertSharded: getInsertShardedRoute: values [[INT64(2)]] for column [c3] does not map to keyspace ids") }
explode_data.jsonl/55996
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 805 }
[ 2830, 3393, 13780, 2016, 20958, 12497, 1806, 18332, 32627, 19524, 1155, 353, 8840, 836, 8, 341, 17430, 11562, 3416, 1669, 609, 11562, 2407, 391, 65, 808, 10553, 53, 8632, 515, 197, 197, 8850, 27338, 25, 2415, 14032, 8465, 11562, 2407, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeviceClassOIDs_readOID_multipleLevel(t *testing.T) { var ifIndexOidReader MockOIDReader var ifDescrOidReader MockOIDReader var radioInterfaceOidReader MockOIDReader ctx := context.Background() ifIndexOidReader. On("readOID", ctx, []value.Value(nil), true). Return(map[int]interface{}{ 1: value.New("1"), 2: value.New("2"), 3: value.New("3"), }, nil) ifDescrOidReader. On("readOID", ctx, []value.Value(nil), true). Return(map[int]interface{}{ 1: value.New("Port 1"), 2: value.New("Port 2"), 3: value.New("Port 3"), }, nil) radioInterfaceOidReader. On("readOID", ctx, []value.Value(nil), true). Return(map[int]interface{}{ 1: map[string]interface{}{ "level_in": value.New(1), "level_out": value.New(-1), }, 2: map[string]interface{}{ "level_in": value.New(2), "level_out": value.New(-2), }, }, nil) sut := deviceClassOIDs{ "ifIndex": &ifIndexOidReader, "ifDescr": &ifDescrOidReader, "radio": &radioInterfaceOidReader, } expected := map[int]interface{}{ 1: map[string]interface{}{ "ifIndex": value.New(1), "ifDescr": value.New("Port 1"), "radio": map[string]interface{}{ "level_in": value.New(1), "level_out": value.New(-1), }, }, 2: map[string]interface{}{ "ifIndex": value.New(2), "ifDescr": value.New("Port 2"), "radio": map[string]interface{}{ "level_in": value.New(2), "level_out": value.New(-2), }, }, 3: map[string]interface{}{ "ifIndex": value.New(3), "ifDescr": value.New("Port 3"), }, } res, err := sut.readOID(ctx, []value.Value(nil), true) if assert.NoError(t, err) { assert.Equal(t, expected, res) } }
explode_data.jsonl/68085
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 755 }
[ 2830, 3393, 6985, 1957, 29805, 82, 6443, 29805, 45233, 4449, 1155, 353, 8840, 836, 8, 341, 2405, 421, 1552, 46, 307, 5062, 14563, 29805, 5062, 198, 2405, 421, 11065, 81, 46, 307, 5062, 14563, 29805, 5062, 198, 2405, 8887, 5051, 46, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestQueuedRetry_StopWhileWaiting(t *testing.T) { qCfg := CreateDefaultQueueSettings() qCfg.NumConsumers = 1 rCfg := CreateDefaultRetrySettings() be := newBaseExporter(defaultExporterCfg, zap.NewNop(), WithRetry(rCfg), WithQueue(qCfg)) ocs := newObservabilityConsumerSender(be.qrSender.consumerSender) be.qrSender.consumerSender = ocs require.NoError(t, be.Start(context.Background(), componenttest.NewNopHost())) firstMockR := newMockRequest(context.Background(), 2, errors.New("transient error")) ocs.run(func() { // This is asynchronous so it should just enqueue, no errors expected. droppedItems, err := be.sender.send(firstMockR) require.NoError(t, err) assert.Equal(t, 0, droppedItems) }) // Enqueue another request to ensure when calling shutdown we drain the queue. secondMockR := newMockRequest(context.Background(), 3, nil) ocs.run(func() { // This is asynchronous so it should just enqueue, no errors expected. droppedItems, err := be.sender.send(secondMockR) require.NoError(t, err) assert.Equal(t, 0, droppedItems) }) assert.NoError(t, be.Shutdown(context.Background())) // TODO: Ensure that queue is drained, and uncomment the next 3 lines. // https://github.com/jaegertracing/jaeger/pull/2349 firstMockR.checkNumRequests(t, 1) // secondMockR.checkNumRequests(t, 1) // ocs.checkSendItemsCount(t, 3) ocs.checkDroppedItemsCount(t, 2) // require.Zero(t, be.qrSender.queue.Size()) }
explode_data.jsonl/46007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 524 }
[ 2830, 3393, 25776, 3260, 51560, 80308, 7983, 42104, 1155, 353, 8840, 836, 8, 341, 18534, 42467, 1669, 4230, 3675, 7554, 6086, 741, 18534, 42467, 39847, 41966, 388, 284, 220, 16, 198, 7000, 42467, 1669, 4230, 3675, 51560, 6086, 741, 73142,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWriteError(t *testing.T) { // nil error should not panic rec := httptest.NewRecorder() r := new(http.Request) writeError(zap.NewExample(), rec, r, nil) h := rec.Header() if len(h) > 0 { t.Fatalf("unexpected non-empty headers: %#v", h) } b := rec.Body.String() if len(b) > 0 { t.Fatalf("unexpected non-empty body: %q", b) } tests := []struct { err error wcode int wi string }{ { v2error.NewError(v2error.EcodeKeyNotFound, "/foo/bar", 123), http.StatusNotFound, "123", }, { v2error.NewError(v2error.EcodeTestFailed, "/foo/bar", 456), http.StatusPreconditionFailed, "456", }, { err: errors.New("something went wrong"), wcode: http.StatusInternalServerError, }, } for i, tt := range tests { rw := httptest.NewRecorder() writeError(zap.NewExample(), rw, r, tt.err) if code := rw.Code; code != tt.wcode { t.Errorf("#%d: code=%d, want %d", i, code, tt.wcode) } if idx := rw.Header().Get("X-Etcd-Index"); idx != tt.wi { t.Errorf("#%d: X-Etcd-Index=%q, want %q", i, idx, tt.wi) } } }
explode_data.jsonl/44959
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 503 }
[ 2830, 3393, 7985, 1454, 1155, 353, 8840, 836, 8, 341, 197, 322, 2092, 1465, 1265, 537, 21975, 198, 67904, 1669, 54320, 70334, 7121, 47023, 741, 7000, 1669, 501, 19886, 9659, 340, 24945, 1454, 13174, 391, 7121, 13314, 1507, 1395, 11, 435...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetLatestReleaseTagName(t *testing.T) { tagName, err := GetLatestReleaseTagName("containous", "structor") require.NoError(t, err) assert.Regexp(t, `v\d+.\d+(.\d+)?`, tagName) }
explode_data.jsonl/33573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 1949, 31992, 16077, 22616, 1155, 353, 8840, 836, 8, 341, 60439, 675, 11, 1848, 1669, 2126, 31992, 16077, 22616, 445, 52095, 782, 497, 330, 3086, 5130, 17957, 35699, 1155, 11, 1848, 340, 6948, 8989, 4580, 1155, 11, 1565, 85, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestKraken(t *testing.T) { Convey("should not crash", t, func() { _, err := instance.Get(&config) So(err, ShouldBeNil) }) Convey("should be able to get key", t, func() { res, _ := instance.Get(&config) res.AddTestBalance("ETH", 0.1) result := res.GetCurrencyValue("ETH") So(result, ShouldEqual, 0.1) // does not exist notexisting := res.GetCurrencyValue("ETHH") So(notexisting, ShouldEqual, 0.0) }) Convey("should be able to get all", t, func() { i, _ := instance.Get(&config) i.AddTestBalance("ETH", 1.11) res := i.GetAll([]string{"ETH"}) So(len(res), ShouldEqual, 1) So(res[0].Balance, ShouldEqual, 1.11) }) }
explode_data.jsonl/24101
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 271 }
[ 2830, 3393, 81165, 3366, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 5445, 537, 9920, 497, 259, 11, 2915, 368, 341, 197, 197, 6878, 1848, 1669, 2867, 2234, 2099, 1676, 340, 197, 76912, 3964, 11, 12260, 3430, 19064, 340, 197, 3518,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeniesInvalidServiceAccount(t *testing.T) { ns := "myns" // Build a test client that the admission plugin can use to look up the service account missing from its cache client := fake.NewSimpleClientset() admit := NewServiceAccount() admit.SetExternalKubeClientSet(client) informerFactory := informers.NewSharedInformerFactory(nil, controller.NoResyncPeriodFunc()) admit.SetExternalKubeInformerFactory(informerFactory) pod := &api.Pod{} attrs := admission.NewAttributesRecord(pod, nil, api.Kind("Pod").WithVersion("version"), ns, "myname", api.Resource("pods").WithVersion("version"), "", admission.Create, false, nil) err := admit.Admit(attrs) if err == nil { t.Errorf("Expected error for missing service account, got none") } }
explode_data.jsonl/61346
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 23619, 550, 7928, 1860, 7365, 1155, 353, 8840, 836, 8, 341, 84041, 1669, 330, 76, 1872, 82, 1837, 197, 322, 7854, 264, 1273, 2943, 429, 279, 25293, 9006, 646, 990, 311, 1401, 705, 279, 2473, 2692, 7402, 504, 1181, 6500, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestImportStatementAlias(t *testing.T) { p := createParser(`import "exchange" as d`) bvmUtils.Assert(t, isImportStatement(p), "should detect import statement") parseImportStatement(p) n := p.scope.Next() bvmUtils.AssertNow(t, n.Type() == ast.ImportStatement, "wrong import type") a := n.(*ast.ImportStatementNode) bvmUtils.AssertNow(t, a.Path == "exchange", "wrong import path value") bvmUtils.AssertNow(t, a.Alias == "d", "wrong import alias value") }
explode_data.jsonl/49762
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 11511, 8636, 22720, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1855, 6570, 5809, 474, 330, 39568, 1, 438, 294, 24183, 2233, 7338, 4209, 11711, 1155, 11, 374, 11511, 8636, 1295, 701, 330, 5445, 11140, 1159, 5114, 1138, 75115, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestString(t *testing.T) { store, clean := realtikvtest.CreateMockStoreAndSetup(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("select 1") // here to check the panic bug in String() when txn is nil after committed. t.Log(tk.Session().String()) }
explode_data.jsonl/5732
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 703, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1931, 83, 1579, 85, 1944, 7251, 11571, 6093, 3036, 21821, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConferenceUnmarshall(t *testing.T) { data, err := ioutil.ReadFile("testdata/conference.xml") if err != nil { t.Fatalf("Unable to read test data: %v", err) } c := new(Conference) err = xml.Unmarshal(data, c) if err != nil { t.Errorf("Unable to Unmarshal: %v", err) return } if c == nil { t.Error("No Conference found") return } if c.title != "Meta-Conference" { t.Errorf("Failed matching title. Expected \"Meta-Conference\", found \"%s\"", c.title) return } }
explode_data.jsonl/6311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 204 }
[ 2830, 3393, 91248, 1806, 84161, 1155, 353, 8840, 836, 8, 341, 8924, 11, 1848, 1669, 43144, 78976, 445, 92425, 31236, 2202, 9028, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 17075, 311, 1349, 1273, 821, 25, 1018, 85, 497, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestValidateObjectMetaCustomName(t *testing.T) { errs := ValidateObjectMeta(&api.ObjectMeta{Name: "test", GenerateName: "foo"}, false, func(s string, prefix bool) (bool, string) { if s == "test" { return true, "" } return false, "name-gen" }) if len(errs) != 1 { t.Fatalf("unexpected errors: %v", errs) } if !strings.Contains(errs[0].Error(), "name-gen") { t.Errorf("unexpected error message: %v", errs) } }
explode_data.jsonl/62778
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 17926, 1190, 12175, 10268, 675, 1155, 353, 8840, 836, 8, 341, 9859, 82, 1669, 23282, 1190, 12175, 2099, 2068, 80222, 63121, 25, 330, 1944, 497, 19813, 675, 25, 330, 7975, 14345, 895, 11, 2915, 1141, 914, 11, 9252, 1807, 8,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestOutdatedClientIrmaConfiguration checks that a client with an outdated
// irma_configuration (missing a newly added attribute) can still disclose a
// credential to a server that does know about the new attribute.
func TestOutdatedClientIrmaConfiguration(t *testing.T) {
	client, _ := parseStorage(t)
	defer test.ClearTestStorage(t)

	// Remove old studentCard credential from before support for optional attributes, and issue a new one
	require.NoError(t, client.RemoveAllCredentials())
	require.Nil(t, requestorSessionHelper(t, getIssuanceRequest(true), client).Err)

	// client does not have updated irma_configuration with new attribute irma-demo.RU.studentCard.newAttribute,
	// and the server does. Disclose an attribute from this credential. The client implicitly discloses value 0
	// for the new attribute, and the server accepts.
	req := getDisclosureRequest(irma.NewAttributeTypeIdentifier("irma-demo.RU.studentCard.level"))
	require.Nil(t, requestorSessionHelper(t, req, client, sessionOptionUpdatedIrmaConfiguration).Err)
}
explode_data.jsonl/70000
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 2662, 3577, 2959, 48113, 1728, 7688, 1155, 353, 8840, 836, 8, 341, 25291, 11, 716, 1669, 4715, 5793, 1155, 340, 16867, 1273, 13524, 2271, 5793, 1155, 692, 197, 322, 10783, 2310, 5458, 5770, 40207, 504, 1573, 1824, 369, 10101...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransportNoRaceOnRequestObjectAfterRequestComplete mutates the request
// object after the round trip has fully completed and the body is drained and
// closed; run under -race this must not race with the transport's own use of
// the request.
func TestTransportNoRaceOnRequestObjectAfterRequestComplete(t *testing.T) {
	// Minimal handler: status 200 with a short body.
	st := newServerTester(t, func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(200)
		io.WriteString(w, "body")
	}, optOnlyServer)
	defer st.Close()

	tr := &Transport{TLSClientConfig: tlsConfigInsecure}
	defer tr.CloseIdleConnections()

	req, _ := http.NewRequest("GET", st.ts.URL, nil)
	resp, err := tr.RoundTrip(req)
	if err != nil {
		t.Fatal(err)
	}
	// Fully consume and close the body so the request is complete before the
	// mutation below.
	if _, err = io.Copy(ioutil.Discard, resp.Body); err != nil {
		t.Fatalf("error reading response body: %v", err)
	}
	if err := resp.Body.Close(); err != nil {
		t.Fatalf("error closing response body: %v", err)
	}
	// This access of req.Header should not race with code in the transport.
	req.Header = http.Header{}
}
explode_data.jsonl/16136
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 27560, 2753, 55991, 1925, 1900, 1190, 6025, 1900, 12548, 1155, 353, 8840, 836, 8, 341, 18388, 1669, 501, 5475, 58699, 1155, 11, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6692, 69794, 7, 17, 15, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_New(t *testing.T) { t.Parallel() a := assert.New(t) p := provider() a.Equal(p.ClientKey, os.Getenv("SALESFORCE_KEY")) a.Equal(p.Secret, os.Getenv("SALESFORCE_SECRET")) a.Equal(p.CallbackURL, "/foo") }
explode_data.jsonl/30757
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 39582, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 11323, 1669, 2060, 7121, 1155, 340, 3223, 1669, 9109, 2822, 11323, 12808, 1295, 11716, 1592, 11, 2643, 64883, 445, 7778, 14017, 80205, 6600, 5455, 11323, 12808, 1295...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestSyncStatefulSet is a table-driven test of Controller.syncStatefulSet:
// bad keys are rejected, pods owned (by UID) by the statefulset are enqueued,
// and on statefulset deletion the annotated PVCs in its namespace are enqueued.
func TestSyncStatefulSet(t *testing.T) {
	tests := []struct {
		name          string
		key           string             // cache key passed to syncStatefulSet
		initialObjs   []runtime.Object   // objects preloaded into the fake controller's indexers
		expectedError string             // exact error message expected, "" for success
		prepare       func(t *testing.T, c *Controller)
		validate      func(t *testing.T, c *Controller) // post-sync queue assertions
	}{
		{
			name:          "invalid key",
			key:           "foo/bar/baz",
			expectedError: `unexpected key format: "foo/bar/baz"`,
		},
		{
			name: "statefulset update enqueues pods owned by statefulset",
			key:  "default/foo",
			initialObjs: []runtime.Object{
				newStatefulSetWithUID(1, "foo", "default", "123"),
				// Owner UID "123" matches the statefulset above.
				newPodWithOwnerRefs("bar", "default", []metav1.OwnerReference{
					newOwnerRef("foo", "StatefulSet", "123"),
				}),
				newPod("qux", "default"),
			},
			validate: func(t *testing.T, c *Controller) {
				assert.Equal(t, 1, c.podQueue.Len())
			},
		},
		{
			name: "no pods owned by statefulset",
			key:  "default/foo",
			initialObjs: []runtime.Object{
				newStatefulSetWithUID(1, "foo", "default", "123"),
				// Owner UID "456" does NOT match, so nothing is enqueued.
				newPodWithOwnerRefs("bar", "default", []metav1.OwnerReference{
					newOwnerRef("foo", "StatefulSet", "456"),
				}),
				newPod("qux", "default"),
			},
			validate: func(t *testing.T, c *Controller) {
				assert.Equal(t, 0, c.podQueue.Len())
			},
		},
		{
			name: "statefulset deletion enqueues annotated pvcs in namespace",
			key:  "default/foo",
			initialObjs: []runtime.Object{
				newPVC("foo", "default"),
				// Only this PVC is both annotated for "foo" and in "default".
				newPVCWithStatefulSetAnnotation("bar", "default", "foo"),
				newPVCWithStatefulSetAnnotation("baz", "kube-system", "foo"),
				newPVC("bar", "kube-system"),
			},
			validate: func(t *testing.T, c *Controller) {
				assert.Equal(t, 1, c.pvcQueue.Len())
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			c, err := newFakeController(test.initialObjs...)
			require.NoError(t, err)
			if test.prepare != nil {
				test.prepare(t, c)
			}
			fakeIndexerAdd(t, c, test.initialObjs...)
			err = c.syncStatefulSet(test.key)
			if test.expectedError != "" {
				require.Error(t, err)
				assert.Equal(t, test.expectedError, err.Error())
			} else {
				require.NoError(t, err)
			}
			if test.validate != nil {
				test.validate(t, c)
			}
		})
	}
}
explode_data.jsonl/46212
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1003 }
[ 2830, 3393, 12154, 1397, 1262, 1649, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 23634, 1843, 914, 198, 197, 85270, 4121, 2519, 256, 3056, 22255, 8348, 198, 197, 42400, 1454, 914, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewControllerForAllInOneAsExplicitValue(t *testing.T) { jaeger := v1.NewJaeger(types.NamespacedName{Name: "my-instance"}) jaeger.Spec.Strategy = v1.DeploymentStrategyDeprecatedAllInOne // same as 'all-in-one' ctrl := For(context.TODO(), jaeger) assert.Equal(t, ctrl.Type(), v1.DeploymentStrategyAllInOne) }
explode_data.jsonl/21842
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 3564, 2051, 2461, 2403, 641, 3966, 2121, 98923, 1130, 1155, 353, 8840, 836, 8, 341, 197, 5580, 1878, 1669, 348, 16, 7121, 52445, 1878, 52613, 98932, 68552, 675, 63121, 25, 330, 2408, 73655, 23625, 197, 5580, 1878, 36473, 273...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIdentifier exercises DID identifier handling end-to-end: parsing a fully
// loaded DID URL, custom verification, the accessor methods, generation of new
// identifiers, and encode/decode round-tripping.
func TestIdentifier(t *testing.T) {
	assert := tdd.New(t)
	t.Run("Parse", func(t *testing.T) {
		// Test parser with a fully loaded DID
		// https://w3c.github.io/did-core/#example-a-resource-external-to-a-did-document
		example := "did:example:123/custom/path?service=agent&relativeRef=/credentials#degree"
		id, err := Parse(example)
		assert.Nil(err, "failed to parse identifier")
		// Verify path contents
		assert.Equal("/custom/path", id.Path())
		assert.Equal(2, len(id.data.PathSegments))
		assert.True(id.IsURL())
		// Verify fragment value
		assert.Equal("#degree", id.Fragment())
		// Verify query parameters
		q, err := id.Query()
		assert.Nil(err, "failed to parse query")
		assert.Equal("service=agent&relativeRef=/credentials", id.RawQuery())
		assert.Equal("agent", q.Get("service"))
		assert.Equal("/credentials", q.Get("relativeRef"))
		// Verify string representation
		assert.Equal(example, id.String())
	})
	t.Run("Verify", func(t *testing.T) {
		seed := uuid.New()
		d, _ := NewIdentifier("bryk", seed.String())
		// Check the id string is a valid UUID
		customVerifier := func(s string) error {
			_, e := uuid.Parse(s)
			return e
		}
		assert.Nil(d.Verify(customVerifier), "verify error")
	})
	t.Run("Accessors", func(t *testing.T) {
		d, err := Parse("did:bryk:foo/rick/sanchez?variable=value&sample=test#c137")
		assert.Nil(err, "parse error")
		assert.Equal("bryk", d.Method(), "invalid method")
		assert.Equal("did:bryk:foo", d.DID(), "invalid DID")
		assert.Equal("/rick/sanchez", d.Path(), "invalid path")
		assert.Equal("#c137", d.Fragment(), "invalid fragment")
		assert.Equal("variable=value&sample=test", d.RawQuery(), "invalid raw query")
		q, err := d.Query()
		assert.Nil(err, "failed to retrieve query")
		assert.Equal("value", q.Get("variable"), "invalid query variable")
		assert.Equal("test", q.Get("sample"), "invalid query variable")
	})
	t.Run("New", func(*testing.T) {
		idString := uuid.New()
		_, err := NewIdentifier("", idString.String())
		assert.NotNil(err, "failed to catch missing method")
		// Round-trip: freshly generated identifiers must parse back cleanly.
		for i := 0; i <= 100; i++ {
			d, _ := NewIdentifier("bryk", idString.String())
			_, err = Parse(d.String())
			assert.Nil(err, "invalid identifier produced")
		}
	})
	t.Run("Serialization", func(t *testing.T) {
		// Create a new identifier instance from a string
		id, err := Parse("did:example:q7ckgxeq1lxmra0r")
		assert.Nil(err, "parse error")
		// Add a new key
		assert.Nil(id.AddNewVerificationMethod("key-1", KeyTypeEd), "add key error")
		// Encode
		bin := encode(id)
		// Restore
		id2, err := decode(bin)
		assert.Nil(err, "decode error")
		assert.Equal(id.data.VerificationMethods[0].Private, id2.data.VerificationMethods[0].Private, "invalid data restored")
	})
}
explode_data.jsonl/37746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1059 }
[ 2830, 3393, 8714, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 259, 631, 7121, 1155, 692, 3244, 16708, 445, 14463, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 3393, 6729, 448, 264, 7225, 6661, 59119, 198, 197, 197, 322, 370...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSortedArray_Clear(t *testing.T) { gtest.C(t, func(t *gtest.T) { a1 := []interface{}{"a", "d", "c", "b", "e", "f"} func1 := func(v1, v2 interface{}) int { return strings.Compare(gconv.String(v1), gconv.String(v2)) } array1 := garray.NewSortedArrayFrom(a1, func1) t.Assert(array1.Len(), 6) array1.Clear() t.Assert(array1.Len(), 0) }) }
explode_data.jsonl/67020
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 51051, 1857, 57744, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 11323, 16, 1669, 3056, 4970, 6257, 4913, 64, 497, 330, 67, 497, 330, 66, 497, 330, 65, 497, 330, 68...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEmpty checks that transforming an empty field set still produces the
// four built-in meta fields (_id, _type, _index, _score) with their fixed
// attribute values and an empty fieldFormatMap.
func TestEmpty(t *testing.T) {
	trans, err := newFieldsTransformer(version, common.Fields{})
	assert.NoError(t, err)
	out, err := trans.transform()
	assert.NoError(t, err)
	expected := common.MapStr{
		"fieldFormatMap": common.MapStr{},
		"fields": []common.MapStr{
			common.MapStr{
				"name":         "_id",
				"type":         "string",
				"scripted":     false,
				"aggregatable": false,
				"analyzed":     false,
				"count":        0,
				"indexed":      false,
				"doc_values":   false,
				"searchable":   false,
			},
			// _type is the only meta field that is aggregatable and searchable.
			common.MapStr{
				"name":         "_type",
				"type":         "string",
				"scripted":     false,
				"count":        0,
				"aggregatable": true,
				"analyzed":     false,
				"indexed":      false,
				"doc_values":   false,
				"searchable":   true,
			},
			common.MapStr{
				"name":         "_index",
				"type":         "string",
				"scripted":     false,
				"count":        0,
				"aggregatable": false,
				"analyzed":     false,
				"indexed":      false,
				"doc_values":   false,
				"searchable":   false,
			},
			common.MapStr{
				"name":         "_score",
				"type":         "number",
				"scripted":     false,
				"count":        0,
				"aggregatable": false,
				"analyzed":     false,
				"indexed":      false,
				"doc_values":   false,
				"searchable":   false,
			},
		},
	}
	assert.Equal(t, expected, out)
}
explode_data.jsonl/37630
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 736 }
[ 2830, 3393, 3522, 1155, 353, 8840, 836, 8, 341, 72453, 11, 1848, 1669, 501, 8941, 46358, 37770, 11, 4185, 42809, 37790, 6948, 35699, 1155, 11, 1848, 340, 13967, 11, 1848, 1669, 1356, 8134, 741, 6948, 35699, 1155, 11, 1848, 340, 42400, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceCreateWithMultipleImages(t *testing.T) { _, _, _, err := fakeServiceCreate([]string{ "service", "create", "foo", "--image", "gcr.io/foo/bar:baz", "--image", "gcr.io/bar/foo:baz", "--no-wait"}, false) assert.Assert(t, util.ContainsAll(err.Error(), "\"--image\"", "\"gcr.io/bar/foo:baz\"", "flag", "once")) }
explode_data.jsonl/42439
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 1860, 4021, 2354, 32089, 14228, 1155, 353, 8840, 836, 8, 341, 197, 6878, 8358, 8358, 1848, 1669, 12418, 1860, 4021, 10556, 917, 515, 197, 197, 1, 7936, 497, 330, 3182, 497, 330, 7975, 497, 14482, 1805, 497, 330, 70, 5082, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsSubtree(t *testing.T) { cases := []struct { S *TreeNode T *TreeNode Expect bool }{ { S: &TreeNode{ Val: 3, Left: &TreeNode{ Val: 4, Left: &TreeNode{Val: 1}, Right: &TreeNode{Val: 2}, }, Right: &TreeNode{ Val: 5, }, }, T: &TreeNode{ Val: 4, Left: &TreeNode{Val: 1}, Right: &TreeNode{Val: 2}, }, Expect: true, }, { S: &TreeNode{ Val: 3, Left: &TreeNode{ Val: 4, Left: &TreeNode{Val: 1}, Right: &TreeNode{ Val: 2, Left: &TreeNode{Val: 0}, }, }, Right: &TreeNode{ Val: 5, }, }, T: &TreeNode{ Val: 4, Left: &TreeNode{Val: 1}, Right: &TreeNode{Val: 2}, }, Expect: false, }, { S: &TreeNode{Val: 1}, T: nil, Expect: true, }, } for _, c := range cases { if got := isSubtree(c.S, c.T); got != c.Expect { t.Errorf("isSubtree(%v, %v) = %v; expect %v", c.S, c.T, got, c.Expect) } } }
explode_data.jsonl/41658
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 595 }
[ 2830, 3393, 3872, 3136, 9344, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 7568, 414, 353, 26597, 198, 197, 10261, 414, 353, 26597, 198, 197, 35911, 1807, 198, 197, 59403, 197, 197, 515, 298, 7568, 25, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestVirtualFile covers the accessors of a VirtualFileResolver backed by an
// in-memory markdown file: path/name/IsDirectory, raw and rendered content,
// binary detection, and highlighting with the highlighter mocked out.
func TestVirtualFile(t *testing.T) {
	fileContent := "# this is content"
	fileName := "dir/awesome_file.md"
	vfr := NewVirtualFileResolver(
		CreateFileInfo(fileName, false),
		// Content loader simply returns the in-memory string.
		func(ctx context.Context) (string, error) { return fileContent, nil },
	)
	t.Run("Path", func(t *testing.T) {
		if have, want := vfr.Path(), fileName; have != want {
			t.Fatalf("wrong path, want=%q have=%q", want, have)
		}
	})
	t.Run("Name", func(t *testing.T) {
		if have, want := vfr.Name(), path.Base(fileName); have != want {
			t.Fatalf("wrong name, want=%q have=%q", want, have)
		}
	})
	t.Run("IsDirectory", func(t *testing.T) {
		if have, want := vfr.IsDirectory(), false; have != want {
			t.Fatalf("wrong IsDirectory, want=%t have=%t", want, have)
		}
	})
	t.Run("Content", func(t *testing.T) {
		have, err := vfr.Content(context.Background())
		if err != nil {
			t.Fatal(err)
		}
		if want := fileContent; have != want {
			t.Fatalf("wrong Content, want=%q have=%q", want, have)
		}
	})
	t.Run("RichHTML", func(t *testing.T) {
		have, err := vfr.RichHTML(context.Background())
		if err != nil {
			t.Fatal(err)
		}
		// Expected rendering of the markdown heading, including the anchor.
		renderedMarkdown := `<h1><a name="this-is-content" class="anchor" href="#this-is-content" rel="nofollow" aria-hidden="true"><span></span></a>this is content</h1>
`
		if diff := cmp.Diff(have, renderedMarkdown); diff != "" {
			t.Fatalf("wrong RichHTML: %s", diff)
		}
	})
	t.Run("Binary", func(t *testing.T) {
		isBinary, err := vfr.Binary(context.Background())
		if err != nil {
			t.Fatal(err)
		}
		if isBinary {
			t.Fatalf("wrong Binary: %t", isBinary)
		}
	})
	t.Run("Highlight", func(t *testing.T) {
		// Run the highlight check twice: once completed, once aborted, with
		// the highlighter replaced by a mock returning fixed content.
		testHighlight := func(aborted bool) {
			highlightedContent := template.HTML("highlight of the file")
			highlight.Mocks.Code = func(p highlight.Params) (template.HTML, bool, error) {
				return highlightedContent, aborted, nil
			}
			t.Cleanup(highlight.ResetMocks)
			highlightedFile, err := vfr.Highlight(context.Background(), &HighlightArgs{})
			if err != nil {
				t.Fatal(err)
			}
			if highlightedFile.Aborted() != aborted {
				t.Fatalf("wrong Aborted. want=%t have=%t", aborted, highlightedFile.Aborted())
			}
			if highlightedFile.HTML() != string(highlightedContent) {
				t.Fatalf("wrong HTML. want=%q have=%q", highlightedContent, highlightedFile.HTML())
			}
		}
		testHighlight(false)
		testHighlight(true)
	})
}
explode_data.jsonl/75158
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 965 }
[ 2830, 3393, 33026, 1703, 1155, 353, 8840, 836, 8, 341, 17661, 2762, 1669, 5869, 419, 374, 2213, 698, 17661, 675, 1669, 330, 3741, 14, 16875, 2458, 21324, 698, 5195, 1626, 1669, 1532, 33026, 1703, 18190, 1006, 197, 75569, 45430, 23014, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKnownDNSProviderError(t *testing.T) { defer envTest.RestoreEnv() envTest.ClearEnv() provider, err := NewDNSChallengeProviderByName("exec") assert.Error(t, err) assert.Nil(t, provider) }
explode_data.jsonl/49158
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 48206, 61088, 5179, 1454, 1155, 353, 8840, 836, 8, 341, 16867, 6105, 2271, 31129, 460, 14359, 741, 57538, 2271, 13524, 14359, 2822, 197, 19979, 11, 1848, 1669, 1532, 61088, 62078, 5179, 16898, 445, 11748, 1138, 6948, 6141, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestEth_GetStorageAt(t *testing.T) { expectedRes := hexutil.Bytes{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0} rpcRes := call(t, "eth_getStorageAt", []string{addrA, fmt.Sprint(addrAStoreKey), zeroString}) var storage hexutil.Bytes err := storage.UnmarshalJSON(rpcRes.Result) require.NoError(t, err) t.Logf("Got value [%X] for %s with key %X\n", storage, addrA, addrAStoreKey) require.True(t, bytes.Equal(storage, expectedRes), "expected: %d (%d bytes) got: %d (%d bytes)", expectedRes, len(expectedRes), storage, len(storage)) }
explode_data.jsonl/845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 65390, 13614, 5793, 1655, 1155, 353, 8840, 836, 8, 341, 42400, 1061, 1669, 12371, 1314, 36868, 90, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220, 15, 11, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestServerAPI drives the HTTP API end-to-end against two test servers: one
// with a working store ("goodServer") and one whose store returns errors
// ("badServer"). Each table row pins the exact status code and, where
// applicable, the exact JSON body returned by /api/candle and /api/insert.
func TestServerAPI(t *testing.T) {
	goodServer, _, goodTeardown := startupT(t, false)
	defer goodTeardown()
	badServer, _, badTeardown := startupT(t, true)
	defer badTeardown()
	// Zero time and Unix epoch, formatted as RFC3339 query values.
	startTime := time.Time{}.Format(time.RFC3339)
	endTime := time.Unix(0, 0).Format(time.RFC3339)
	var testData = []struct {
		ts           *httptest.Server // which server to hit
		url          string           // request path (and query)
		responseCode int              // expected HTTP status
		candles      []store.Candle   // expected decoded candles, if any
		result       string           // expected raw body, "" to skip the check
		method       string           // defaults to GET when empty
		body         io.Reader        // request body for POSTs
	}{
		{ts: goodServer, url: "/api/candle", responseCode: http.StatusBadRequest, result: "{\"details\":\"\",\"error\":\"no 'from' field passed\"}\n"},
		{ts: goodServer, url: "/api/candle?from=bad", responseCode: http.StatusExpectationFailed, result: "{\"details\":\"can't parse 'from' field\",\"error\":\"parsing time \\\"bad\\\" as \\\"2006-01-02T15:04:05Z07:00\\\": cannot parse \\\"bad\\\" as \\\"2006\\\"\"}\n"},
		{ts: goodServer, url: fmt.Sprintf("/api/candle?from=%v&to=bad", startTime), responseCode: http.StatusExpectationFailed, result: "{\"details\":\"can't parse 'to' field\",\"error\":\"parsing time \\\"bad\\\" as \\\"2006-01-02T15:04:05Z07:00\\\": cannot parse \\\"bad\\\" as \\\"2006\\\"\"}\n"},
		{ts: goodServer, url: fmt.Sprintf("/api/candle?from=%v&aggregate=bad", startTime), responseCode: http.StatusExpectationFailed, result: "{\"details\":\"can't parse 'aggregate' field\",\"error\":\"time: invalid duration bad\"}\n"},
		{ts: goodServer, url: fmt.Sprintf("/api/candle?from=%v&to=%v", startTime, startTime), responseCode: http.StatusOK, result: "[]\n"},
		{ts: goodServer, url: fmt.Sprintf("/api/candle?from=%v", startTime), responseCode: http.StatusOK, candles: []store.Candle{storedCandle}},
		{ts: badServer, url: fmt.Sprintf("/api/candle?from=%v&to=%v&aggregate=5m", startTime, url.QueryEscape(endTime)), responseCode: http.StatusBadRequest, result: "{\"details\":\"can't load candles\",\"error\":\"test error\"}\n"},
		{ts: goodServer, url: "/api/insert", responseCode: http.StatusBadRequest, method: http.MethodPost, result: "{\"details\":\"Problem decoding JSON\",\"error\":\"EOF\"}\n"},
		{ts: goodServer, url: "/api/insert", responseCode: http.StatusBadRequest, method: http.MethodPost, body: bytes.NewReader([]byte(`{}`)), result: "{\"details\":\"ts\",\"error\":\"missing field in JSON\"}\n"},
		{ts: goodServer, url: "/api/insert", responseCode: http.StatusBadRequest, method: http.MethodPost, body: bytes.NewReader([]byte(`{"ts":"1970-01-01T01:01:00+01:00"}`)), result: "{\"details\":\"dest\",\"error\":\"missing field in JSON\"}\n"},
		{ts: goodServer, url: "/api/insert", responseCode: http.StatusBadRequest, method: http.MethodPost, body: bytes.NewReader([]byte(`{"ts":"1970-01-01T01:01:00+01:00","dest":"test"}}`)), result: "{\"details\":\"file_name\",\"error\":\"missing field in JSON\"}\n"},
		{ts: goodServer, url: "/api/insert", responseCode: http.StatusBadRequest, method: http.MethodPost, body: bytes.NewReader([]byte(`{"ts":"1970-01-01T01:01:00+01:00","file_name":"rt_test.mp3","dest":"test"}`)), result: "{\"details\":\"from_ip\",\"error\":\"missing field in JSON\"}\n"},
		{ts: badServer, url: "/api/insert", responseCode: http.StatusOK, method: http.MethodPost, body: bytes.NewReader([]byte(`{"from_ip":"127.0.0.1","file_name":"rt_test.mp3","dest":"new_node","ts":"1970-01-01T01:01:00+01:00"}`)), result: "{\"result\":\"ok\"}\n"},
		{ts: badServer, url: "/api/insert", responseCode: http.StatusInternalServerError, method: http.MethodPost, body: bytes.NewReader([]byte(`{"from_ip":"127.0.0.1","file_name":"rt_test.mp3","dest":"new_node","ts":"1970-01-01T01:00:00+01:00"}`)), result: "{\"details\":\"Problem saving LogRecord\",\"error\":\"test error\"}\n"},
	}
	client := http.Client{}
	for i, x := range testData {
		if x.method == "" {
			x.method = http.MethodGet
		}
		req, err := http.NewRequest(x.method, x.ts.URL+x.url, x.body)
		require.NoError(t, err, i)
		b, err := client.Do(req)
		require.NoError(t, err, i)
		body, err := ioutil.ReadAll(b.Body)
		require.NoError(t, err, i)
		if x.result != "" {
			assert.Equal(t, x.result, string(body), i)
		}
		if x.candles != nil {
			var candles []store.Candle
			err = json.Unmarshal(body, &candles)
			if err != nil {
				// require.Nil on the body string surfaces the raw payload in
				// the failure message when decoding fails.
				require.Nil(t, string(body), "problem parsing response body, case %d", i)
			}
			assert.Equal(t, x.candles, candles, i)
		}
		assert.Equal(t, x.responseCode, b.StatusCode, "case %d: %v", i, string(body))
	}
}
explode_data.jsonl/63789
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1784 }
[ 2830, 3393, 5475, 7082, 1155, 353, 8840, 836, 8, 341, 3174, 1386, 5475, 11, 8358, 1661, 6639, 37496, 1669, 20567, 51, 1155, 11, 895, 340, 16867, 1661, 6639, 37496, 741, 2233, 329, 5475, 11, 8358, 3873, 6639, 37496, 1669, 20567, 51, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestMultiChmodWithRecur(t *testing.T) { fileName := []string{ recursiveDirRoot + "/chmod1.txt", recursiveDirRoot + "/chmod2.txt", recursiveDirRoot + "/chmod3.txt", } err := Touch(fileName) if err != nil { panic(err) } t.Cleanup(func() { _ = RemoveWithRecur(dirRoot) }) if !Exists(fileName) { t.Error("Multi MultiChmodWithRecur test failed!") } err = ChmodWithRecur(dirRoot, 0755) if err != nil { panic(err) } }
explode_data.jsonl/34177
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 20358, 1143, 2593, 2354, 693, 2352, 1155, 353, 8840, 836, 8, 341, 17661, 675, 1669, 3056, 917, 515, 197, 197, 49512, 6184, 8439, 488, 3521, 56274, 16, 3909, 756, 197, 197, 49512, 6184, 8439, 488, 3521, 56274, 17, 3909, 756...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStartNextHeightCorrectlyAfterTimeout walks one consensus height through
// propose/prevote/precommit, forces a precommit timeout before the final
// commit vote arrives, and then checks the next height starts cleanly with
// TriggeredTimeoutPrecommit reset to false.
func TestStartNextHeightCorrectlyAfterTimeout(t *testing.T) {
	config := configSetup(t)
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	cs1, vss := makeState(ctx, t, makeStateArgs{config: config})
	// Keep the commit timeout active so the timeout path is exercised.
	cs1.state.ConsensusParams.Timeout.BypassCommitTimeout = false
	cs1.txNotifier = &fakeTxNotifier{ch: make(chan struct{})}
	vs2, vs3, vs4 := vss[1], vss[2], vss[3]
	height, round := cs1.Height, cs1.Round
	// Event-bus subscriptions used to observe consensus progress.
	proposalCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryCompleteProposal)
	timeoutProposeCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryTimeoutPropose)
	precommitTimeoutCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryTimeoutWait)
	newRoundCh := subscribe(ctx, t, cs1.eventBus, types.EventQueryNewRound)
	newBlockHeader := subscribe(ctx, t, cs1.eventBus, types.EventQueryNewBlockHeader)
	pv1, err := cs1.privValidator.GetPubKey(ctx)
	require.NoError(t, err)
	addr := pv1.Address()
	voteCh := subscribeToVoter(ctx, t, cs1, addr)
	// start round and wait for propose and prevote
	startTestRound(ctx, cs1, height, round)
	ensureNewRound(t, newRoundCh, height, round)
	ensureNewProposal(t, proposalCh, height, round)
	rs := cs1.GetRoundState()
	blockID := types.BlockID{
		Hash:          rs.ProposalBlock.Hash(),
		PartSetHeader: rs.ProposalBlockParts.Header(),
	}
	ensurePrevoteMatch(t, voteCh, height, round, blockID.Hash)
	signAddVotes(ctx, t, cs1, tmproto.PrevoteType, config.ChainID(), blockID, vs2, vs3, vs4)
	ensurePrecommit(t, voteCh, height, round)
	// the proposed block should now be locked and our precommit added
	validatePrecommit(ctx, t, cs1, round, round, vss[0], blockID.Hash, blockID.Hash)
	// add precommits
	signAddVotes(ctx, t, cs1, tmproto.PrecommitType, config.ChainID(), types.BlockID{}, vs2)
	signAddVotes(ctx, t, cs1, tmproto.PrecommitType, config.ChainID(), blockID, vs3)
	// wait till timeout occurs
	ensureNewTimeout(t, precommitTimeoutCh, height, round, cs1.voteTimeout(round).Nanoseconds())
	ensureNewRound(t, newRoundCh, height, round+1)
	// majority is now reached
	signAddVotes(ctx, t, cs1, tmproto.PrecommitType, config.ChainID(), blockID, vs4)
	ensureNewBlockHeader(t, newBlockHeader, height, blockID.Hash)
	cs1.txNotifier.(*fakeTxNotifier).Notify()
	ensureNewTimeout(t, timeoutProposeCh, height+1, round, cs1.proposeTimeout(round).Nanoseconds())
	rs = cs1.GetRoundState()
	assert.False(
		t,
		rs.TriggeredTimeoutPrecommit,
		"triggeredTimeoutPrecommit should be false at the beginning of each round")
}
explode_data.jsonl/54290
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 904 }
[ 2830, 3393, 3479, 5847, 3640, 33092, 398, 6025, 7636, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 2193, 21821, 1155, 340, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 71899, 16, 11, 348, 778, 1669, 1281...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetFCtrlAsUint(t *testing.T) { for i, tc := range []struct { ADR bool ADRAckReq bool Ack bool FPending bool ClassB bool ExpectedFCtrl uint }{ { ExpectedFCtrl: 0x00, }, { ADR: true, ExpectedFCtrl: 0x80, }, { ADRAckReq: true, ExpectedFCtrl: 0x40, }, { Ack: true, ExpectedFCtrl: 0x20, }, { FPending: true, ExpectedFCtrl: 0x10, }, { ClassB: true, ExpectedFCtrl: 0x10, }, { ADR: true, ADRAckReq: true, Ack: true, FPending: true, ExpectedFCtrl: 0xF0, }, { ADR: true, ADRAckReq: true, Ack: true, ClassB: true, ExpectedFCtrl: 0xF0, }, } { t.Run(fmt.Sprintf("%d", i), func(t *testing.T) { a := assertions.New(t) fCtrl := getFCtrlAsUint(ttnpb.FCtrl{ ADR: tc.ADR, ADRAckReq: tc.ADRAckReq, Ack: tc.Ack, FPending: tc.FPending, ClassB: tc.ClassB, }) if !a.So(fCtrl, should.Equal, tc.ExpectedFCtrl) { t.Fatalf("Invalid FCtrl: %v", fCtrl) } }) } }
explode_data.jsonl/69427
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 709 }
[ 2830, 3393, 1949, 6754, 9599, 2121, 21570, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17130, 1669, 2088, 3056, 1235, 341, 197, 197, 96473, 1843, 1807, 198, 197, 197, 1808, 5609, 377, 27234, 257, 1807, 198, 197, 22985, 377, 1843, 180...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInvalidHardPodAffinitySymmetricWeight(t *testing.T) { handler := utiltesting.FakeHandler{ StatusCode: 500, ResponseBody: "", T: t, } server := httptest.NewServer(&handler) // TODO: Uncomment when fix #19254 // defer server.Close() client := clientset.NewForConfigOrDie(&restclient.Config{Host: server.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &api.Registry.GroupOrDie(v1.GroupName).GroupVersion}}) // factory of "default-scheduler" informerFactory := informers.NewSharedInformerFactory(client, 0) factory := NewConfigFactory( v1.DefaultSchedulerName, client, informerFactory.Core().V1().Nodes(), informerFactory.Core().V1().Pods(), informerFactory.Core().V1().PersistentVolumes(), informerFactory.Core().V1().PersistentVolumeClaims(), informerFactory.Core().V1().ReplicationControllers(), informerFactory.Extensions().V1beta1().ReplicaSets(), informerFactory.Apps().V1beta1().StatefulSets(), informerFactory.Core().V1().Services(), -1, enableEquivalenceCache, ) _, err := factory.Create() if err == nil { t.Errorf("expected err: invalid hardPodAffinitySymmetricWeight, got nothing") } }
explode_data.jsonl/13327
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 7928, 26907, 23527, 25841, 13489, 27912, 15903, 8295, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 4094, 8840, 991, 726, 3050, 515, 197, 197, 15872, 25, 256, 220, 20, 15, 15, 345, 197, 197, 29637, 25, 8324, 197, 10261, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCrdBase(t *testing.T) { th := kusttest_test.MakeHarness(t) writeBaseWithCrd(th) m := th.Run("/app/base", th.MakeDefaultOptions()) th.AssertActualEqualsExpected(m, ` apiVersion: v1 data: PATH: yellowBrickRoad kind: Secret metadata: name: x-crdsecret --- apiVersion: jingfang.example.com/v1beta1 kind: MyKind metadata: name: x-mykind spec: beeRef: name: x-bee secretRef: name: x-crdsecret --- apiVersion: v1beta1 kind: Bee metadata: name: x-bee spec: action: fly `) }
explode_data.jsonl/24090
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 34, 6498, 3978, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 595, 590, 1944, 4452, 50133, 74248, 1155, 340, 24945, 3978, 2354, 34, 6498, 24365, 340, 2109, 1669, 270, 16708, 4283, 676, 26090, 497, 270, 50133, 3675, 3798, 2398, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRedirects(t *testing.T) { c := NewCtx(t) defer c.Close() checkRedirect(c, AccessUser, "/", "/test1", http.StatusFound) // redirect to default namespace checkRedirect(c, AccessAdmin, "/", "/admin", http.StatusFound) checkLoginRedirect(c, AccessPublic, "/access-user") // not accessible namespace _, err := c.AuthGET(AccessUser, "/access-user") c.expectOK(err) }
explode_data.jsonl/74729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 17725, 82, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 23684, 1155, 340, 16867, 272, 10421, 2822, 25157, 17725, 1337, 11, 9549, 1474, 11, 64657, 3521, 1944, 16, 497, 1758, 10538, 6650, 8, 442, 6423, 311, 1638, 4473, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateOutputResultRegex(t *testing.T) { dagTmpl := &wfv1.Template{DAG: &wfv1.DAGTemplate{}} ref, expr := generateOutputResultRegex("template-name", dagTmpl) assert.Equal(t, `tasks\.template-name\.outputs\.result`, ref) assert.Equal(t, `tasks\[['\"]template-name['\"]\]\.outputs.result`, expr) stepsTmpl := &wfv1.Template{Steps: []wfv1.ParallelSteps{}} ref, expr = generateOutputResultRegex("template-name", stepsTmpl) assert.Equal(t, `steps\.template-name\.outputs\.result`, ref) assert.Equal(t, `steps\[['\"]template-name['\"]\]\.outputs.result`, expr) }
explode_data.jsonl/71043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 31115, 5097, 2077, 32464, 1155, 353, 8840, 836, 8, 341, 2698, 351, 51, 54010, 1669, 609, 86, 27890, 16, 52530, 90, 35, 1890, 25, 609, 86, 27890, 16, 909, 1890, 7275, 6257, 532, 59504, 11, 15169, 1669, 6923, 5097, 2077, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrompt_NotifiesException(t *testing.T) { f := setup(rc(`edit:prompt = { fail ERROR }`)) defer f.Cleanup() f.TestTTYNotes(t, "[prompt error] ERROR\n", `see stack trace with "show $edit:exceptions[0]"`) evals(f.Evaler, `excs = (count $edit:exceptions)`) testGlobal(t, f.Evaler, "excs", "1") }
explode_data.jsonl/46581
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 54615, 60816, 9606, 1354, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 6505, 35429, 5809, 3587, 25, 40581, 284, 314, 3690, 12874, 335, 63, 1171, 16867, 282, 727, 60639, 2822, 1166, 8787, 55544, 21667, 1155, 345, 197, 197, 36864...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindAndAddNewPods_WithReprocessPodAndVolumeRetrievalError(t *testing.T) { // create dswp dswp, fakePodManager := prepareDswpWithVolume(t) // create pod containers := []v1.Container{ { VolumeMounts: []v1.VolumeMount{ { Name: "dswp-test-volume-name", MountPath: "/mnt", }, }, }, } pod := createPodWithVolume("dswp-test-pod", "dswp-test-volume-name", "file-bound", containers) fakePodManager.AddPod(pod) podName := util.GetUniquePodName(pod) dswp.findAndAddNewPods() if !dswp.podPreviouslyProcessed(podName) { t.Fatalf("Failed to record that the volumes for the specified pod: %s have been processed by the populator", podName) } pluginPVOmittingClient(dswp) dswp.ReprocessPod(podName) dswp.findAndAddNewPods() if !dswp.podPreviouslyProcessed(podName) { t.Fatalf("Failed to record that the volumes for the specified pod: %s have been processed by the populator", podName) } fakePodManager.DeletePod(pod) }
explode_data.jsonl/45745
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 9885, 3036, 2212, 3564, 23527, 82, 62, 2354, 693, 4630, 23527, 3036, 18902, 12020, 7231, 831, 1454, 1155, 353, 8840, 836, 8, 341, 197, 322, 1855, 294, 2280, 79, 198, 2698, 2280, 79, 11, 12418, 23527, 2043, 1669, 10549, 35,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSendSnapshotThrottling(t *testing.T) { defer leaktest.AfterTest(t)() e := storage.NewDefaultInMem() defer e.Close() ctx := context.Background() var cfg base.RaftConfig cfg.SetDefaults() st := cluster.MakeTestingClusterSettings() header := SnapshotRequest_Header{ CanDecline: true, State: kvserverpb.ReplicaState{ Desc: &roachpb.RangeDescriptor{RangeID: 1}, }, } newBatch := e.NewBatch // Test that a failed Recv() causes a fail throttle { sp := &fakeStorePool{} expectedErr := errors.New("") c := fakeSnapshotStream{nil, expectedErr} err := sendSnapshot(ctx, &cfg, st, c, sp, header, nil, newBatch, nil) if sp.failedThrottles != 1 { t.Fatalf("expected 1 failed throttle, but found %d", sp.failedThrottles) } if !errors.Is(err, expectedErr) { t.Fatalf("expected error %s, but found %s", err, expectedErr) } } // Test that a declined snapshot causes a decline throttle. { sp := &fakeStorePool{} resp := &SnapshotResponse{ Status: SnapshotResponse_DECLINED, } c := fakeSnapshotStream{resp, nil} err := sendSnapshot(ctx, &cfg, st, c, sp, header, nil, newBatch, nil) if sp.declinedThrottles != 1 { t.Fatalf("expected 1 declined throttle, but found %d", sp.declinedThrottles) } if err == nil { t.Fatalf("expected error, found nil") } } // Test that a declined but required snapshot causes a fail throttle. { sp := &fakeStorePool{} header.CanDecline = false resp := &SnapshotResponse{ Status: SnapshotResponse_DECLINED, } c := fakeSnapshotStream{resp, nil} err := sendSnapshot(ctx, &cfg, st, c, sp, header, nil, newBatch, nil) if sp.failedThrottles != 1 { t.Fatalf("expected 1 failed throttle, but found %d", sp.failedThrottles) } if err == nil { t.Fatalf("expected error, found nil") } } // Test that an errored snapshot causes a fail throttle. 
{ sp := &fakeStorePool{} resp := &SnapshotResponse{ Status: SnapshotResponse_ERROR, } c := fakeSnapshotStream{resp, nil} err := sendSnapshot(ctx, &cfg, st, c, sp, header, nil, newBatch, nil) if sp.failedThrottles != 1 { t.Fatalf("expected 1 failed throttle, but found %d", sp.failedThrottles) } if err == nil { t.Fatalf("expected error, found nil") } } }
explode_data.jsonl/116
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 874 }
[ 2830, 3393, 11505, 15009, 1001, 46689, 2718, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 7727, 1669, 5819, 7121, 3675, 641, 18816, 741, 16867, 384, 10421, 2822, 20985, 1669, 2266, 19047, 741, 2405, 13286, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestObjectUpdate2(t *testing.T) { ctx := context.Background() c, rollback := makeConnectionWithObjectHeaders(t) defer rollback() err := c.ObjectUpdate(ctx, CONTAINER, OBJECT, m2.ObjectHeaders()) if err != nil { t.Fatal(err) } _, headers, err := c.Object(ctx, CONTAINER, OBJECT) if err != nil { t.Fatal(err) } compareMaps(t, headers.ObjectMetadata(), map[string]string{"hello": "", "potato-salad": ""}) }
explode_data.jsonl/12692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 1190, 4289, 17, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 11, 60414, 1669, 1281, 4526, 2354, 1190, 10574, 1155, 340, 16867, 60414, 741, 9859, 1669, 272, 8348, 4289, 7502, 11, 16120, 34521, 11, 39786,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMinHeapify(t *testing.T) { var heap = []int{0, 10, 5, 20} want := []int{0, 5, 10, 20} minHeapify(&heap, len(heap)-1, 1) if !reflect.DeepEqual(want, heap) { t.Errorf("wanted heap %v got %v", want, heap) } }
explode_data.jsonl/22505
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 6217, 27909, 1437, 1155, 353, 8840, 836, 8, 341, 2405, 17364, 284, 3056, 396, 90, 15, 11, 220, 16, 15, 11, 220, 20, 11, 220, 17, 15, 630, 50780, 1669, 3056, 396, 90, 15, 11, 220, 20, 11, 220, 16, 15, 11, 220, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInitContextWithAuthProxy_CachedInvalidUserID(t *testing.T) { const name = "markelog" const userID = int64(1) const orgID = int64(4) upsertHandler := func(cmd *models.UpsertUserCommand) error { require.Equal(t, name, cmd.ExternalUser.Login) cmd.Result = &models.User{Id: userID} return nil } getUserHandler := func(cmd *models.GetSignedInUserQuery) error { // Simulate that the cached user ID is stale if cmd.UserId != userID { return models.ErrUserNotFound } cmd.Result = &models.SignedInUser{ UserId: userID, OrgId: orgID, } return nil } bus.AddHandler("", upsertHandler) bus.AddHandler("", getUserHandler) t.Cleanup(func() { bus.ClearBusHandlers() }) svc := getContextHandler(t) req, err := http.NewRequest("POST", "http://example.com", nil) require.NoError(t, err) ctx := &models.ReqContext{ Context: &macaron.Context{ Req: macaron.Request{ Request: req, }, Data: map[string]interface{}{}, }, Logger: log.New("Test"), } req.Header.Set(svc.Cfg.AuthProxyHeaderName, name) key := fmt.Sprintf(authproxy.CachePrefix, authproxy.HashCacheKey(name)) t.Logf("Injecting stale user ID in cache with key %q", key) err = svc.RemoteCache.Set(key, int64(33), 0) require.NoError(t, err) authEnabled := svc.initContextWithAuthProxy(ctx, orgID) require.True(t, authEnabled) require.Equal(t, userID, ctx.SignedInUser.UserId) require.True(t, ctx.IsSignedIn) i, err := svc.RemoteCache.Get(key) require.NoError(t, err) require.Equal(t, userID, i.(int64)) }
explode_data.jsonl/39296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 614 }
[ 2830, 3393, 3803, 1972, 2354, 5087, 16219, 920, 3854, 7928, 36899, 1155, 353, 8840, 836, 8, 341, 4777, 829, 284, 330, 3987, 50571, 698, 4777, 35204, 284, 526, 21, 19, 7, 16, 340, 4777, 1240, 915, 284, 526, 21, 19, 7, 19, 692, 5981...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQueryVote(t *testing.T) { ctx, k := keeper.MakeTestCtxAndKeeper(t) var cdc = codec.New() name := "name" valPubKey := keeper.MakeTestPubKey(keeper.SamplePubKey) valAddr := sdk.ValAddress(valPubKey.Address().Bytes()) accAddr := sdk.AccAddress(valPubKey.Address().Bytes()) validator := types.NewValidator( name, valAddr, valPubKey, stakingtypes.Description{"nil", "nil", "nil", "nil", "nil"}, ) k.SetValidator(ctx, name, validator) msg := msg.NewMsgVoteValidator(name, valAddr, true, accAddr) handleMsgVoteValidator(ctx, msg, k) bz, _ := cdc.MarshalJSON(types.NewQueryVoteParams(name, valAddr.String())) query := abci.RequestQuery{ Path: fmt.Sprintf("custom/%s/vote-poa/%s", types.QuerierRoute, name), Data: bz, } _, err := queryVote(ctx, query, k) require.NoError(t, err) }
explode_data.jsonl/81240
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 2859, 41412, 1155, 353, 8840, 836, 8, 341, 20985, 11, 595, 1669, 53416, 50133, 2271, 23684, 3036, 77233, 1155, 340, 2405, 272, 7628, 284, 34647, 7121, 2822, 11609, 1669, 330, 606, 698, 19302, 29162, 1592, 1669, 53416, 50133, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSolidOwnerDoesNotBlockWaitingOwner(t *testing.T) { ctx := setup(t, 5) defer ctx.tearDown() clientSet := ctx.clientSet ns := createNamespaceOrDie("gc-foreground1", clientSet, t) defer deleteNamespaceOrDie(ns.Name, clientSet, t) podClient := clientSet.CoreV1().Pods(ns.Name) rcClient := clientSet.CoreV1().ReplicationControllers(ns.Name) // create the RC with the orphan finalizer set toBeDeletedRC, err := rcClient.Create(newOwnerRC(toBeDeletedRCName, ns.Name)) if err != nil { t.Fatalf("Failed to create replication controller: %v", err) } remainingRC, err := rcClient.Create(newOwnerRC(remainingRCName, ns.Name)) if err != nil { t.Fatalf("Failed to create replication controller: %v", err) } trueVar := true pod := newPod("pod", ns.Name, []metav1.OwnerReference{ {UID: toBeDeletedRC.ObjectMeta.UID, Name: toBeDeletedRC.Name, BlockOwnerDeletion: &trueVar}, {UID: remainingRC.ObjectMeta.UID, Name: remainingRC.Name}, }) _, err = podClient.Create(pod) if err != nil { t.Fatalf("Failed to create Pod: %v", err) } err = rcClient.Delete(toBeDeletedRCName, getForegroundOptions()) if err != nil { t.Fatalf("Failed to delete the rc: %v", err) } // verify the toBeDeleteRC is deleted if err := wait.PollImmediate(1*time.Second, 30*time.Second, func() (bool, error) { _, err := rcClient.Get(toBeDeletedRC.Name, metav1.GetOptions{}) if err != nil { if apierrors.IsNotFound(err) { return true, nil } return false, err } return false, nil }); err != nil { t.Errorf("unexpected error: %v", err) } // verify pods don't have the toBeDeleteRC as an owner anymore pod, err = podClient.Get("pod", metav1.GetOptions{}) if err != nil { t.Fatalf("Failed to list pods: %v", err) } if len(pod.ObjectMeta.OwnerReferences) != 1 { t.Errorf("expect pod to have only one ownerReference: got %#v", pod.ObjectMeta.OwnerReferences) } else if pod.ObjectMeta.OwnerReferences[0].Name != remainingRC.Name { t.Errorf("expect pod to have an ownerReference pointing to %s, got %#v", remainingRC.Name, 
pod.ObjectMeta.OwnerReferences) } }
explode_data.jsonl/18179
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 772 }
[ 2830, 3393, 45941, 13801, 21468, 2623, 4713, 42104, 13801, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 6505, 1155, 11, 220, 20, 340, 16867, 5635, 31853, 59342, 2822, 25291, 1649, 1669, 5635, 6581, 1649, 271, 84041, 1669, 1855, 22699, 2195,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFileMethods(t *testing.T) { r := fstest.NewRun(t) defer r.Finalise() vfs, file, _ := fileCreate(t, r, vfscommon.CacheModeOff) // String assert.Equal(t, "dir/file1", file.String()) assert.Equal(t, "<nil *File>", (*File)(nil).String()) // IsDir assert.Equal(t, false, file.IsDir()) // IsFile assert.Equal(t, true, file.IsFile()) // Mode assert.Equal(t, vfs.Opt.FilePerms, file.Mode()) // Name assert.Equal(t, "file1", file.Name()) // Path assert.Equal(t, "dir/file1", file.Path()) // Sys assert.Equal(t, nil, file.Sys()) // SetSys file.SetSys(42) assert.Equal(t, 42, file.Sys()) // Inode assert.NotEqual(t, uint64(0), file.Inode()) // Node assert.Equal(t, file, file.Node()) // ModTime assert.WithinDuration(t, t1, file.ModTime(), r.Fremote.Precision()) // Size assert.Equal(t, int64(14), file.Size()) // Sync assert.NoError(t, file.Sync()) // DirEntry assert.Equal(t, file.o, file.DirEntry()) // Dir assert.Equal(t, file.d, file.Dir()) // VFS assert.Equal(t, vfs, file.VFS()) }
explode_data.jsonl/9731
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 445 }
[ 2830, 3393, 1703, 17856, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 48434, 477, 7121, 6727, 1155, 340, 16867, 435, 991, 977, 1064, 741, 5195, 3848, 11, 1034, 11, 716, 1669, 1034, 4021, 1155, 11, 435, 11, 92941, 5464, 46130, 3636, 4596,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCollectoArgumentsOpenshiftTLS(t *testing.T) { viper.Set("platform", v1.FlagPlatformOpenShift) defer viper.Reset() for _, tt := range []struct { name string options v1.Options expectedCert string expectedKey string }{ { name: "Openshift certificates", options: v1.NewOptions(map[string]interface{}{ "a-option": "a-value", }), expectedCert: "/etc/tls-config/tls.crt", expectedKey: "/etc/tls-config/tls.key", }, { name: "Custom certificates", options: v1.NewOptions(map[string]interface{}{ "a-option": "a-value", "collector.grpc.tls.enabled": "true", "collector.grpc.tls.cert": "/my/custom/cert", "collector.grpc.tls.key": "/my/custom/key", }), expectedCert: "/my/custom/cert", expectedKey: "/my/custom/key", }, } { t.Run(tt.name, func(t *testing.T) { jaeger := v1.NewJaeger(types.NamespacedName{Name: "my-instance"}) jaeger.Spec.Collector.Options = tt.options a := NewCollector(jaeger) dep := a.Get() assert.Len(t, dep.Spec.Template.Spec.Containers, 1) assert.Len(t, dep.Spec.Template.Spec.Containers[0].Args, 5) assert.Greater(t, len(util.FindItem("--a-option=a-value", dep.Spec.Template.Spec.Containers[0].Args)), 0) // the following are added automatically assert.Greater(t, len(util.FindItem("--collector.grpc.tls.enabled=true", dep.Spec.Template.Spec.Containers[0].Args)), 0) assert.Greater(t, len(util.FindItem("--collector.grpc.tls.cert="+tt.expectedCert, dep.Spec.Template.Spec.Containers[0].Args)), 0) assert.Greater(t, len(util.FindItem("--collector.grpc.tls.key="+tt.expectedKey, dep.Spec.Template.Spec.Containers[0].Args)), 0) assert.Greater(t, len(util.FindItem("--sampling.strategies-file", dep.Spec.Template.Spec.Containers[0].Args)), 0) }) } }
explode_data.jsonl/59538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 793 }
[ 2830, 3393, 47504, 78, 19139, 36771, 47833, 45439, 1155, 353, 8840, 836, 8, 341, 5195, 12858, 4202, 445, 15734, 497, 348, 16, 80911, 17296, 5002, 24841, 340, 16867, 95132, 36660, 2822, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiWidthCodedIndex_MarshalUnmarshal(t *testing.T) { rng := rand.New(rand.NewSource(1413)) records := generateIndexRecords(t, multihash.SHA2_256, rng) // Create a new mh sorted index and load randomly generated records into it. subject, err := index.New(multicodec.CarMultihashIndexSorted) require.NoError(t, err) err = subject.Load(records) require.NoError(t, err) // Marshal the index. buf := new(bytes.Buffer) _, err = subject.Marshal(buf) require.NoError(t, err) // Unmarshal it back to another instance of mh sorted index. umSubject, err := index.New(multicodec.CarMultihashIndexSorted) require.NoError(t, err) err = umSubject.Unmarshal(buf) require.NoError(t, err) // Assert original records are present in both index instances with expected offset. requireContainsAll(t, subject, records) requireContainsAll(t, umSubject, records) }
explode_data.jsonl/71542
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 20358, 3327, 34, 6737, 1552, 1245, 28423, 1806, 27121, 1155, 353, 8840, 836, 8, 341, 7000, 968, 1669, 10382, 7121, 37595, 7121, 3608, 7, 16, 19, 16, 18, 1171, 197, 26203, 1669, 6923, 1552, 25876, 1155, 11, 2745, 6996, 988,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunPipeWithProxiedRepo(t *testing.T) { folder := testlib.Mktmp(t) proxied := filepath.Join(folder, "dist/proxy/default") require.NoError(t, os.MkdirAll(proxied, 0o750)) require.NoError(t, ioutil.WriteFile( filepath.Join(proxied, "main.go"), []byte("// +build: main\npackage main\nimport github.com/goreleaser/goreleaser"), 0o666, )) require.NoError(t, ioutil.WriteFile( filepath.Join(proxied, "go.mod"), []byte("module foo\nrequire github.com/goreleaser/goreleaser v0.161.1"), 0o666, )) cmd := exec.Command("go", "mod", "download") cmd.Dir = proxied require.NoError(t, cmd.Run()) config := config.Project{ GoMod: config.GoMod{ Proxy: true, }, Builds: []config.Build{ { Binary: "foo", Hooks: config.HookConfig{}, Main: "github.com/goreleaser/goreleaser", Dir: proxied, Targets: []string{ runtimeTarget, }, GoBinary: "go", }, }, } ctx := context.New(config) require.NoError(t, Default.Build(ctx, ctx.Config.Builds[0], api.Options{ Target: runtimeTarget, })) }
explode_data.jsonl/54151
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 477 }
[ 2830, 3393, 6727, 34077, 2354, 1336, 87, 1122, 25243, 1155, 353, 8840, 836, 8, 341, 1166, 2018, 1669, 1273, 2740, 1321, 74, 5173, 1155, 340, 197, 41498, 1122, 1669, 26054, 22363, 33929, 11, 330, 12211, 18008, 4130, 28989, 1138, 17957, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileHelper_GetPathAndFileNameExt_05(t *testing.T) { fh := FileHelper{} pathDir, fileNameExt, bothAreEmpty, err := fh.GetPathAndFileNameExt("") if err == nil { t.Error("Expected error return from fh.GetPathAndFileNameExt(\"\") because " + "the input parameter is an empty string. " + "However, NO ERROR WAS RETURNED!") } if pathDir != "" { t.Errorf("Expected pathDir would be an empty string. Instead, pathDir='%v'", pathDir) } if fileNameExt != "" { t.Errorf("Expected fileNameExt would be an empty string. Instead, pathDir='%v'", fileNameExt) } if bothAreEmpty == false { t.Error("Expected bothAreEmpty='true'. Instead, bothArEmpty='false'. ") } }
explode_data.jsonl/14477
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 1703, 5511, 13614, 1820, 3036, 10903, 6756, 62, 15, 20, 1155, 353, 8840, 836, 8, 1476, 220, 36075, 1669, 2887, 5511, 31483, 220, 1815, 6184, 11, 12665, 6756, 11, 2176, 11526, 3522, 11, 1848, 1669, 36075, 2234, 1820, 3036, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_parseCliConfiguration(t *testing.T) { tests := []struct { name string wantCfg *Configuration wantErr bool }{ // TODO: Add test cases. } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { gotCfg, err := parseCliConfiguration() if (err != nil) != tt.wantErr { t.Errorf("parseCliConfiguration() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(gotCfg, tt.wantCfg) { t.Errorf("parseCliConfiguration() = %v, want %v", gotCfg, tt.wantCfg) } }) } }
explode_data.jsonl/75686
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 21039, 87014, 7688, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 50780, 42467, 353, 7688, 198, 197, 50780, 7747, 1807, 198, 197, 59403, 197, 197, 322, 5343, 25, 2691, 1273, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidateSignalfxRelativeTimeWeeks(t *testing.T) { _, errors := validateSignalfxRelativeTime("-5w", "time_range") assert.Equal(t, 0, len(errors)) }
explode_data.jsonl/32251
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 60 }
[ 2830, 3393, 17926, 7264, 3104, 87, 28442, 1462, 17053, 82, 1155, 353, 8840, 836, 8, 341, 197, 6878, 5975, 1669, 9593, 7264, 3104, 87, 28442, 1462, 13645, 20, 86, 497, 330, 1678, 9698, 1138, 6948, 12808, 1155, 11, 220, 15, 11, 2422, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSerialReap(t *testing.T) { app := counter.NewApplication(true) cc := proxy.NewLocalClientCreator(app) mempool, cleanup := newMempoolWithApp(cc) defer cleanup() appConnCon, _ := cc.NewABCIClient() appConnCon.SetLogger(log.TestingLogger().With("module", "abci-client", "connection", "consensus")) err := appConnCon.Start() require.Nil(t, err) cacheMap := make(map[string]struct{}) deliverTxsRange := func(start, end int) { // Deliver some txs. for i := start; i < end; i++ { // This will succeed txBytes := make([]byte, 8) binary.BigEndian.PutUint64(txBytes, uint64(i)) err := mempool.CheckTx(txBytes, nil, TxInfo{}) _, cached := cacheMap[string(txBytes)] if cached { require.NotNil(t, err, "expected error for cached tx") } else { require.Nil(t, err, "expected no err for uncached tx") } cacheMap[string(txBytes)] = struct{}{} // Duplicates are cached and should return error err = mempool.CheckTx(txBytes, nil, TxInfo{}) require.NotNil(t, err, "Expected error after CheckTx on duplicated tx") } } reapCheck := func(exp int) { txs := mempool.ReapMaxBytesMaxGas(-1, -1) require.Equal(t, len(txs), exp, fmt.Sprintf("Expected to reap %v txs but got %v", exp, len(txs))) } updateRange := func(start, end int) { txs := make([]types.Tx, 0) for i := start; i < end; i++ { txBytes := make([]byte, 8) binary.BigEndian.PutUint64(txBytes, uint64(i)) txs = append(txs, txBytes) } if err := mempool.Update(0, txs, abciResponses(len(txs), abci.CodeTypeOK), nil, nil); err != nil { t.Error(err) } } commitRange := func(start, end int) { ctx := context.Background() // Deliver some txs. for i := start; i < end; i++ { txBytes := make([]byte, 8) binary.BigEndian.PutUint64(txBytes, uint64(i)) res, err := appConnCon.DeliverTxSync(ctx, abci.RequestDeliverTx{Tx: txBytes}) if err != nil { t.Errorf("client error committing tx: %v", err) } if res.IsErr() { t.Errorf("error committing tx. 
Code:%v result:%X log:%v", res.Code, res.Data, res.Log) } } res, err := appConnCon.CommitSync(ctx) if err != nil { t.Errorf("client error committing: %v", err) } if len(res.Data) != 8 { t.Errorf("error committing. Hash:%X", res.Data) } } //---------------------------------------- // Deliver some txs. deliverTxsRange(0, 100) // Reap the txs. reapCheck(100) // Reap again. We should get the same amount reapCheck(100) // Deliver 0 to 999, we should reap 900 new txs // because 100 were already counted. deliverTxsRange(0, 1000) // Reap the txs. reapCheck(1000) // Reap again. We should get the same amount reapCheck(1000) // Commit from the conensus AppConn commitRange(0, 500) updateRange(0, 500) // We should have 500 left. reapCheck(500) // Deliver 100 invalid txs and 100 valid txs deliverTxsRange(900, 1100) // We should have 600 now. reapCheck(600) }
explode_data.jsonl/14613
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1182 }
[ 2830, 3393, 5915, 693, 391, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 5546, 7121, 4988, 3715, 340, 63517, 1669, 13291, 7121, 7319, 2959, 31865, 11462, 692, 2109, 3262, 1749, 11, 21290, 1669, 501, 44, 3262, 1749, 2354, 2164, 31424, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRejectBadExplainer(t *testing.T) { g := gomega.NewGomegaWithT(t) isvc := makeTestInferenceService() isvc.Spec.Default.Explainer = &ExplainerSpec{} g.Expect(isvc.validate(c)).Should(gomega.MatchError(ExactlyOneExplainerViolatedError)) }
explode_data.jsonl/1498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 78413, 17082, 43953, 1743, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 19907, 7362, 1669, 1281, 2271, 641, 2202, 1860, 741, 19907, 7362, 36473, 13275, 5121, 500, 1743, 284, 609...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMergeAndGet(t *testing.T) { cases := []struct { inputDoc jsoniter.RawMessage existingDoc jsoniter.RawMessage outputDoc jsoniter.RawMessage apply FieldOPType }{ { []byte(`{"a": 10}`), []byte(`{"a": 1, "b": "foo", "c": 1.01, "d": {"f": 22, "g": 44}}`), []byte(`{"a": 10, "b": "foo", "c": 1.01, "d": {"f": 22, "g": 44}}`), set, }, { []byte(`{"b": "bar", "a": 10}`), []byte(`{"a": 1, "b": "foo", "c": 1.01, "d": {"f": 22, "g": 44}}`), []byte(`{"a": 10, "b": "bar", "c": 1.01, "d": {"f": 22, "g": 44}}`), set, }, { []byte(`{"b": "test", "c": 10.22}`), []byte(`{"a": 1, "b": "foo", "c": 1.01, "d": {"f": 22, "g": 44}}`), []byte(`{"a": 1, "b": "test", "c": 10.22, "d": {"f": 22, "g": 44}}`), set, }, { []byte(`{"c": 10.000022, "e": "new"}`), []byte(`{"a": 1, "b": "foo", "c": 1.01, "d": {"f": 22, "g": 44}}`), []byte(`{"a": 1, "b": "foo", "c": 10.000022, "d": {"f": 22, "g": 44},"e":"new"}`), set, }, { []byte(`{"e": "again", "a": 1.000000022, "c": 23}`), []byte(`{"a": 1, "b": "foo", "c": 1.01, "d": {"f": 22, "g": 44}}`), []byte(`{"a": 1.000000022, "b": "foo", "c": 23, "d": {"f": 22, "g": 44},"e":"again"}`), set, }, } for _, c := range cases { reqInput := []byte(fmt.Sprintf(`{"%s": %s}`, c.apply, c.inputDoc)) f, err := BuildFieldOperators(reqInput) require.NoError(t, err) actualOut, err := f.MergeAndGet(c.existingDoc) require.NoError(t, err) require.Equal(t, c.outputDoc, actualOut, fmt.Sprintf("exp '%s' actual '%s'", string(c.outputDoc), string(actualOut))) } }
explode_data.jsonl/41291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 818 }
[ 2830, 3393, 52096, 97726, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 22427, 9550, 262, 2951, 2015, 50575, 2052, 198, 197, 8122, 11083, 9550, 2951, 2015, 50575, 2052, 198, 197, 21170, 9550, 256, 2951, 2015, 505...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStakingResponse_ValidateSanityData(t *testing.T) { type fields struct { MetadataBase metadataCommon.MetadataBase status string txReqID string } type args struct { chainRetriever metadataCommon.ChainRetriever shardViewRetriever metadataCommon.ShardViewRetriever beaconViewRetriever metadataCommon.BeaconViewRetriever beaconHeight uint64 tx metadataCommon.Transaction } tests := []struct { name string fields fields args args want bool want1 bool wantErr bool }{ { name: "Invalid status", fields: fields{ status: common.Pdexv3AcceptStakingStatus, }, args: args{}, want: false, want1: false, wantErr: true, }, { name: "txReqID is invalid", fields: fields{ status: common.Pdexv3RejectStakingStatus, }, args: args{}, want: false, want1: false, wantErr: true, }, { name: "txReqID is empty", fields: fields{ status: common.Pdexv3RejectStakingStatus, txReqID: common.Hash{}.String(), }, args: args{}, want: false, want1: false, wantErr: true, }, { name: "Valid Input", fields: fields{ status: common.Pdexv3RejectStakingStatus, txReqID: common.PRVIDStr, }, args: args{}, want: true, want1: true, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { response := &StakingResponse{ MetadataBase: tt.fields.MetadataBase, status: tt.fields.status, txReqID: tt.fields.txReqID, } got, got1, err := response.ValidateSanityData(tt.args.chainRetriever, tt.args.shardViewRetriever, tt.args.beaconViewRetriever, tt.args.beaconHeight, tt.args.tx) if (err != nil) != tt.wantErr { t.Errorf("StakingResponse.ValidateSanityData() error = %v, wantErr %v", err, tt.wantErr) return } if got != tt.want { t.Errorf("StakingResponse.ValidateSanityData() got = %v, want %v", got, tt.want) } if got1 != tt.want1 { t.Errorf("StakingResponse.ValidateSanityData() got1 = %v, want %v", got1, tt.want1) } }) } }
explode_data.jsonl/80859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1016 }
[ 2830, 3393, 623, 1765, 2582, 62, 17926, 23729, 487, 1043, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 9209, 7603, 3978, 11160, 10839, 46475, 3978, 198, 197, 23847, 981, 914, 198, 197, 46237, 27234, 915, 414, 914, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPostWithRetries(t *testing.T) { var count int ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { count++ w.Header().Set("Replay-Nonce", fmt.Sprintf("nonce%d", count)) if r.Method == "HEAD" { // We expect the client to do 2 head requests to fetch // nonces, one to start and another after getting badNonce return } head, err := decodeJWSHead(r.Body) switch { case err != nil: t.Errorf("decodeJWSHead: %v", err) case head.Nonce == "": t.Error("head.Nonce is empty") case head.Nonce == "nonce1": // Return a badNonce error to force the call to retry. w.Header().Set("Retry-After", "0") w.WriteHeader(http.StatusBadRequest) w.Write([]byte(`{"type":"urn:ietf:params:acme:error:badNonce"}`)) return } // Make client.Authorize happy; we're not testing its result. w.WriteHeader(http.StatusCreated) w.Write([]byte(`{"status":"valid"}`)) })) defer ts.Close() client := &Client{ Key: testKey, DirectoryURL: ts.URL, dir: &Directory{AuthzURL: ts.URL}, } // This call will fail with badNonce, causing a retry if _, err := client.Authorize(context.Background(), "example.com"); err != nil { t.Errorf("client.Authorize 1: %v", err) } if count != 4 { t.Errorf("total requests count: %d; want 4", count) } }
explode_data.jsonl/38187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 541 }
[ 2830, 3393, 4133, 2354, 12020, 4019, 1155, 353, 8840, 836, 8, 341, 2405, 1760, 526, 198, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 18032, 22940, 197, 6692, 1575...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_BuildSubjectFromRTB(t *testing.T) { type testCase struct { from interface{} to rbacv1.Subject iserr bool } userSubject := rbacv1.Subject{ Kind: "User", Name: "tmp-user", } groupSubject := rbacv1.Subject{ Kind: "Group", Name: "tmp-group", } saSubject := rbacv1.Subject{ Kind: "ServiceAccount", Name: "tmp-sa", Namespace: "tmp-namespace", } testCases := []testCase{ testCase{ from: nil, iserr: true, }, testCase{ from: &v3.ProjectRoleTemplateBinding{ UserName: userSubject.Name, }, to: userSubject, }, testCase{ from: &v3.ProjectRoleTemplateBinding{ GroupName: groupSubject.Name, }, to: groupSubject, }, testCase{ from: &v3.ProjectRoleTemplateBinding{ ServiceAccount: fmt.Sprintf("%s:%s", saSubject.Namespace, saSubject.Name), }, to: saSubject, }, testCase{ from: &v3.ClusterRoleTemplateBinding{ UserName: userSubject.Name, }, to: userSubject, }, testCase{ from: &v3.ClusterRoleTemplateBinding{ GroupName: groupSubject.Name, }, to: groupSubject, }, testCase{ from: &v3.ProjectRoleTemplateBinding{ ServiceAccount: "wrong-format", }, iserr: true, }, } for _, tcase := range testCases { output, err := BuildSubjectFromRTB(tcase.from) if tcase.iserr && err == nil { t.Errorf("roletemplatebinding %v should return error", tcase.from) } else if !tcase.iserr && !reflect.DeepEqual(tcase.to, output) { t.Errorf("the subject %v from roletemplatebinding %v is mismatched, expect %v", output, tcase.from, tcase.to) } } }
explode_data.jsonl/13085
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 704 }
[ 2830, 3393, 96686, 13019, 3830, 5350, 33, 1155, 353, 8840, 836, 8, 341, 13158, 54452, 2036, 341, 197, 42727, 220, 3749, 16094, 197, 31709, 262, 18717, 580, 85, 16, 57388, 198, 197, 19907, 615, 1807, 198, 197, 630, 19060, 13019, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGetChannelsForTeamForUser(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() defer th.TearDown() Client := th.Client channels, resp := Client.GetChannelsForTeamForUser(th.BasicTeam.Id, th.BasicUser.Id, "") CheckNoError(t, resp) found := make([]bool, 3) for _, c := range channels { if c.Id == th.BasicChannel.Id { found[0] = true } else if c.Id == th.BasicChannel2.Id { found[1] = true } else if c.Id == th.BasicPrivateChannel.Id { found[2] = true } if c.TeamId != th.BasicTeam.Id && c.TeamId != "" { t.Fatal("wrong team") } } for _, f := range found { if !f { t.Fatal("missing a channel") } } channels, resp = Client.GetChannelsForTeamForUser(th.BasicTeam.Id, th.BasicUser.Id, resp.Etag) CheckEtag(t, channels, resp) _, resp = Client.GetChannelsForTeamForUser(th.BasicTeam.Id, "junk", "") CheckBadRequestStatus(t, resp) _, resp = Client.GetChannelsForTeamForUser("junk", th.BasicUser.Id, "") CheckBadRequestStatus(t, resp) _, resp = Client.GetChannelsForTeamForUser(th.BasicTeam.Id, th.BasicUser2.Id, "") CheckForbiddenStatus(t, resp) _, resp = Client.GetChannelsForTeamForUser(model.NewId(), th.BasicUser.Id, "") CheckForbiddenStatus(t, resp) _, resp = th.SystemAdminClient.GetChannelsForTeamForUser(th.BasicTeam.Id, th.BasicUser.Id, "") CheckNoError(t, resp) }
explode_data.jsonl/65651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 535 }
[ 2830, 3393, 1949, 35925, 2461, 14597, 2461, 1474, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 23049, 6680, 11, 9039, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestEquals(t *testing.T) { privateKey, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) if err != nil { t.Errorf("Failed to generate private/public key pair") } pEncoded, _ := Encode(privateKey) aurumPVKey := AurumPrivateKey{ privateKey, pEncoded, hex.EncodeToString(pEncoded), } privateKey2, err := ecdsa.GenerateKey(elliptic.P256(), rand.Reader) if err != nil { t.Errorf("Failed to generate private/public key pair") } tests := []struct { name string privKey AurumPrivateKey privKey2 *ecdsa.PrivateKey want bool }{ { "Equals", aurumPVKey, privateKey, true, }, { "Not Equals", aurumPVKey, privateKey2, false, }, { "Not Equals", aurumPVKey, nil, false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if result := tt.privKey.Equals(tt.privKey2); result != tt.want { t.Errorf("Failed to return %v (got %v) for private keys that are: %v", tt.want, result, tt.name) } }) } }
explode_data.jsonl/42751
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 4315, 1155, 353, 8840, 836, 8, 341, 2455, 1592, 11, 1848, 1669, 384, 4385, 9081, 57582, 1592, 7, 613, 11442, 292, 1069, 17, 20, 21, 1507, 10382, 47431, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 9408, 311, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFailedTaskRestart(t *testing.T) { assert := assert.New(t) require.NoError(t, db.ClearCollections(task.Collection, task.OldCollection, build.Collection, VersionCollection), "Error clearing task and build collections") userName := "testUser" b := &build.Build{ Id: "buildtest", Status: evergreen.BuildStarted, Version: "abc", } v := &Version{ Id: b.Version, Status: evergreen.VersionStarted, Config: "identifier: sample", } testTask1 := &task.Task{ Id: "taskToRestart", Activated: false, BuildId: b.Id, Execution: 1, Project: "sample", StartTime: time.Date(2017, time.June, 12, 12, 0, 0, 0, time.Local), Status: evergreen.TaskFailed, Details: apimodels.TaskEndDetail{Type: evergreen.CommandTypeSystem}, Version: b.Version, } testTask2 := &task.Task{ Id: "taskThatSucceeded", Activated: false, BuildId: b.Id, Execution: 1, Project: "sample", StartTime: time.Date(2017, time.June, 12, 12, 0, 0, 0, time.Local), Status: evergreen.TaskSucceeded, Version: b.Version, } testTask3 := &task.Task{ Id: "taskOutsideOfTimeRange", Activated: false, BuildId: b.Id, Execution: 1, Project: "sample", StartTime: time.Date(2017, time.June, 11, 12, 0, 0, 0, time.Local), Status: evergreen.TaskFailed, Details: apimodels.TaskEndDetail{Type: "test"}, Version: b.Version, } testTask4 := &task.Task{ Id: "setupFailed", Activated: false, BuildId: b.Id, Execution: 1, Project: "sample", StartTime: time.Date(2017, time.June, 12, 12, 0, 0, 0, time.Local), Status: evergreen.TaskFailed, Details: apimodels.TaskEndDetail{Type: "setup"}, Version: b.Version, } b.Tasks = []build.TaskCache{ { Id: testTask1.Id, }, { Id: testTask2.Id, }, { Id: testTask3.Id, }, { Id: testTask4.Id, }, } assert.NoError(b.Insert()) assert.NoError(v.Insert()) assert.NoError(testTask1.Insert()) assert.NoError(testTask2.Insert()) assert.NoError(testTask3.Insert()) assert.NoError(testTask4.Insert()) // test a dry run opts := RestartOptions{ DryRun: true, IncludeTestFailed: true, IncludeSysFailed: false, IncludeSetupFailed: false, StartTime: 
time.Date(2017, time.June, 11, 11, 0, 0, 0, time.Local), EndTime: time.Date(2017, time.June, 12, 13, 0, 0, 0, time.Local), User: userName, } results, err := RestartFailedTasks(opts) assert.NoError(err) assert.Nil(results.ItemsErrored) assert.Equal(1, len(results.ItemsRestarted)) assert.Equal("taskOutsideOfTimeRange", results.ItemsRestarted[0]) opts.IncludeTestFailed = true opts.IncludeSysFailed = true results, err = RestartFailedTasks(opts) assert.NoError(err) assert.Nil(results.ItemsErrored) assert.Equal(2, len(results.ItemsRestarted)) assert.Equal("taskToRestart", results.ItemsRestarted[0]) opts.IncludeTestFailed = false opts.IncludeSysFailed = false opts.IncludeSetupFailed = true results, err = RestartFailedTasks(opts) assert.NoError(err) assert.Nil(results.ItemsErrored) assert.Equal(1, len(results.ItemsRestarted)) assert.Equal("setupFailed", results.ItemsRestarted[0]) // test restarting all tasks opts.StartTime = time.Date(2017, time.June, 12, 11, 0, 0, 0, time.Local) opts.DryRun = false opts.IncludeTestFailed = false opts.IncludeSysFailed = false opts.IncludeSetupFailed = false results, err = RestartFailedTasks(opts) assert.NoError(err) assert.Equal(0, len(results.ItemsErrored)) assert.Equal(2, len(results.ItemsRestarted)) assert.Equal(testTask1.Id, results.ItemsRestarted[0]) dbTask, err := task.FindOne(task.ById(testTask1.Id)) assert.NoError(err) assert.Equal(dbTask.Status, evergreen.TaskUndispatched) assert.True(dbTask.Execution > 1) dbTask, err = task.FindOne(task.ById(testTask2.Id)) assert.NoError(err) assert.Equal(dbTask.Status, evergreen.TaskSucceeded) assert.Equal(1, dbTask.Execution) dbTask, err = task.FindOne(task.ById(testTask3.Id)) assert.NoError(err) assert.Equal(dbTask.Status, evergreen.TaskFailed) assert.Equal(1, dbTask.Execution) dbTask, err = task.FindOne(task.ById(testTask4.Id)) assert.NoError(err) assert.Equal(dbTask.Status, evergreen.TaskUndispatched) assert.Equal(2, dbTask.Execution) }
explode_data.jsonl/60439
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1813 }
[ 2830, 3393, 9408, 6262, 59354, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 17957, 35699, 1155, 11, 2927, 13524, 52730, 17483, 28629, 11, 3383, 8382, 507, 6482, 11, 1936, 28629, 11, 6079, 6482, 1326, 197, 197, 1, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddLogMutil(t *testing.T) { mockdb := &mockMongo{ data: nil, err: nil, } DB = mockdb contents := []auditoplog.AuditLogContext{ auditoplog.AuditLogContext{ID: 1, Content: "sss"}, } err := AddLogMulti(1, auditoplog.AuditOpTypeAdd, common.BKInnerObjIDHost, contents, "mock desc", common.BKDefaultOwnerID, "user") if err != mockdb.err { t.Error(err) } }
explode_data.jsonl/56518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 2212, 2201, 44, 1314, 1155, 353, 8840, 836, 8, 1476, 77333, 1999, 1669, 609, 16712, 54998, 515, 197, 8924, 25, 2092, 345, 197, 9859, 25, 220, 2092, 345, 197, 532, 45409, 284, 7860, 1999, 271, 197, 17610, 1669, 3056, 48545,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHasManyChildrenWithOneStruct(t *testing.T) { category := Category{ Name: "main", Categories: []Category{ {Name: "sub1"}, {Name: "sub2"}, }, } DB.Save(&category) }
explode_data.jsonl/15889
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 10281, 8441, 11539, 2354, 3966, 9422, 1155, 353, 8840, 836, 8, 341, 75370, 1669, 10054, 515, 197, 21297, 25, 330, 3817, 756, 197, 6258, 5268, 25, 3056, 6746, 515, 298, 197, 63121, 25, 330, 1966, 16, 7115, 298, 197, 63121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_dkTags_setAttributesToTags(t *testing.T) { type fields struct { tags map[string]string } type args struct { attr []*commonpb.KeyValue } tests := []struct { name string fields fields args args want *dkTags }{ { name: "case1", fields: fields{tags: map[string]string{}}, args: args{attr: testKV}, want: &dkTags{tags: allTag}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { dt := &dkTags{ tags: tt.fields.tags, } if got := dt.setAttributesToTags(tt.args.attr); !reflect.DeepEqual(got, tt.want) { t.Errorf("setAttributesToTags() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/14403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 308 }
[ 2830, 3393, 814, 74, 15930, 2602, 10516, 1249, 15930, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 3244, 2032, 2415, 14032, 30953, 198, 197, 532, 13158, 2827, 2036, 341, 197, 60943, 29838, 5464, 16650, 9610, 1130, 198, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSchemaHooks(t *testing.T) { ctx := context.Background() client := enttest.Open(t, "sqlite3", "file:ent?mode=memory&cache=shared&_fk=1", enttest.WithMigrateOptions(migrate.WithGlobalUniqueID(true))) defer client.Close() err := client.Card.Create().SetNumber("123").Exec(ctx) require.EqualError(t, err, "card number is too short", "error is returned from hook") crd := client.Card.Create().SetNumber("1234").SaveX(ctx) require.Equal(t, "unknown", crd.Name, "name was set by hook") client.Use(func(next ent.Mutator) ent.Mutator { return hook.CardFunc(func(ctx context.Context, m *ent.CardMutation) (ent.Value, error) { name, ok := m.Name() require.True(t, !ok && name == "", "should be the first hook to execute") return next.Mutate(ctx, m) }) }) client.Card.Create().SetNumber("1234").SaveX(ctx) err = client.Card.Update().Exec(ctx) require.EqualError(t, err, "OpUpdate operation is not allowed") }
explode_data.jsonl/36044
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 8632, 67769, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 25291, 1669, 1197, 1944, 12953, 1155, 11, 330, 37042, 18, 497, 330, 1192, 25, 306, 30, 8516, 28, 17269, 5, 9360, 28, 6100, 85047, 41718, 28, 16, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_Image_StatusARM_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip of Image_StatusARM via JSON returns original", prop.ForAll(RunJSONSerializationTestForImageStatusARM, ImageStatusARMGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout)) }
explode_data.jsonl/59649
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 45949, 36449, 17911, 62, 4498, 77521, 78967, 98054, 2848, 4756, 2121, 2993, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535, 1695, 284, 220, 16, 15, 198, 86...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTransportSocketLateBinding(t *testing.T) { defer afterTest(t) mux := NewServeMux() fooGate := make(chan bool, 1) mux.HandleFunc("/foo", func(w ResponseWriter, r *Request) { w.Header().Set("foo-ipport", r.RemoteAddr) w.(Flusher).Flush() <-fooGate }) mux.HandleFunc("/bar", func(w ResponseWriter, r *Request) { w.Header().Set("bar-ipport", r.RemoteAddr) }) ts := httptest.NewServer(mux) defer ts.Close() dialGate := make(chan bool, 1) tr := &Transport{ Dial: func(n, addr string) (net.Conn, error) { if <-dialGate { return net.Dial(n, addr) } return nil, errors.New("manually closed") }, DisableKeepAlives: false, } defer tr.CloseIdleConnections() c := &Client{ Transport: tr, } dialGate <- true // only allow one dial fooRes, err := c.Get(ts.URL + "/foo") if err != nil { t.Fatal(err) } fooAddr := fooRes.Header.Get("foo-ipport") if fooAddr == "" { t.Fatal("No addr on /foo request") } time.AfterFunc(200*time.Millisecond, func() { // let the foo response finish so we can use its // connection for /bar fooGate <- true io.Copy(ioutil.Discard, fooRes.Body) fooRes.Body.Close() }) barRes, err := c.Get(ts.URL + "/bar") if err != nil { t.Fatal(err) } barAddr := barRes.Header.Get("bar-ipport") if barAddr != fooAddr { t.Fatalf("/foo came from conn %q; /bar came from %q instead", fooAddr, barAddr) } barRes.Body.Close() dialGate <- false }
explode_data.jsonl/4908
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 603 }
[ 2830, 3393, 27560, 10286, 61457, 15059, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 692, 2109, 2200, 1669, 1532, 60421, 44, 2200, 741, 197, 7975, 42318, 1669, 1281, 35190, 1807, 11, 220, 16, 340, 2109, 2200, 63623, 4283, 7975...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListFind(t *testing.T) { list := New() list.Add("a", "b", "c") foundIndex, foundValue := list.Find(func(index int, value interface{}) bool { return value.(string) == "c" }) if foundValue != "c" || foundIndex != 2 { t.Errorf("Got %v at %v expected %v at %v", foundValue, foundIndex, "c", 2) } foundIndex, foundValue = list.Find(func(index int, value interface{}) bool { return value.(string) == "x" }) if foundValue != nil || foundIndex != -1 { t.Errorf("Got %v at %v expected %v at %v", foundValue, foundIndex, nil, nil) } }
explode_data.jsonl/18298
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 852, 9885, 1155, 353, 8840, 836, 8, 341, 14440, 1669, 1532, 741, 14440, 1904, 445, 64, 497, 330, 65, 497, 330, 66, 1138, 58102, 1552, 11, 1730, 1130, 1669, 1140, 9998, 18552, 7195, 526, 11, 897, 3749, 28875, 1807, 341, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBoolColumnValue_AsTime(t *testing.T) { tests := []struct { name string b *BoolColumnValue want time.Time wantErr bool }{ { name: "true", b: NewBoolColumnValue(true).(*BoolColumnValue), wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := tt.b.AsTime() if (err != nil) != tt.wantErr { t.Errorf("BoolColumnValue.AsTime() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("BoolColumnValue.AsTime() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/73244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 11233, 2933, 1130, 62741, 1462, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 2233, 981, 353, 11233, 2933, 1130, 198, 197, 50780, 262, 882, 16299, 198, 197, 50780, 7747, 1807, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStartCmdWithBlankArg(t *testing.T) { t.Run("test blank host url arg", func(t *testing.T) { startCmd := GetStartCmd() args := []string{"--" + hostURLFlagName, ""} startCmd.SetArgs(args) err := startCmd.Execute() require.Error(t, err) require.Equal(t, "host-url value is empty", err.Error()) }) t.Run("test blank host metrics url arg", func(t *testing.T) { startCmd := GetStartCmd() args := []string{"--" + hostURLFlagName, "test", "--" + hostMetricsURLFlagName, ""} startCmd.SetArgs(args) err := startCmd.Execute() require.Error(t, err) require.Equal(t, "host-metrics-url value is empty", err.Error()) }) t.Run("test blank cas type arg", func(t *testing.T) { startCmd := GetStartCmd() args := []string{ "--" + hostURLFlagName, "test", "--" + hostMetricsURLFlagName, "test", "--" + casTypeFlagName, "", "--" + vctURLFlagName, "test", } startCmd.SetArgs(args) err := startCmd.Execute() require.Error(t, err) require.Equal(t, "cas-type value is empty", err.Error()) }) t.Run("test blank did namespace arg", func(t *testing.T) { startCmd := GetStartCmd() args := []string{ "--" + hostURLFlagName, "test", "--" + hostMetricsURLFlagName, "test", "--" + casTypeFlagName, "local", "--" + vctURLFlagName, "test", "--" + didNamespaceFlagName, "", } startCmd.SetArgs(args) err := startCmd.Execute() require.Error(t, err) require.Equal(t, "did-namespace value is empty", err.Error()) }) t.Run("test blank database type arg", func(t *testing.T) { startCmd := GetStartCmd() args := []string{ "--" + hostURLFlagName, "test", "--" + hostMetricsURLFlagName, "test", "--" + casTypeFlagName, "local", "--" + vctURLFlagName, "test", "--" + didNamespaceFlagName, "namespace", "--" + databaseTypeFlagName, "", } startCmd.SetArgs(args) err := startCmd.Execute() require.Error(t, err) require.Equal(t, "database-type value is empty", err.Error()) }) }
explode_data.jsonl/31121
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 771 }
[ 2830, 3393, 3479, 15613, 2354, 22770, 2735, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1944, 10113, 3468, 2515, 1392, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 21375, 15613, 1669, 2126, 3479, 15613, 2822, 197, 31215, 1669, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTreapToSlice(t *testing.T) { treap := NewTreap() treap.Put(c(4), "4") treap.Put(c(6), "6") treap.Put(c(1), "1") treap.Put(c(8), "8") treap.Put(c(5), "5") assertTreapSlice(t, treap, []Comparable{c(1), c(4), c(5), c(6), c(8)}, []Thing{"1", "4", "5", "6", "8"}) }
explode_data.jsonl/51550
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 65775, 391, 1249, 33236, 1155, 353, 8840, 836, 8, 341, 3244, 265, 391, 1669, 1532, 65775, 391, 741, 3244, 265, 391, 39825, 1337, 7, 19, 701, 330, 19, 1138, 3244, 265, 391, 39825, 1337, 7, 21, 701, 330, 21, 1138, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMats_CreateServiceOnlyByJsonFile(t *testing.T) { sName := "serviceOnly" sType := pms.TypeApplication jsonFile := "/tmp/service.json" //pdlFile := "/tmp/pdl.txt" context := &testutil.TestContext{ NameIDMap: make(map[string]string), NameObjectMap: make(map[string]interface{}), FileName: jsonFile, } data := &[]testutil.TestCase{ { Name: "TestCreateServiceOnly", Enabled: true, Executer: testutil.NewCmdTest, Method: testutil.METHOD_CREATE_SERVICE, Data: &testutil.CmdTestData{ Param: param.CREATE_SERVICE_WITH_JSONFILE(jsonFile), FileContent: &pms.Service{ Name: sName, Type: sType, }, ExpectedMsg: msg.OUTPUT_SERVICE_CREATED(), OutputBody: &pms.Service{}, ExpectedBody: &pms.Service{ Name: sName, Type: sType, }, }, PreTestFunc: func(data interface{}, context *testutil.TestContext) { cmdTD := data.(*testutil.CmdTestData) tmpService := cmdTD.FileContent.(*pms.Service) testutil.GenerateJsonFileWithService(context.FileName, tmpService) }, }, { Name: "TestDeleteServiceOnly", Enabled: true, Executer: testutil.NewCmdTest, Method: testutil.METHOD_DELETE_SERVICE, Data: &testutil.CmdTestData{ Param: param.DELETE_SERVICE("serviceOnly"), ExpectedMsg: msg.OUTPUT_SERVICE_DELETED("serviceOnly"), }, }, } testutil.RunTestCases(t, data, context) }
explode_data.jsonl/577
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 621 }
[ 2830, 3393, 44, 1862, 34325, 1860, 7308, 1359, 5014, 1703, 1155, 353, 8840, 836, 8, 341, 1903, 675, 1669, 330, 7936, 7308, 698, 1903, 929, 1669, 281, 1011, 10184, 4988, 198, 30847, 1703, 1669, 3521, 5173, 34186, 4323, 698, 197, 322, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuperHashDelete(t *testing.T) { hashmap := New() k1, k2, k3, value := 1, true, 3, 4 hashmap.Set(k1, k2, k3, value) hashmap.Delete(k1, k2, k3) if hashmap.Get(k1, k2, k3) != nil { t.Error("deleted keys still accessible") } }
explode_data.jsonl/82211
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 19284, 6370, 6435, 1155, 353, 8840, 836, 8, 341, 50333, 2186, 1669, 1532, 741, 16463, 16, 11, 595, 17, 11, 595, 18, 11, 897, 1669, 220, 16, 11, 830, 11, 220, 18, 11, 220, 19, 198, 50333, 2186, 4202, 5969, 16, 11, 595...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestQueryCollectionsConfig(t *testing.T) { sdk := mainSDK orgsContext := setupMultiOrgContext(t, sdk) err := integration.EnsureChannelCreatedAndPeersJoined(t, sdk, orgChannelID, "orgchannel.tx", orgsContext) require.NoError(t, err) ccID := integration.GenerateExamplePvtID(true) collConfig, err := newCollectionConfig(collCfgName, collCfgPolicy, collCfgRequiredPeerCount, collCfgMaximumPeerCount, collCfgBlockToLive) require.NoError(t, err) err = integration.InstallExamplePvtChaincode(orgsContext, ccID) require.NoError(t, err) err = integration.InstantiateExamplePvtChaincode(orgsContext, orgChannelID, ccID, "OR('Org1MSP.member','Org2MSP.member')", collConfig) require.NoError(t, err) org1AdminClientContext := sdk.Context(fabsdk.WithUser(org1AdminUser), fabsdk.WithOrg(org1Name)) client, err := resmgmt.New(org1AdminClientContext) if err != nil { t.Fatalf("Failed to create new resource management client: %s", err) } resp, err := client.QueryCollectionsConfig(orgChannelID, ccID) if err != nil { t.Fatalf("QueryCollectionsConfig return error: %s", err) } if len(resp.Config) != 1 { t.Fatalf("The number of collection config is incorrect, expected 1, got %d", len(resp.Config)) } conf := resp.Config[0] switch cconf := conf.Payload.(type) { case *cb.CollectionConfig_StaticCollectionConfig: checkStaticCollectionConfig(t, cconf.StaticCollectionConfig) default: t.Fatalf("The CollectionConfig.Payload's type is incorrect, expected `CollectionConfig_StaticCollectionConfig`, got %+v", reflect.TypeOf(conf.Payload)) } }
explode_data.jsonl/46759
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 528 }
[ 2830, 3393, 2859, 52730, 2648, 1155, 353, 8840, 836, 8, 341, 1903, 7584, 1669, 1887, 31534, 271, 87625, 82, 1972, 1669, 6505, 20358, 42437, 1972, 1155, 11, 45402, 340, 9859, 1669, 17590, 22834, 19098, 9629, 11694, 3036, 10197, 388, 41373,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDefaultProcessor_PrepareForAnchoring(t *testing.T) { srv := &testingcommons.MockIdentityService{} dp := DefaultProcessor(srv, nil, nil, cfg).(defaultProcessor) ctxh := testingconfig.CreateAccountContext(t, cfg) self, err := contextutil.Account(ctxh) assert.NoError(t, err) sr := utils.RandomSlice(32) sig, err := self.SignMsg(sr) assert.NoError(t, err) // validation failed model := new(mockModel) id := utils.RandomSlice(32) next := utils.RandomSlice(32) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(errors.New("validation failed")).Once() dp.identityService = srv err = dp.PrepareForAnchoring(model) model.AssertExpectations(t) srv.AssertExpectations(t) assert.Error(t, err) // success model = new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv err = dp.PrepareForAnchoring(model) model.AssertExpectations(t) srv.AssertExpectations(t) assert.NoError(t, err) }
explode_data.jsonl/57868
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 573 }
[ 2830, 3393, 3675, 22946, 79561, 3380, 2461, 2082, 331, 5503, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 609, 8840, 52361, 24664, 18558, 1860, 16094, 55256, 1669, 7899, 22946, 1141, 10553, 11, 2092, 11, 2092, 11, 13286, 68615, 2258, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCleanTempDirectory_ShouldRemoveWithSuccessStatus(t *testing.T) { cb := &v1alpha1.CodebaseBranch{ ObjectMeta: metav1.ObjectMeta{ Name: "stub-name", Namespace: "stub-namespace", }, Spec: v1alpha1.CodebaseBranchSpec{ CodebaseName: "stub-codebase-name", BranchName: "stub-branch-name", }, } directory := CleanTempDirectory{} err := directory.ServeRequest(cb) assert.NoError(t, err) }
explode_data.jsonl/14731
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 27529, 12151, 9310, 36578, 616, 13021, 2354, 7188, 2522, 1155, 353, 8840, 836, 8, 341, 63810, 1669, 609, 85, 16, 7141, 16, 20274, 3152, 18197, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 414, 330, 59...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1