text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestRestoreFailDatabaseCleanup(t *testing.T) { defer leaktest.AfterTest(t)() params := base.TestServerArgs{} // Disable external processing of mutations so that the final check of // crdb_internal.tables is guaranteed to not be cleaned up. Although this // was never observed by a stress test, it is here for safety. blockGC := make(chan struct{}) params.Knobs.GCJob = &sql.GCJobTestingKnobs{RunBeforeResume: func(_ int64) error { <-blockGC; return nil }} const numAccounts = 1000 _, _, sqlDB, dir, cleanup := backupRestoreTestSetupWithParams(t, singleNode, numAccounts, InitNone, base.TestClusterArgs{ServerArgs: params}) defer cleanup() dir = dir + "/foo" sqlDB.Exec(t, `BACKUP DATABASE data TO $1`, LocalFoo) // Bugger the backup by removing the SST files. if err := filepath.Walk(dir, func(path string, info os.FileInfo, err error) error { if err != nil { t.Fatal(err) } if info.Name() == BackupManifestName || !strings.HasSuffix(path, ".sst") { return nil } return os.Remove(path) }); err != nil { t.Fatal(err) } sqlDB.Exec(t, `DROP DATABASE data`) sqlDB.ExpectErr( t, "sst: no such file", `RESTORE DATABASE data FROM $1`, LocalFoo, ) sqlDB.ExpectErr( t, `database "data" does not exist`, `DROP DATABASE data`, ) close(blockGC) }
explode_data.jsonl/57586
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 484 }
[ 2830, 3393, 56284, 19524, 5988, 67335, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 25856, 1669, 2331, 8787, 5475, 4117, 16094, 197, 322, 28027, 9250, 8692, 315, 33584, 773, 429, 279, 1590, 1779, 315, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFactoryCreate(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() fs := sysutil.NewMockFileSystemInterface(mockCtrl) // ReadLines error -> return nil lists and error gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return(nil, fmt.Errorf("error")), ) tc := testCase{[]string{}, "/repo", "/blacklist", "/whitelist", fs, nil, nil, fmt.Errorf("error")} createAndAssert(t, tc) // ReadLines error -> return nil lists and error gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return(nil, fmt.Errorf("error")), ) tc = testCase{[]string{}, "/repo", "/blacklist", "/whitelist", fs, nil, nil, fmt.Errorf("error")} createAndAssert(t, tc) // Single .json file, empty whitelist, empty blacklist -> file in applyList gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{}, nil), ) tc = testCase{[]string{"/repo/a.json"}, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/a.json"}, []string{}, nil} createAndAssert(t, tc) // Single .yaml file, empty blacklist empty whitelist -> file in applyList gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{}, nil), ) tc = testCase{[]string{"/repo/a.yaml"}, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/a.yaml"}, []string{}, nil} createAndAssert(t, tc) // Single non-.json & non-.yaml file, empty blacklist empty whitelist // -> file not in applyList gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{}, nil), ) tc = testCase{[]string{"/repo/a"}, "/repo", "/blacklist", "/whitelist", fs, []string{}, []string{}, nil} createAndAssert(t, tc) // Multiple files (mixed extensions), empty blacklist, emptry whitelist gomock.InOrder( 
fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{}, nil), ) rawList := []string{"/repo/a.json", "/repo/b.jpg", "/repo/a/b.yaml", "/repo/a/b"} tc = testCase{rawList, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/a.json", "/repo/a/b.yaml"}, []string{}, nil} createAndAssert(t, tc) // Multiple files (mixed extensions), blacklist, empty whitelist gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{"b.json", "b/c.json"}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{}, nil), ) rawList = []string{"/repo/a.json", "/repo/b.json", "/repo/a/b/c.yaml", "/repo/a/b", "/repo/b/c.json"} tc = testCase{rawList, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/a.json", "/repo/a/b/c.yaml"}, []string{"/repo/b.json", "/repo/b/c.json"}, nil} createAndAssert(t, tc) // File in blacklist but not in repo // (Ends up on returned blacklist anyway) gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{"a/b/c.yaml", "f.json"}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{}, nil), ) rawList = []string{"/repo/a/b.json", "/repo/b/c", "/repo/a/b/c.yaml", "/repo/a/b/c", "/repo/c.json"} tc = testCase{rawList, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/a/b.json", "/repo/c.json"}, []string{"/repo/a/b/c.yaml", "/repo/f.json"}, nil} createAndAssert(t, tc) // Empty blacklist, valid whitelist all whitelist is in the repo gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{"a/b/c.yaml", "c.json"}, nil), ) rawList = []string{"/repo/a/b.json", "/repo/b/c", "/repo/a/b/c.yaml", "/repo/a/b/c", "/repo/c.json"} tc = testCase{rawList, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/a/b/c.yaml", "/repo/c.json"}, []string{}, nil} createAndAssert(t, tc) // Empty blacklist, valid whitelist some whitelist is not 
included in repo gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{"a/b/c.yaml", "c.json", "someRandomFile.yaml"}, nil), ) rawList = []string{"/repo/a/b.json", "/repo/b/c", "/repo/a/b/c.yaml", "/repo/a/b/c", "/repo/c.json"} tc = testCase{rawList, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/a/b/c.yaml", "/repo/c.json"}, []string{}, nil} createAndAssert(t, tc) // Both whitelist and blacklist contain the same file gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{"a/b/c.yaml"}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{"a/b/c.yaml", "c.json"}, nil), ) rawList = []string{"/repo/a/b.json", "/repo/b/c", "/repo/a/b/c.yaml", "/repo/a/b/c", "/repo/c.json"} tc = testCase{rawList, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/c.json"}, []string{"/repo/a/b/c.yaml"}, nil} createAndAssert(t, tc) // Both whitelist and blacklist contain the same file and other comments. gomock.InOrder( fs.EXPECT().ReadLines("/blacklist").Times(1).Return([]string{"a/b/c.yaml", "# c.json"}, nil), fs.EXPECT().ReadLines("/whitelist").Times(1).Return([]string{"a/b/c.yaml", "c.json", "# a/b/c.yaml"}, nil), ) rawList = []string{"/repo/a/b.json", "/repo/b/c", "/repo/a/b/c.yaml", "/repo/a/b/c", "/repo/c.json"} tc = testCase{rawList, "/repo", "/blacklist", "/whitelist", fs, []string{"/repo/c.json"}, []string{"/repo/a/b/c.yaml"}, nil} createAndAssert(t, tc) }
explode_data.jsonl/42183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2325 }
[ 2830, 3393, 4153, 4021, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 741, 53584, 1669, 5708, 1314, 7121, 11571, 50720, 5051, 30389, 15001, 692, 197, 322, 4457, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSchema_Get(t *testing.T) { var ( serviceId string serviceId1 string ) var ( schemaId1 string = "all_schema1_ms" schemaId2 string = "all_schema2_ms" schemaId3 string = "all_schema3_ms" summary string = "this0is1a2test3ms" schemaContent string = "the content is vary large" ) t.Run("register service and instance", func(t *testing.T) { respCreateService, err := datasource.GetMetadataManager().RegisterService(getContext(), &pb.CreateServiceRequest{ Service: &pb.MicroService{ AppId: "get_schema_group_ms", ServiceName: "get_schema_service_ms", Version: "1.0.0", Level: "FRONT", Schemas: []string{ "non-schema-content", }, Status: pb.MS_UP, Environment: pb.ENV_DEV, }, }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, respCreateService.Response.GetCode()) serviceId = respCreateService.ServiceId respCreateSchema, err := datasource.GetMetadataManager().ModifySchema(getContext(), &pb.ModifySchemaRequest{ ServiceId: serviceId, SchemaId: "com.huawei.test.ms", Schema: "get schema ms", Summary: "schema0summary1ms", }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, respCreateSchema.Response.GetCode()) respCreateService, err = datasource.GetMetadataManager().RegisterService(getContext(), &pb.CreateServiceRequest{ Service: &pb.MicroService{ AppId: "get_all_schema_ms", ServiceName: "get_all_schema_ms", Version: "1.0.0", Level: "FRONT", Schemas: []string{ schemaId1, schemaId2, schemaId3, }, Status: pb.MS_UP, }, }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, respCreateService.Response.GetCode()) serviceId1 = respCreateService.ServiceId respPutData, err := datasource.GetMetadataManager().ModifySchema(getContext(), &pb.ModifySchemaRequest{ ServiceId: serviceId1, SchemaId: schemaId2, Schema: schemaContent, }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, respPutData.Response.GetCode()) respPutData, err = datasource.GetMetadataManager().ModifySchema(getContext(), &pb.ModifySchemaRequest{ ServiceId: serviceId1, SchemaId: schemaId3, 
Schema: schemaContent, Summary: summary, }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, respPutData.Response.GetCode()) respGetAllSchema, err := datasource.GetMetadataManager().GetAllSchemas(getContext(), &pb.GetAllSchemaRequest{ ServiceId: serviceId1, WithSchema: false, }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, respGetAllSchema.Response.GetCode()) schemas := respGetAllSchema.Schemas for _, schema := range schemas { if schema.SchemaId == schemaId1 && schema.SchemaId == schemaId2 { assert.Empty(t, schema.Summary) assert.Empty(t, schema.Schema) } if schema.SchemaId == schemaId3 { assert.Equal(t, summary, schema.Summary) assert.Empty(t, schema.Schema) } } respGetAllSchema, err = datasource.GetMetadataManager().GetAllSchemas(getContext(), &pb.GetAllSchemaRequest{ ServiceId: serviceId1, WithSchema: true, }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, respGetAllSchema.Response.GetCode()) schemas = respGetAllSchema.Schemas for _, schema := range schemas { switch schema.SchemaId { case schemaId1: assert.Empty(t, schema.Summary) assert.Empty(t, schema.Schema) case schemaId2: assert.Empty(t, schema.Summary) assert.Equal(t, schemaContent, schema.Schema) case schemaId3: assert.Equal(t, summary, schema.Summary) assert.Equal(t, schemaContent, schema.Schema) } } }) t.Run("test get when request is invalid", func(t *testing.T) { log.Info("service does not exist") respGetSchema, err := datasource.GetMetadataManager().GetSchema(getContext(), &pb.GetSchemaRequest{ ServiceId: "none_exist_service", SchemaId: "com.huawei.test", }) assert.NoError(t, err) assert.Equal(t, pb.ErrServiceNotExists, respGetSchema.Response.GetCode()) respGetAllSchemas, err := datasource.GetMetadataManager().GetAllSchemas(getContext(), &pb.GetAllSchemaRequest{ ServiceId: "none_exist_service", }) assert.NoError(t, err) assert.Equal(t, pb.ErrServiceNotExists, respGetAllSchemas.Response.GetCode()) log.Info("schema id doest not exist") respGetSchema, err = 
datasource.GetMetadataManager().GetSchema(getContext(), &pb.GetSchemaRequest{ ServiceId: serviceId, SchemaId: "none_exist_schema", }) assert.NoError(t, err) assert.Equal(t, pb.ErrSchemaNotExists, respGetSchema.Response.GetCode()) }) t.Run("test get when request is valid", func(t *testing.T) { resp, err := datasource.GetMetadataManager().GetSchema(getContext(), &pb.GetSchemaRequest{ ServiceId: serviceId, SchemaId: "com.huawei.test.ms", }) assert.NoError(t, err) assert.Equal(t, pb.ResponseSuccess, resp.Response.GetCode()) assert.Equal(t, "get schema ms", resp.Schema) assert.Equal(t, "schema0summary1ms", resp.SchemaSummary) }) }
explode_data.jsonl/49818
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2085 }
[ 2830, 3393, 8632, 13614, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 52934, 764, 220, 914, 198, 197, 52934, 764, 16, 914, 198, 197, 692, 2405, 2399, 197, 1903, 3416, 764, 16, 257, 914, 284, 330, 541, 25371, 16, 21416, 698, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestGetRulesUtil(t *testing.T) { _, err := GetRulesUtil(context.Background(), "", "") if err != nil { t.Fatalf("GetRulesUtil failed") } _, err = GetOneRule(context.Background(), "", "", "") if err != nil { t.Fatalf("GetOneRule failed") } }
explode_data.jsonl/74433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 1949, 26008, 2742, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 2126, 26008, 2742, 5378, 19047, 1507, 7342, 14676, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1949, 26008, 2742, 4641, 1138, 197, 630, 197, 6878,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetOrderbook(t *testing.T) { t.Parallel() _, err := o.GetOrderBook(okgroup.GetOrderBookRequest{InstrumentID: "BTC-USDT"}, asset.Spot) if err != nil { t.Error(err) } _, err = o.GetOrderBook(okgroup.GetOrderBookRequest{InstrumentID: "Payload"}, asset.Futures) if err == nil { t.Error("error cannot be nil") } _, err = o.GetOrderBook(okgroup.GetOrderBookRequest{InstrumentID: "BTC-USD-SWAP"}, asset.PerpetualSwap) if err == nil { t.Error("error cannot be nil") } }
explode_data.jsonl/30208
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 1949, 4431, 2190, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 297, 2234, 4431, 7134, 60207, 4074, 2234, 4431, 7134, 1900, 90, 56324, 915, 25, 330, 59118, 32340, 10599, 7115, 197, 197, 9852, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAttachNil(t *testing.T) { got := Wrap(nil, "", "no error", "no error") if got != nil { t.Errorf("Attach(nil, \"no error\"): got %#v, expected nil", got) } }
explode_data.jsonl/60773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 30485, 19064, 1155, 353, 8840, 836, 8, 341, 3174, 354, 1669, 42187, 27907, 11, 7342, 330, 2152, 1465, 497, 330, 2152, 1465, 1138, 743, 2684, 961, 2092, 341, 197, 3244, 13080, 445, 30485, 27907, 11, 7245, 2152, 1465, 59, 37...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestQPS(t *testing.T) { tc := testCase{ replicas: 3, desiredMetricValues: PodMetricsInfo{ "test-pod-0": 10, "test-pod-1": 20, "test-pod-2": 10, }, metricName: "qps", targetTimestamp: 1, reportedMetricsPoints: [][]metricPoint{{{10, 1}}, {{20, 1}}, {{10, 1}}}, } tc.runTest(t) }
explode_data.jsonl/66346
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 160 }
[ 2830, 3393, 48, 5012, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 54452, 515, 197, 73731, 52210, 25, 220, 18, 345, 197, 52912, 2690, 54310, 6227, 25, 16821, 27328, 1731, 515, 298, 197, 1, 1944, 2268, 347, 12, 15, 788, 220, 16, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCoordinatorGetBlocks(t *testing.T) { metrics := metrics.NewGossipMetrics(&disabled.Provider{}).PrivdataMetrics sd := common.SignedData{ Identity: []byte{0, 1, 2}, Signature: []byte{3, 4, 5}, Data: []byte{6, 7, 8}, } cs := createcollectionStore(sd).thatAcceptsAll() committer := &mocks.Committer{} store := &mockTransientStore{t: t} fetcher := &fetcherMock{t: t} coordinator := NewCoordinator(Support{ CollectionStore: cs, Committer: committer, Fetcher: fetcher, TransientStore: store, Validator: &validatorMock{}, }, sd, metrics, testConfig) hash := util2.ComputeSHA256([]byte("rws-pre-image")) bf := &blockFactory{ channelID: "test", } block := bf.AddTxn("tx1", "ns1", hash, "c1", "c2").AddTxn("tx2", "ns2", hash, "c1").create() // Green path - block and private data is returned, but the requester isn't eligible for all the private data, // but only to a subset of it. cs = createcollectionStore(sd).thatAccepts(CollectionCriteria{ Namespace: "ns1", Collection: "c2", TxId: "tx1", Channel: "test", }) committer.Mock = mock.Mock{} committer.On("GetPvtDataAndBlockByNum", mock.Anything).Return(&ledger.BlockAndPvtData{ Block: block, PvtData: expectedCommittedPrivateData1, }, nil) coordinator = NewCoordinator(Support{ CollectionStore: cs, Committer: committer, Fetcher: fetcher, TransientStore: store, Validator: &validatorMock{}, }, sd, metrics, testConfig) expectedPrivData := (&pvtDataFactory{}).addRWSet().addNSRWSet("ns1", "c2").create() block2, returnedPrivateData, err := coordinator.GetPvtDataAndBlockByNum(1, sd) assert.NoError(t, err) assert.Equal(t, block, block2) assert.Equal(t, expectedPrivData, []*ledger.TxPvtData(returnedPrivateData)) // Bad path - error occurs when trying to retrieve the block and private data committer.Mock = mock.Mock{} committer.On("GetPvtDataAndBlockByNum", mock.Anything).Return(nil, errors.New("uh oh")) block2, returnedPrivateData, err = coordinator.GetPvtDataAndBlockByNum(1, sd) assert.Nil(t, block2) assert.Empty(t, returnedPrivateData) 
assert.Error(t, err) }
explode_data.jsonl/36187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 845 }
[ 2830, 3393, 64304, 1949, 29804, 1155, 353, 8840, 836, 8, 341, 2109, 13468, 1669, 16734, 7121, 38, 41473, 27328, 2099, 11978, 36208, 6257, 568, 32124, 691, 27328, 198, 99000, 1669, 4185, 808, 1542, 1043, 515, 197, 197, 18558, 25, 220, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIpcDuplicateListen(t *testing.T) { switch runtime.GOOS { case "plan9": t.Skip("IPC not supported on Plan9") default: tt.TestDuplicateListen(t) } }
explode_data.jsonl/40690
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 40, 3992, 53979, 38714, 1155, 353, 8840, 836, 8, 341, 8961, 15592, 97574, 3126, 341, 2722, 330, 10393, 24, 4660, 197, 3244, 57776, 445, 62119, 537, 7248, 389, 9680, 24, 1138, 11940, 510, 197, 3244, 83, 8787, 53979, 38714, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func Test_FullLoggerNameGenerator_panics_withNil(t *testing.T) { assert.Execution(t, func() { FullLoggerNameGenerator(nil) }).WillPanicWith("^invalid value to receive a package from: <nil>$") }
explode_data.jsonl/57920
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 1400, 617, 7395, 675, 12561, 71099, 1211, 6615, 19064, 1155, 353, 8840, 836, 8, 341, 6948, 68352, 1155, 11, 2915, 368, 341, 197, 197, 9432, 7395, 675, 12561, 27907, 340, 197, 16630, 9945, 47, 31270, 2354, 48654, 11808, 897, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIsNil(t *testing.T) { // These implement IsNil. // Wrap in extra struct to hide interface type. doNil := []interface{}{ struct{ x *int }{}, struct{ x interface{} }{}, struct{ x map[string]int }{}, struct{ x func() bool }{}, struct{ x chan int }{}, struct{ x []string }{}, } for _, ts := range doNil { ty := TypeOf(ts).Field(0).Type v := Zero(ty) v.IsNil() // panics if not okay to call } // Check the implementations var pi struct { x *int } Nil(pi, t) pi.x = new(int) NotNil(pi, t) var si struct { x []int } Nil(si, t) si.x = make([]int, 10) NotNil(si, t) var ci struct { x chan int } Nil(ci, t) ci.x = make(chan int) NotNil(ci, t) var mi struct { x map[int]int } Nil(mi, t) mi.x = make(map[int]int) NotNil(mi, t) var ii struct { x interface{} } Nil(ii, t) ii.x = 2 NotNil(ii, t) var fi struct { x func(t *testing.T) } Nil(fi, t) fi.x = TestIsNil NotNil(fi, t) }
explode_data.jsonl/29544
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 464 }
[ 2830, 3393, 3872, 19064, 1155, 353, 8840, 836, 8, 341, 197, 322, 4220, 4211, 2160, 19064, 624, 197, 322, 42187, 304, 4960, 2036, 311, 10265, 3749, 943, 624, 19935, 19064, 1669, 3056, 4970, 67066, 197, 6472, 90, 856, 353, 396, 335, 388...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConnectRace(t *testing.T) { l, err := ListenPipe(testPipeName, nil) if err != nil { t.Fatal(err) } defer l.Close() go func() { for { s, err := l.Accept() if err == ErrPipeListenerClosed { return } if err != nil { t.Fatal(err) } s.Close() } }() // Dial all in background var wg sync.WaitGroup for i := 0; i < 1000; i++ { wg.Add(1) go func() { defer wg.Done() c, err := DialPipe(testPipeName, nil) if err != nil { t.Fatal(err) } c.Close() }() } wg.Wait() // wait for all to finish }
explode_data.jsonl/11434
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 14611, 55991, 1155, 353, 8840, 836, 8, 341, 8810, 11, 1848, 1669, 32149, 34077, 8623, 34077, 675, 11, 2092, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 16867, 326, 10421, 741, 30680, 2915, 368, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFieldListRestore(t *testing.T) { testCases := []NodeRestoreTestCase{ {"*", "*"}, {"t.*", "`t`.*"}, {"testdb.t.*", "`testdb`.`t`.*"}, {"col as a", "`col` AS `a`"}, {"`t`.*, s.col as a", "`t`.*, `s`.`col` AS `a`"}, } extractNodeFunc := func(node Node) Node { return node.(*SelectStmt).Fields } runNodeRestoreTest(t, testCases, "SELECT %s", extractNodeFunc) }
explode_data.jsonl/27571
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 1877, 852, 56284, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1955, 56284, 16458, 515, 197, 197, 4913, 78729, 15630, 7115, 197, 197, 4913, 83, 4908, 497, 35973, 83, 63, 4908, 7115, 197, 197, 4913, 1944, 1999, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPlayback_Close_panic(t *testing.T) { p := Playback{Ops: []IO{{W: []byte{10}}}} defer func() { v := recover() err, ok := v.(error) if !ok { t.Fatal("expected error") } if !IsErr(err) { t.Fatalf("unexpected error: %v", err) } }() _ = p.Close() t.Fatal("shouldn't run") }
explode_data.jsonl/44853
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 87125, 68185, 620, 31270, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 95301, 90, 38904, 25, 3056, 3810, 2979, 54, 25, 3056, 3782, 90, 16, 15, 3417, 11248, 16867, 2915, 368, 341, 197, 5195, 1669, 11731, 741, 197, 9859, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMd5DigestKey(t *testing.T) { assert.Equal(t, "md5-rL0Y20zC+Fzt72VPzMSk2A==", md5DigestKey([]byte("foo"))) assert.Equal(t, "md5-N7UdGUp1E+RbVvZSTy1R8g==", md5DigestKey([]byte("bar"))) assert.Equal(t, "md5-xWvVSA9uVBPLYqCtlmZhOg==", md5DigestKey([]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9})) }
explode_data.jsonl/56948
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 173 }
[ 2830, 3393, 72529, 20, 45217, 1592, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 1155, 11, 330, 2277, 20, 3795, 43, 15, 56, 17, 15, 89, 34, 84124, 11687, 22, 17, 13378, 89, 4826, 74, 17, 32, 418, 497, 10688, 20, 45217, 1592, 10556, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLinkedDataProofSignerAndVerifier(t *testing.T) { //nolint:lll vcJSON := ` { "@context": [ "https://www.w3.org/2018/credentials/v1", "https://www.w3.org/2018/credentials/examples/v1" ], "id": "https://example.com/credentials/1872", "type": [ "VerifiableCredential", "UniversityDegreeCredential" ], "issuer": "did:key:z6Mkj7of2aaooXhTJvJ5oCL9ZVcAS472ZBuSjYyXDa4bWT32", "issuanceDate": "2020-01-17T15:14:09.724Z", "credentialSubject": { "id": "did:example:ebfeb1f712ebc6f1c276e12ec21", "degree": { "type": "BachelorDegree" }, "name": "Jayden Doe", "spouse": "did:example:c276e12ec21ebfeb1f712ebc6f1" } } ` ed25519Signer, err := newCryptoSigner(kms.ED25519Type) require.NoError(t, err) vcWithEd25519Proof := prepareVCWithEd25519LDP(t, vcJSON, ed25519Signer) vcWithEd25519ProofBytes, err := vcWithEd25519Proof.MarshalJSON() require.NoError(t, err) ecdsaSigner, err := newCryptoSigner(kms.ECDSASecp256k1TypeIEEEP1363) require.NoError(t, err) vcWithSecp256k1Proof := prepareVCWithSecp256k1LDP(t, vcJSON, ecdsaSigner) vcWithSecp256k1ProofBytes, err := vcWithSecp256k1Proof.MarshalJSON() require.NoError(t, err) require.NotEmpty(t, vcWithSecp256k1ProofBytes) t.Run("Single signature suite", func(t *testing.T) { verifierSuite := ed25519signature2018.New( suite.WithVerifier(ed25519signature2018.NewPublicKeyVerifier()), suite.WithCompactProof()) vcDecoded, err := parseTestCredential(vcWithEd25519ProofBytes, WithEmbeddedSignatureSuites(verifierSuite), WithPublicKeyFetcher(SingleKey(ed25519Signer.PublicKeyBytes(), kms.ED25519))) require.NoError(t, err) require.Equal(t, vcWithEd25519Proof, vcDecoded) }) t.Run("Several signature suites", func(t *testing.T) { verifierSuites := []verifier.SignatureSuite{ ed25519signature2018.New( suite.WithVerifier(ed25519signature2018.NewPublicKeyVerifier()), suite.WithCompactProof()), ecdsasecp256k1signature2019.New( suite.WithVerifier(ecdsasecp256k1signature2019.NewPublicKeyVerifier())), } vcDecoded, err := parseTestCredential(vcWithEd25519ProofBytes, 
WithEmbeddedSignatureSuites(verifierSuites...), WithPublicKeyFetcher(SingleKey(ed25519Signer.PublicKeyBytes(), kms.ED25519))) require.NoError(t, err) require.Equal(t, vcWithEd25519Proof, vcDecoded) jwk, err := jose.JWKFromPublicKey(ecdsaSigner.PublicKey()) require.NoError(t, err) vcDecoded, err = parseTestCredential(vcWithSecp256k1ProofBytes, WithEmbeddedSignatureSuites(verifierSuites...), WithPublicKeyFetcher(func(issuerID, keyID string) (*verifier.PublicKey, error) { return &verifier.PublicKey{ Type: "EcdsaSecp256k1VerificationKey2019", Value: ecdsaSigner.PublicKeyBytes(), JWK: jwk, }, nil })) require.NoError(t, err) require.Equal(t, vcWithSecp256k1Proof, vcDecoded) }) t.Run("no signature suite defined", func(t *testing.T) { vcDecoded, err := parseTestCredential(vcWithEd25519ProofBytes, WithPublicKeyFetcher(SingleKey(ed25519Signer.PublicKeyBytes(), kms.ED25519))) require.NoError(t, err) require.NotNil(t, vcDecoded) }) }
explode_data.jsonl/5864
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1346 }
[ 2830, 3393, 22070, 1043, 31076, 7264, 261, 3036, 82394, 1155, 353, 8840, 836, 8, 341, 197, 322, 77, 337, 396, 25, 654, 75, 198, 5195, 66, 5370, 1669, 22074, 515, 220, 8428, 2147, 788, 2278, 262, 330, 2428, 1110, 2136, 1418, 18, 2659...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCollectionReplica_addPartition(t *testing.T) { node := newQueryNodeMock() collectionID := UniqueID(0) initTestMeta(t, node, collectionID, 0) partitionIDs := []UniqueID{1, 2, 3} for _, id := range partitionIDs { err := node.historical.replica.addPartition(collectionID, id) assert.NoError(t, err) partition, err := node.historical.replica.getPartitionByID(id) assert.NoError(t, err) assert.Equal(t, partition.ID(), id) } err := node.Stop() assert.NoError(t, err) }
explode_data.jsonl/11483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 6482, 18327, 15317, 2891, 49978, 1155, 353, 8840, 836, 8, 341, 20831, 1669, 501, 2859, 1955, 11571, 741, 1444, 1908, 915, 1669, 28650, 915, 7, 15, 340, 28248, 2271, 12175, 1155, 11, 2436, 11, 4426, 915, 11, 220, 15, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetSetValidatorMissedBlockBitArray(t *testing.T) { ctx, _, _, _, keeper := createTestInput(t, DefaultParams()) missed := keeper.getValidatorMissedBlockBitArray(ctx, sdk.ConsAddress(addrs[0]), 0) require.False(t, missed) // treat empty key as not missed keeper.setValidatorMissedBlockBitArray(ctx, sdk.ConsAddress(addrs[0]), 0, true) missed = keeper.getValidatorMissedBlockBitArray(ctx, sdk.ConsAddress(addrs[0]), 0) require.True(t, missed) // now should be missed }
explode_data.jsonl/30931
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 1949, 1649, 14256, 35312, 291, 4713, 8344, 1857, 1155, 353, 8840, 836, 8, 341, 20985, 11, 8358, 8358, 8358, 53416, 1669, 1855, 2271, 2505, 1155, 11, 7899, 4870, 2398, 197, 1831, 291, 1669, 53416, 670, 14256, 35312, 291, 4713...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShardTimeRangesSummaryString(t *testing.T) { start := time.Unix(1472824800, 0) str := shardTimeRanges{ 0: xtime.NewRanges( xtime.Range{Start: start, End: start.Add(testBlockSize)}, xtime.Range{Start: start.Add(2 * testBlockSize), End: start.Add(4 * testBlockSize)}), 1: xtime.NewRanges(xtime.Range{ Start: start, End: start.Add(2 * testBlockSize), }), } expected := "{0: 6h0m0s, 1: 4h0m0s}" assert.Equal(t, expected, str.SummaryString()) }
explode_data.jsonl/4680
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 206 }
[ 2830, 3393, 2016, 567, 1462, 74902, 19237, 703, 1155, 353, 8840, 836, 8, 341, 21375, 1669, 882, 10616, 941, 7, 16, 19, 22, 17, 23, 17, 19, 23, 15, 15, 11, 220, 15, 692, 11355, 1669, 52069, 1462, 74902, 515, 197, 197, 15, 25, 856...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetBytes(t *testing.T) { type B []byte var x B y := []byte{1, 2, 3, 4} ValueOf(&x).Elem().SetBytes(y) if !bytes.Equal(x, y) { t.Fatalf("ValueOf(%v).Bytes() = %v", x, y) } if &x[0] != &y[0] { t.Errorf("ValueOf(%p).Bytes() = %p", &x[0], &y[0]) } }
explode_data.jsonl/29585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 1649, 7078, 1155, 353, 8840, 836, 8, 341, 13158, 425, 3056, 3782, 198, 2405, 856, 425, 198, 14522, 1669, 3056, 3782, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 532, 47399, 2124, 2099, 87, 568, 25586, 1005, 1649, 70...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIsOrganizationMember(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) test := func(orgID, userID int64, expected bool) { isMember, err := IsOrganizationMember(orgID, userID) assert.NoError(t, err) assert.EqualValues(t, expected, isMember) } test(3, 2, true) test(3, 3, false) test(3, 4, true) test(6, 5, true) test(6, 4, false) test(NonexistentID, NonexistentID, false) }
explode_data.jsonl/71062
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 3872, 41574, 9366, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 2398, 18185, 1669, 2915, 36246, 915, 11, 35204, 526, 21, 19, 11, 3601, 1807, 8, 341, 197, 19907, 9366, 11, 1848, 1669, 2160, 41574,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateActionProxyRequestHeaders(t *testing.T) { requestHeaders := &v1.ProxyRequestHeaders{ Set: []v1.Header{ { Name: "Host", Value: "nginx.org", }, }, } allErrs := validateActionProxyRequestHeaders(requestHeaders, field.NewPath("requestHeaders")) if len(allErrs) != 0 { t.Errorf("validateActionProxyRequestHeaders(%v) returned errors for valid input: %v", requestHeaders, allErrs) } }
explode_data.jsonl/65900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 17926, 2512, 16219, 1900, 10574, 1155, 353, 8840, 836, 8, 341, 23555, 10574, 1669, 609, 85, 16, 75200, 1900, 10574, 515, 197, 22212, 25, 3056, 85, 16, 15753, 515, 298, 197, 515, 571, 21297, 25, 220, 330, 9296, 756, 571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_deleteWorkflowHandler(t *testing.T) { api, db, router := newTestAPI(t) test.NoError(t, workflow.CreateBuiltinWorkflowHookModels(db)) // Init user u, pass := assets.InsertAdminUser(t, api.mustDB()) // Init project key := sdk.RandomString(10) proj := assets.InsertTestProject(t, db, api.Cache, key, key) // Init pipeline pip := sdk.Pipeline{ Name: "pipeline1", ProjectID: proj.ID, } test.NoError(t, pipeline.InsertPipeline(api.mustDB(), &pip)) //Prepare request vars := map[string]string{ "permProjectKey": proj.Key, } uri := router.GetRoute("POST", api.postWorkflowHandler, vars) test.NotEmpty(t, uri) var wkf = &sdk.Workflow{ Name: "Name", Description: "Description", WorkflowData: sdk.WorkflowData{ Node: sdk.Node{ Type: sdk.NodeTypePipeline, Context: &sdk.NodeContext{ PipelineID: pip.ID, }, }, }, } req := assets.NewAuthentifiedRequest(t, u, pass, "POST", uri, &wkf) //Do the request w := httptest.NewRecorder() router.Mux.ServeHTTP(w, req) assert.Equal(t, 201, w.Code) test.NoError(t, json.Unmarshal(w.Body.Bytes(), &wkf)) vars = map[string]string{ "key": proj.Key, "permWorkflowName": "Name", } uri = router.GetRoute("DELETE", api.deleteWorkflowHandler, vars) test.NotEmpty(t, uri) req = assets.NewAuthentifiedRequest(t, u, pass, "DELETE", uri, nil) //Do the request w = httptest.NewRecorder() router.Mux.ServeHTTP(w, req) assert.Equal(t, 200, w.Code) // Waiting until the deletion is over ctx, cancel := context.WithTimeout(context.TODO(), 1*time.Minute) defer cancel() tickCheck := time.NewTicker(1 * time.Second) defer tickCheck.Stop() loop: for { select { case <-ctx.Done(): t.Fatal(ctx.Err()) case <-tickCheck.C: wk, _ := workflow.Load(ctx, db, api.Cache, *proj, wkf.Name, workflow.LoadOptions{Minimal: true}) if wk == nil { break loop } } } }
explode_data.jsonl/31080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 825 }
[ 2830, 3393, 11353, 62768, 3050, 1155, 353, 8840, 836, 8, 341, 54299, 11, 2927, 11, 9273, 1669, 501, 2271, 7082, 1155, 692, 18185, 35699, 1155, 11, 28288, 7251, 33, 25628, 62768, 31679, 16969, 9791, 4390, 197, 322, 15690, 1196, 198, 1067...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestWatchOverlapDropConnContextCancel(t *testing.T) { f := func(clus *integration2.ClusterV3) { clus.Members[0].Bridge().DropConnections() } testWatchOverlapContextCancel(t, f) }
explode_data.jsonl/28944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 14247, 82171, 19871, 9701, 1972, 9269, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 2915, 7, 4163, 353, 60168, 17, 72883, 53, 18, 8, 341, 197, 197, 4163, 91758, 58, 15, 936, 32848, 1005, 19871, 54751, 741, 197, 532, 18185, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSumPerKeyPerPartitionContributionBoundingFloat(t *testing.T) { var triples []testutils.TripleWithFloatValue for id := 1; id <= 50; id++ { triples = append(triples, testutils.TripleWithFloatValue{id, 0, 1}) // partition 0 is associated with 50 times 1 triples = append(triples, testutils.TripleWithFloatValue{id, 1, 4}) // partition 1 is associated with 50 times 4 // Additional values that are outside of range [lower, upper] triples = append(triples, testutils.TripleWithFloatValue{id, 0, -17}) // should clamp to lower bound triples = append(triples, testutils.TripleWithFloatValue{id, 1, 42}) // should clamp to upper bound } result := []testutils.TestFloat64Metric{ {0, 100.0}, // each aggregated record in partition 0 must be clamped to 2.0 {1, 150.0}, // each aggregated record in partition 1 must be clamped to 3.0 } p, s, col, want := ptest.CreateList2(triples, result) col = beam.ParDo(s, testutils.ExtractIDFromTripleWithFloatValue, col) // ε=60, δ=0.01 and l0Sensitivity=2 gives a threshold of ≈2. // We have 3 partitions. So, to get an overall flakiness of 10⁻²³, // we need to have each partition pass with 1-10⁻²⁵ probability (k=25). epsilon, delta, k, l1Sensitivity := 60.0, 0.01, 25.0, 6.0 // ε is split by 2 for noise and for partition selection, so we use 2*ε to get a Laplace noise with ε. pcol := MakePrivate(s, col, NewPrivacySpec(2*epsilon, delta)) pcol = ParDo(s, testutils.TripleWithFloatValueToKV, pcol) got := SumPerKey(s, pcol, SumParams{MinValue: 2.0, MaxValue: 3.0, MaxPartitionsContributed: 2, NoiseKind: LaplaceNoise{}}) want = beam.ParDo(s, testutils.Float64MetricToKV, want) if err := testutils.ApproxEqualsKVFloat64(s, got, want, testutils.LaplaceTolerance(k, l1Sensitivity, epsilon)); err != nil { t.Fatalf("TestSumPerKeyPerPartitionContributionBoundingFloat: %v", err) } if err := ptest.Run(p); err != nil { t.Errorf("TestSumPerKeyPerPartitionContributionBoundingFloat: SumPerKey(%v) = %v, expected %v: %v", col, got, want, err) } }
explode_data.jsonl/42970
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 727 }
[ 2830, 3393, 9190, 3889, 1592, 3889, 49978, 1109, 31140, 37909, 5442, 1155, 353, 8840, 836, 8, 341, 2405, 88561, 3056, 1944, 6031, 836, 461, 694, 2354, 5442, 1130, 198, 2023, 877, 1669, 220, 16, 26, 877, 2651, 220, 20, 15, 26, 877, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestBuildGkeCloudProvider(t *testing.T) { gkeManagerMock := &gkeManagerMock{} resourceLimiter := cloudprovider.NewResourceLimiter( map[string]int64{cloudprovider.ResourceNameCores: 1, cloudprovider.ResourceNameMemory: 10000000}, map[string]int64{cloudprovider.ResourceNameCores: 10, cloudprovider.ResourceNameMemory: 100000000}) provider, err := BuildGkeCloudProvider(gkeManagerMock, resourceLimiter) assert.NoError(t, err) assert.NotNil(t, provider) }
explode_data.jsonl/30476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 11066, 38, 440, 16055, 5179, 1155, 353, 8840, 836, 8, 341, 3174, 440, 2043, 11571, 1669, 609, 70, 440, 2043, 11571, 31483, 50346, 43, 17700, 1669, 9437, 19979, 7121, 4783, 43, 17700, 1006, 197, 19567, 14032, 63025, 21, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRoleList(t *testing.T) { _, roleServ := initRoleTest() regularParam := getRegularParam("bas_roles.id asc") regularParam.Filter = "description[like]'searchTerm1'" samples := []struct { params param.Param count int64 err error }{ { params: param.Param{}, err: nil, count: 13, }, { params: regularParam, err: nil, count: 3, }, } for _, v := range samples { _, count, err := roleServ.List(v.params) if (v.err == nil && err != nil) || (v.err != nil && err == nil) || count != v.count { t.Errorf("FOR :::%+v::: \nRETURNS :::%+v:::, \nIT SHOULD BE :::%+v:::", v.params, count, v.count) } time.Sleep(1 * time.Second) } }
explode_data.jsonl/15124
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 9030, 852, 1155, 353, 8840, 836, 8, 341, 197, 6878, 3476, 39159, 1669, 2930, 9030, 2271, 741, 197, 22308, 2001, 1669, 633, 30404, 2001, 445, 17797, 43061, 1764, 14601, 1138, 197, 22308, 2001, 31696, 284, 330, 4684, 58, 4803,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestMaybeRemove(t *testing.T) { defer leaktest.AfterTest(t) store, _, stopper := createTestStoreWithoutStart(t) defer stopper.Stop() // Add a queue to the scanner before starting the store and running the scanner. // This is necessary to avoid data race. fq := &fakeRangeQueue{ maybeRemovedRngs: make(chan *Replica), } store.scanner.AddQueues(fq) if err := store.Start(stopper); err != nil { t.Fatal(err) } store.WaitForInit() rng, err := store.GetReplica(1) if err != nil { t.Error(err) } if err := store.RemoveReplica(rng, *rng.Desc()); err != nil { t.Error(err) } // MaybeRemove is called. removedRng := <-fq.maybeRemovedRngs if removedRng != rng { t.Errorf("Unexpected removed range %v", removedRng) } }
explode_data.jsonl/44486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 21390, 13021, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 340, 57279, 11, 8358, 2936, 712, 1669, 1855, 2271, 6093, 26040, 3479, 1155, 340, 16867, 2936, 712, 30213, 2822, 197, 322, 2691, 264, 7177, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDyn_fixMissingTTL(t *testing.T) { assert.Equal(t, fmt.Sprintf("%v", dynDefaultTTL), fixMissingTTL(endpoint.TTL(0), 0)) // nothing to fix assert.Equal(t, "111", fixMissingTTL(endpoint.TTL(111), 25)) // apply min TTL assert.Equal(t, "1992", fixMissingTTL(endpoint.TTL(111), 1992)) }
explode_data.jsonl/9816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 95709, 36060, 25080, 51, 13470, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 1155, 11, 8879, 17305, 4430, 85, 497, 31070, 3675, 51, 13470, 701, 5046, 25080, 51, 13470, 54869, 836, 13470, 7, 15, 701, 220, 15, 4390, 197, 322, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMessage(t *testing.T) { tests := []raftpb.Message{ { Type: raftpb.MsgApp, From: 1, To: 2, Term: 1, LogTerm: 1, Index: 3, Entries: []raftpb.Entry{{Term: 1, Index: 4}}, }, { Type: raftpb.MsgProp, From: 1, To: 2, Entries: []raftpb.Entry{ {Data: []byte("some data")}, {Data: []byte("some data")}, {Data: []byte("some data")}, }, }, linkHeartbeatMessage, } for i, tt := range tests { b := &bytes.Buffer{} enc := &messageEncoder{w: b} if err := enc.encode(tt); err != nil { t.Errorf("#%d: unexpected encode message error: %v", i, err) continue } dec := &messageDecoder{r: b} m, err := dec.decode() if err != nil { t.Errorf("#%d: unexpected decode message error: %v", i, err) continue } if !reflect.DeepEqual(m, tt) { t.Errorf("#%d: message = %+v, want %+v", i, m, tt) } } }
explode_data.jsonl/24892
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 444 }
[ 2830, 3393, 2052, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 2944, 16650, 8472, 515, 197, 197, 515, 298, 27725, 25, 262, 52455, 16650, 30365, 2164, 345, 298, 197, 3830, 25, 262, 220, 16, 345, 298, 197, 1249, 25, 414, 220, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestServiceGetClusterResourceAttributes(t *testing.T) { ctx := context.TODO() project := "flytesnacks" domain := "development" t.Run("happy case", func(t *testing.T) { var attributes = map[string]string{ "K1": "V1", "K2": "V2", } mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnGetProjectDomainAttributesMatch(ctx, mock.MatchedBy(func(req *admin.ProjectDomainAttributesGetRequest) bool { return req.Project == project && req.Domain == domain && req.ResourceType == admin.MatchableResource_CLUSTER_RESOURCE })).Return(&admin.ProjectDomainAttributesGetResponse{ Attributes: &admin.ProjectDomainAttributes{ MatchingAttributes: &admin.MatchingAttributes{ Target: &admin.MatchingAttributes_ClusterResourceAttributes{ ClusterResourceAttributes: &admin.ClusterResourceAttributes{ Attributes: attributes, }, }, }, }, }, nil) provider := serviceAdminProvider{ adminClient: &mockAdmin, } attrs, err := provider.GetClusterResourceAttributes(context.TODO(), project, domain) assert.NoError(t, err) assert.EqualValues(t, attrs.Attributes, attributes) }) t.Run("admin service error", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnGetProjectDomainAttributesMatch(ctx, mock.MatchedBy(func(req *admin.ProjectDomainAttributesGetRequest) bool { return req.Project == project && req.Domain == domain && req.ResourceType == admin.MatchableResource_CLUSTER_RESOURCE })).Return(&admin.ProjectDomainAttributesGetResponse{}, errFoo) provider := serviceAdminProvider{ adminClient: &mockAdmin, } _, err := provider.GetClusterResourceAttributes(context.TODO(), project, domain) assert.EqualError(t, err, errFoo.Error()) }) t.Run("wonky admin service response", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnGetProjectDomainAttributesMatch(ctx, mock.MatchedBy(func(req *admin.ProjectDomainAttributesGetRequest) bool { return req.Project == project && req.Domain == domain && req.ResourceType == admin.MatchableResource_CLUSTER_RESOURCE 
})).Return(&admin.ProjectDomainAttributesGetResponse{ Attributes: &admin.ProjectDomainAttributes{ MatchingAttributes: &admin.MatchingAttributes{ Target: &admin.MatchingAttributes_ExecutionQueueAttributes{ ExecutionQueueAttributes: &admin.ExecutionQueueAttributes{ Tags: []string{"foo", "bar", "baz"}, }, }, }, }, }, nil) provider := serviceAdminProvider{ adminClient: &mockAdmin, } attrs, err := provider.GetClusterResourceAttributes(context.TODO(), project, domain) assert.Nil(t, attrs) s, ok := status.FromError(err) assert.True(t, ok) assert.Equal(t, s.Code(), codes.NotFound) }) }
explode_data.jsonl/67474
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 954 }
[ 2830, 3393, 1860, 1949, 28678, 4783, 10516, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 90988, 741, 72470, 1669, 330, 21642, 2338, 77, 7821, 698, 2698, 3121, 1669, 330, 29571, 698, 3244, 16708, 445, 56521, 1142, 497, 2915, 1155, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSlice3(t *testing.T) { xs := []int{1, 2, 3, 4, 5, 6, 7, 8} v := ValueOf(xs).Slice3(3, 5, 7).Interface().([]int) if len(v) != 2 { t.Errorf("len(xs.Slice3(3, 5, 7)) = %d", len(v)) } if cap(v) != 4 { t.Errorf("cap(xs.Slice3(3, 5, 7)) = %d", cap(v)) } if !DeepEqual(v[0:4], xs[3:7:7]) { t.Errorf("xs.Slice3(3, 5, 7)[0:4] = %v", v[0:4]) } rv := ValueOf(&xs).Elem() shouldPanic(func() { rv.Slice3(1, 2, 1) }) shouldPanic(func() { rv.Slice3(1, 1, 11) }) shouldPanic(func() { rv.Slice3(2, 2, 1) }) xa := [8]int{10, 20, 30, 40, 50, 60, 70, 80} v = ValueOf(&xa).Elem().Slice3(2, 5, 6).Interface().([]int) if len(v) != 3 { t.Errorf("len(xa.Slice(2, 5, 6)) = %d", len(v)) } if cap(v) != 4 { t.Errorf("cap(xa.Slice(2, 5, 6)) = %d", cap(v)) } if !DeepEqual(v[0:4], xa[2:6:6]) { t.Errorf("xs.Slice(2, 5, 6)[0:4] = %v", v[0:4]) } rv = ValueOf(&xa).Elem() shouldPanic(func() { rv.Slice3(1, 2, 1) }) shouldPanic(func() { rv.Slice3(1, 1, 11) }) shouldPanic(func() { rv.Slice3(2, 2, 1) }) s := "hello world" rv = ValueOf(&s).Elem() shouldPanic(func() { rv.Slice3(1, 2, 3) }) rv = ValueOf(&xs).Elem() rv = rv.Slice3(3, 5, 7) ptr2 := rv.Pointer() rv = rv.Slice3(4, 4, 4) ptr3 := rv.Pointer() if ptr3 != ptr2 { t.Errorf("xs.Slice3(3,5,7).Slice3(4,4,4).Pointer() = %#x, want %#x", ptr3, ptr2) } }
explode_data.jsonl/29578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 747 }
[ 2830, 3393, 33236, 18, 1155, 353, 8840, 836, 8, 341, 10225, 82, 1669, 3056, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, 220, 21, 11, 220, 22, 11, 220, 23, 532, 5195, 1669, 5162, 2124, 53322, 568, 33236...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetDock(t *testing.T) { dck, err := fc.GetDock(c.NewAdminContext(), "") if err != nil { t.Error("Get dock failed:", err) } var expected = &SampleDocks[0] if !reflect.DeepEqual(dck, expected) { t.Errorf("Expected %+v, got %+v\n", expected, dck) } }
explode_data.jsonl/50706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 1949, 41468, 1155, 353, 8840, 836, 8, 341, 2698, 377, 11, 1848, 1669, 25563, 2234, 41468, 1337, 7121, 7210, 1972, 1507, 14676, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 445, 1949, 27549, 4641, 12147, 1848, 340, 197, 630, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMapProxy_TryPutWithNonSerializableKey(t *testing.T) { _, err := mp.TryPut(student{}, "test") AssertErrorNotNil(t, err, "tryPut did not return an error for nonserializable key") mp.Clear() }
explode_data.jsonl/57044
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 2227, 16219, 1139, 884, 19103, 2354, 8121, 29268, 1592, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 19824, 19103, 39004, 22655, 330, 1944, 1138, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, 1539, 19103, 1521, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSugarStructuredLogging(t *testing.T) { tests := []struct { msg string expectMsg string }{ {"foo", "foo"}, {"", ""}, } // Common to all test cases. context := []interface{}{"foo", "bar"} extra := []interface{}{"baz", false} expectedFields := []Field{String("foo", "bar"), Bool("baz", false)} for _, tt := range tests { withSugar(t, TraceLevel, nil, func(logger *SugaredLogger, logs *observer.ObservedLogs) { logger.With(context...).Tracew(tt.msg, extra...) logger.With(context...).Debugw(tt.msg, extra...) logger.With(context...).Infow(tt.msg, extra...) logger.With(context...).Warnw(tt.msg, extra...) logger.With(context...).Errorw(tt.msg, extra...) logger.With(context...).DPanicw(tt.msg, extra...) expected := make([]observer.LoggedEntry, 6) for i, lvl := range []zapcore.Level{TraceLevel, DebugLevel, InfoLevel, WarnLevel, ErrorLevel, DPanicLevel} { expected[i] = observer.LoggedEntry{ Entry: zapcore.Entry{Message: tt.expectMsg, Level: lvl}, Context: expectedFields, } } assert.Equal(t, expected, logs.AllUntimed(), "Unexpected log output.") }) } }
explode_data.jsonl/5023
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 83414, 97457, 34575, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 21169, 981, 914, 198, 197, 24952, 6611, 914, 198, 197, 59403, 197, 197, 4913, 7975, 497, 330, 7975, 7115, 197, 197, 4913, 497, 77496, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDelEntry(t *testing.T) { for i := range testCases { fcmd := fakeexec.FakeCmd{ CombinedOutputScript: []fakeexec.FakeAction{ // Success func() ([]byte, []byte, error) { return []byte{}, nil, nil }, // Failure func() ([]byte, []byte, error) { return []byte("ipset v6.19: Element cannot be deleted from the set: it's not added"), nil, &fakeexec.FakeExitError{Status: 1} }, }, } fexec := fakeexec.FakeExec{ CommandScript: []fakeexec.FakeCommandAction{ func(cmd string, args ...string) exec.Cmd { return fakeexec.InitFakeCmd(&fcmd, cmd, args...) }, func(cmd string, args ...string) exec.Cmd { return fakeexec.InitFakeCmd(&fcmd, cmd, args...) }, }, } runner := New(&fexec) err := runner.DelEntry(testCases[i].entry.String(), testCases[i].set.Name) if err != nil { t.Errorf("expected success, got %v", err) } if fcmd.CombinedOutputCalls != 1 { t.Errorf("expected 1 CombinedOutput() calls, got %d", fcmd.CombinedOutputCalls) } if !sets.NewString(fcmd.CombinedOutputLog[0]...).HasAll(testCases[i].delCombinedOutputLog...) { t.Errorf("wrong CombinedOutput() log, got %s", fcmd.CombinedOutputLog[0]) } err = runner.DelEntry(testCases[i].entry.String(), testCases[i].set.Name) if err == nil { t.Errorf("expected failure, got nil") } } }
explode_data.jsonl/74011
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 521 }
[ 2830, 3393, 16532, 5874, 1155, 353, 8840, 836, 8, 341, 2023, 600, 1669, 2088, 1273, 37302, 341, 197, 1166, 8710, 1669, 12418, 11748, 991, 726, 15613, 515, 298, 197, 94268, 5097, 5910, 25, 3056, 30570, 11748, 991, 726, 2512, 515, 571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMeterDecay(t *testing.T) { ma := meterArbiter{ ticker: time.NewTicker(time.Millisecond), meters: make(map[*StandardMeter]struct{}), } m := newStandardMeter() ma.meters[m] = struct{}{} go ma.tick() m.Mark(1) rateMean := m.RateMean() time.Sleep(100 * time.Millisecond) if m.RateMean() >= rateMean { t.Error("m.RateMean() didn't decrease") } }
explode_data.jsonl/10859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 68224, 4900, 352, 1155, 353, 8840, 836, 8, 341, 197, 1728, 1669, 22962, 6953, 65, 2015, 515, 197, 3244, 5215, 25, 882, 7121, 87278, 9730, 71482, 1326, 197, 2109, 2424, 25, 1281, 9147, 33836, 19781, 68224, 60, 1235, 6257, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHostQueueWriteErrorAfterClose(t *testing.T) { opts := newHostQueueTestOptions() queue := newTestHostQueue(opts) queue.Open() queue.Close() assert.Error(t, queue.Enqueue(&writeOperation{})) }
explode_data.jsonl/54462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 9296, 7554, 7985, 1454, 6025, 7925, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 501, 9296, 7554, 2271, 3798, 741, 46993, 1669, 501, 2271, 9296, 7554, 30885, 340, 46993, 12953, 741, 46993, 10421, 741, 6948, 6141, 1155, 11, 717...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestMove(t *testing.T) { var tests = []struct { desc string a Action action string err error }{ { desc: "move OK", a: Move("nw_src", "nw_dst"), action: "move:nw_src->nw_dst", }, { desc: "both empty", a: Move("", ""), err: errMoveEmpty, }, { desc: "src empty", a: Move("", "nw_dst"), err: errMoveEmpty, }, { desc: "dst empty", a: Move("nw_src", ""), err: errMoveEmpty, }, } for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { action, err := tt.a.MarshalText() if want, got := tt.err, err; want != got { t.Fatalf("unexpected error:\n- want: %v\n- got: %v", want, got) } if err != nil { return } if want, got := tt.action, string(action); want != got { t.Fatalf("unexpected Action:\n- want: %q\n- got: %q", want, got) } }) } }
explode_data.jsonl/49519
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 460 }
[ 2830, 3393, 9860, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 41653, 256, 914, 198, 197, 11323, 414, 5586, 198, 197, 38933, 914, 198, 197, 9859, 262, 1465, 198, 197, 59403, 197, 197, 515, 298, 41653, 25, 256...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAcquireIfFirst(t *testing.T) { // GIVEN permitFunction := func() bool { return true } lock := state.NewLock(1) handler := NewLockHandler(lock, timeout, permitFunction) req, _ := http.NewRequest("GET", "/", nil) // WHEN rr := prepareResponseRecorder(req, handler) // THEN assertResponseStatusCode(http.StatusOK, rr.Code, t) }
explode_data.jsonl/66021
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 11654, 984, 2679, 5338, 1155, 353, 8840, 836, 8, 341, 197, 322, 89836, 198, 197, 39681, 5152, 1669, 2915, 368, 1807, 341, 197, 853, 830, 198, 197, 532, 58871, 1669, 1584, 7121, 11989, 7, 16, 340, 53326, 1669, 1532, 11989, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeliveryServices(t *testing.T) { WithObjs(t, []TCObj{CDNs, Types, Tenants, Users, Parameters, Profiles, Statuses, Divisions, Regions, PhysLocations, CacheGroups, Servers, Topologies, ServerCapabilities, DeliveryServices}, func() { currentTime := time.Now().UTC().Add(-5 * time.Second) ti := currentTime.Format(time.RFC1123) var header http.Header header = make(map[string][]string) header.Set(rfc.IfModifiedSince, ti) header.Set(rfc.IfUnmodifiedSince, ti) if includeSystemTests { SSLDeliveryServiceCDNUpdateTest(t) } GetTestDeliveryServicesIMS(t) GetAccessibleToTest(t) UpdateTestDeliveryServices(t) UpdateValidateORGServerCacheGroup(t) UpdateTestDeliveryServicesWithHeaders(t, header) UpdateNullableTestDeliveryServices(t) UpdateDeliveryServiceWithInvalidRemapText(t) UpdateDeliveryServiceWithInvalidSliceRangeRequest(t) UpdateDeliveryServiceWithInvalidTopology(t) GetTestDeliveryServicesIMSAfterChange(t, header) UpdateDeliveryServiceTopologyHeaderRewriteFields(t) GetTestDeliveryServices(t) GetInactiveTestDeliveryServices(t) GetTestDeliveryServicesCapacity(t) DeliveryServiceMinorVersionsTest(t) DeliveryServiceTenancyTest(t) PostDeliveryServiceTest(t) header = make(map[string][]string) etag := rfc.ETag(currentTime) header.Set(rfc.IfMatch, etag) UpdateTestDeliveryServicesWithHeaders(t, header) }) }
explode_data.jsonl/36524
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 471 }
[ 2830, 3393, 38121, 11025, 1155, 353, 8840, 836, 8, 341, 197, 2354, 4121, 2519, 1155, 11, 3056, 7749, 5261, 90, 6484, 47360, 11, 20768, 11, 17695, 1783, 11, 14627, 11, 13522, 11, 71727, 11, 8104, 288, 11, 8765, 6805, 11, 77347, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_Mock_Called_Unexpected(t *testing.T) { var mockedService = new(TestExampleImplementation) // make sure it panics if no expectation was made assert.Panics(t, func() { mockedService.Called(1, 2, 3) }, "Calling unexpected method should panic") }
explode_data.jsonl/8596
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 1245, 1176, 920, 4736, 62, 29430, 1155, 353, 8840, 836, 8, 8022, 2405, 46149, 1860, 284, 501, 31159, 13314, 36850, 7229, 197, 322, 1281, 2704, 432, 7215, 1211, 421, 902, 30193, 572, 1865, 319, 6948, 1069, 276, 1211, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRateLimiter_AllowedAfterBlocked(t *testing.T) { // 10 requests per second limit limiter := New(10, time.Second) // a limit of 10 tokens should be allowed on a second for i := 0; i < 10; i++ { date := fmt.Sprintf("2000-01-01T00:00:00.%d00Z", i) mockClock(limiter, date) assert.True(t, limiter.Allow("pable")) } // the 11th token withing the same second, musn't be allowed. mockClock(limiter, "2000-01-01T00:00:00.900Z") // user is throttled assert.False(t, limiter.Allow("pable")) // one second after the first event, the session shoud have expired. // up to extra 10 have to be available again for i := 0; i < 10; i++ { date := fmt.Sprintf("2000-01-01T00:00:01.%d00Z", i) mockClock(limiter, date) assert.True(t, limiter.Allow("pable")) } }
explode_data.jsonl/74377
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 11564, 43, 17700, 53629, 12817, 6025, 95847, 1155, 353, 8840, 836, 8, 341, 197, 322, 220, 16, 15, 7388, 817, 2086, 3930, 198, 197, 4659, 2015, 1669, 1532, 7, 16, 15, 11, 882, 32435, 692, 197, 322, 264, 3930, 315, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCheckAccessToDatabaseUser(t *testing.T) { dbStage, err := types.NewDatabaseServerV3("stage", map[string]string{"env": "stage"}, types.DatabaseServerSpecV3{ Protocol: "protocol", URI: "uri", Hostname: "hostname", HostID: "host_id", }) require.NoError(t, err) dbProd, err := types.NewDatabaseServerV3("prod", map[string]string{"env": "prod"}, types.DatabaseServerSpecV3{ Protocol: "protocol", URI: "uri", Hostname: "hostname", HostID: "host_id", }) require.NoError(t, err) roleDevStage := &types.RoleV4{ Metadata: types.Metadata{Name: "dev-stage", Namespace: defaults.Namespace}, Spec: types.RoleSpecV4{ Allow: types.RoleConditions{ Namespaces: []string{defaults.Namespace}, DatabaseLabels: types.Labels{"env": []string{"stage"}}, DatabaseUsers: []string{types.Wildcard}, }, Deny: types.RoleConditions{ Namespaces: []string{defaults.Namespace}, DatabaseUsers: []string{"superuser"}, }, }, } roleDevProd := &types.RoleV4{ Metadata: types.Metadata{Name: "dev-prod", Namespace: apidefaults.Namespace}, Spec: types.RoleSpecV4{ Allow: types.RoleConditions{ Namespaces: []string{apidefaults.Namespace}, DatabaseLabels: types.Labels{"env": []string{"prod"}}, DatabaseUsers: []string{"dev"}, }, }, } type access struct { server types.DatabaseServer dbUser string access bool } testCases := []struct { name string roles RoleSet access []access }{ { name: "developer allowed any username in stage database except superuser", roles: RoleSet{roleDevStage, roleDevProd}, access: []access{ {server: dbStage, dbUser: "superuser", access: false}, {server: dbStage, dbUser: "dev", access: true}, {server: dbStage, dbUser: "test", access: true}, }, }, { name: "developer allowed only specific username/database in prod database", roles: RoleSet{roleDevStage, roleDevProd}, access: []access{ {server: dbProd, dbUser: "superuser", access: false}, {server: dbProd, dbUser: "dev", access: true}, }, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { for _, access := range tc.access { err := 
tc.roles.CheckAccessToDatabase(access.server, AccessMFAParams{}, &DatabaseLabelsMatcher{Labels: access.server.GetAllLabels()}, &DatabaseUserMatcher{User: access.dbUser}) if access.access { require.NoError(t, err) } else { require.Error(t, err) require.True(t, trace.IsAccessDenied(err)) } } }) } }
explode_data.jsonl/40083
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1061 }
[ 2830, 3393, 3973, 6054, 1249, 5988, 1474, 1155, 353, 8840, 836, 8, 341, 20939, 19398, 11, 1848, 1669, 4494, 7121, 5988, 5475, 53, 18, 445, 20743, 756, 197, 19567, 14032, 30953, 4913, 3160, 788, 330, 20743, 7115, 197, 98785, 25008, 5475,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateVolumeFromSnapshotDBEntry(t *testing.T) { var in = &model.VolumeSpec{ BaseModel: &model.BaseModel{}, Name: "volume sample", Description: "This is a sample volume for testing", Size: int64(1), ProfileId: "3769855c-a102-11e7-b772-17b880d2f537", Status: model.VolumeCreating, SnapshotId: "3769855c-a102-11e7-b772-17b880d2f537", } var snap = &model.VolumeSnapshotSpec{ BaseModel: &model.BaseModel{ Id: "3769855c-a102-11e7-b772-17b880d2f537", }, Size: int64(1), Status: model.VolumeSnapAvailable, } t.Run("Everything should work well", func(t *testing.T) { mockClient := new(dbtest.Client) mockClient.On("CreateVolume", context.NewAdminContext(), in).Return(&SampleVolumes[1], nil) mockClient.On("GetVolumeSnapshot", context.NewAdminContext(), "3769855c-a102-11e7-b772-17b880d2f537").Return(snap, nil) db.C = mockClient var expected = &SampleVolumes[1] result, err := CreateVolumeDBEntry(context.NewAdminContext(), in) if err != nil { t.Errorf("failed to create volume with snapshot, err is %v\n", err) } assertTestResult(t, result, expected) }) t.Run("The status of volume snapshot should always be available", func(t *testing.T) { snap.Status = model.VolumeSnapError mockClient := new(dbtest.Client) mockClient.On("CreateVolume", context.NewAdminContext(), in).Return(&SampleVolumes[1], nil) mockClient.On("GetVolumeSnapshot", context.NewAdminContext(), "3769855c-a102-11e7-b772-17b880d2f537").Return(snap, nil) db.C = mockClient _, err := CreateVolumeDBEntry(context.NewAdminContext(), in) expectedError := "only if the snapshot is available, the volume can be created" assertTestResult(t, err.Error(), expectedError) }) t.Run("Size of volume should always be equal to or bigger than size of the snapshot", func(t *testing.T) { snap.Status, snap.Size = model.VolumeSnapAvailable, 10 mockClient := new(dbtest.Client) mockClient.On("CreateVolume", context.NewAdminContext(), in).Return(&SampleVolumes[1], nil) mockClient.On("GetVolumeSnapshot", context.NewAdminContext(), 
"3769855c-a102-11e7-b772-17b880d2f537").Return(snap, nil) db.C = mockClient _, err := CreateVolumeDBEntry(context.NewAdminContext(), in) expectedError := "size of volume must be equal to or bigger than size of the snapshot" assertTestResult(t, err.Error(), expectedError) }) }
explode_data.jsonl/29972
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 880 }
[ 2830, 3393, 4021, 18902, 3830, 15009, 3506, 5874, 1155, 353, 8840, 836, 8, 341, 2405, 304, 284, 609, 2528, 79106, 8327, 515, 197, 66732, 1712, 25, 256, 609, 2528, 13018, 1712, 38837, 197, 21297, 25, 286, 330, 25060, 6077, 756, 197, 47...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUnmarshalMap(t *testing.T) { n, err := Parse(exampleJSON) if err != nil || n == nil { t.Fatal(err) } m1, _ := n.Map() var m2 map[string]interface{} if err := json.Unmarshal([]byte(exampleJSON), &m2); err != nil { t.Fatal(err) } b1, err := json.Marshal(m1) if err != nil { t.Fatal(err) } b2, err := json.Marshal(m2) if err != nil { t.Fatal(err) } if !bytes.Equal(b1, b2) { t.Fatalf("b1 != b2\n b1: %v\nb2: %v", string(b1), string(b2)) } }
explode_data.jsonl/62348
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 296 }
[ 2830, 3393, 1806, 27121, 2227, 1155, 353, 8840, 836, 8, 341, 262, 308, 11, 1848, 1669, 14775, 66203, 5370, 340, 262, 421, 1848, 961, 2092, 1369, 308, 621, 2092, 341, 286, 259, 26133, 3964, 340, 262, 456, 262, 296, 16, 11, 716, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func Test_computeFloor(t *testing.T) { data := []struct { text string expected int expectedMessage string }{ {"(())", 0, ""}, {"()()", 0, ""}, {"(()(()(", 3, ""}, {"))(((((", 3, ""}, {")())())", -3, ""}, {"(X)", 0, "bad character"}, } for _, d := range data { floor, err := computeFloor(d.text) if floor != d.expected { t.Errorf("for '%s' expected %d but got %d", d.text, d.expected, floor) } actualMsg := "" if err != nil { actualMsg = err.Error() } if actualMsg != d.expectedMessage { t.Errorf("for '%s' expected error '%s' but got '%s'", d.text, d.expectedMessage, actualMsg) } } }
explode_data.jsonl/30717
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 298 }
[ 2830, 3393, 57028, 51444, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 1235, 341, 197, 15425, 310, 914, 198, 197, 42400, 286, 526, 198, 197, 42400, 2052, 914, 198, 197, 59403, 197, 197, 4913, 7, 2140, 497, 220, 15, 11, 77496, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestEverything(t *testing.T) { go server.Run() time.Sleep(time.Second * 2) paths := []string{ "sandbox/client1/", "sandbox/client2/", } ignores := make(map[string]bool) for _, value := range paths { err := boxtools.CleanTestFolder(value, ignores, true) if err != nil { panic("Could not delete folder contents") } go func(value string) { cmd := exec.Command( "go", "run", "client/client.go", value) cmd.Stdout = os.Stdout cmd.Stderr = os.Stderr cmd.Run() }(value) } select {} }
explode_data.jsonl/15148
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 34964, 1155, 353, 8840, 836, 8, 341, 30680, 3538, 16708, 741, 21957, 31586, 9730, 32435, 353, 220, 17, 340, 197, 21623, 1669, 3056, 917, 515, 197, 197, 40787, 31536, 25085, 16, 35075, 197, 197, 40787, 31536, 25085, 17, 35075...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateOidcForSuspendedInstance(t *testing.T) { // given suite := NewBrokerSuiteTest(t) // uncomment to see graphql queries //suite.EnableDumpingProvisionerRequests() defer suite.TearDown() iid := uuid.New().String() resp := suite.CallAPI("PUT", fmt.Sprintf("oauth/cf-eu10/v2/service_instances/%s?accepts_incomplete=true&plan_id=7d55d31d-35ae-4438-bf13-6ffdfa107d9f&service_id=47c9dcbf-ff30-448e-ab36-d3bad66ba281", iid), `{ "service_id": "47c9dcbf-ff30-448e-ab36-d3bad66ba281", "plan_id": "7d55d31d-35ae-4438-bf13-6ffdfa107d9f", "context": { "sm_platform_credentials": { "url": "https://sm.url", "credentials": {} }, "globalaccount_id": "g-account-id", "subaccount_id": "sub-id", "user_id": "john.smith@email.com" }, "parameters": { "name": "testing-cluster", "oidc": { "clientID": "id-ooo", "signingAlgs": ["RSA256"], "issuerURL": "https://issuer.url.com" } } }`) opID := suite.DecodeOperationID(resp) suite.processProvisioningByOperationID(opID) suite.Log("*** Suspension ***") // Process Suspension // OSB context update (suspension) resp = suite.CallAPI("PATCH", fmt.Sprintf("oauth/cf-eu10/v2/service_instances/%s?accepts_incomplete=true", iid), `{ "service_id": "47c9dcbf-ff30-448e-ab36-d3bad66ba281", "plan_id": "7d55d31d-35ae-4438-bf13-6ffdfa107d9f", "context": { "globalaccount_id": "g-account-id", "user_id": "john.smith@email.com", "active": false } }`) assert.Equal(t, http.StatusOK, resp.StatusCode) suspensionOpID := suite.WaitForLastOperation(iid, domain.InProgress) suite.FinishDeprovisioningOperationByProvisioner(suspensionOpID) suite.WaitForOperationState(suspensionOpID, domain.Succeeded) // WHEN // OSB update suite.Log("*** Update ***") resp = suite.CallAPI("PATCH", fmt.Sprintf("oauth/cf-eu10/v2/service_instances/%s?accepts_incomplete=true", iid), `{ "service_id": "47c9dcbf-ff30-448e-ab36-d3bad66ba281", "plan_id": "7d55d31d-35ae-4438-bf13-6ffdfa107d9f", "context": { "globalaccount_id": "g-account-id", "user_id": "john.smith@email.com" }, "parameters": { "oidc": { 
"clientID": "id-oooxx", "signingAlgs": ["RSA256"], "issuerURL": "https://issuer.url.com" } } }`) assert.Equal(t, http.StatusAccepted, resp.StatusCode) updateOpID := suite.DecodeOperationID(resp) suite.WaitForOperationState(updateOpID, domain.Succeeded) // THEN instance := suite.GetInstance(iid) assert.Equal(t, "id-oooxx", instance.Parameters.Parameters.OIDC.ClientID) // Start unsuspension // OSB update (unsuspension) suite.Log("*** Update (unsuspension) ***") resp = suite.CallAPI("PATCH", fmt.Sprintf("oauth/cf-eu10/v2/service_instances/%s", iid), `{ "service_id": "47c9dcbf-ff30-448e-ab36-d3bad66ba281", "plan_id": "7d55d31d-35ae-4438-bf13-6ffdfa107d9f", "context": { "globalaccount_id": "g-account-id", "user_id": "john.smith@email.com", "active": true } }`) assert.Equal(t, http.StatusOK, resp.StatusCode) // WHEN suite.processProvisioningByInstanceID(iid) // THEN instance = suite.GetInstance(iid) assert.Equal(t, "id-oooxx", instance.Parameters.Parameters.OIDC.ClientID) input := suite.LastProvisionInput(iid) assert.Equal(t, "id-oooxx", input.ClusterConfig.GardenerConfig.OidcConfig.ClientID) }
explode_data.jsonl/8119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1698 }
[ 2830, 3393, 4289, 46, 307, 66, 2461, 50, 66349, 2523, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 96572, 1669, 1532, 65545, 28000, 2271, 1155, 340, 197, 322, 62073, 311, 1490, 48865, 19556, 198, 197, 322, 49992, 32287, 51056, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_GetModifiedFilesPagination(t *testing.T) { respTemplate := ` { "pagelen": 1, "values": [ { "type": "diffstat", "status": "modified", "lines_removed": 1, "lines_added": 2, "old": { "path": "%s", "type": "commit_file", "links": { "self": { "href": "https://api.bitbucket.org/2.0/repositories/bitbucket/geordi/src/e1749643d655d7c7014001a6c0f58abaf42ad850/setup.py" } } }, "new": { "path": "%s", "type": "commit_file", "links": { "self": { "href": "https://api.bitbucket.org/2.0/repositories/bitbucket/geordi/src/d222fa235229c55dad20b190b0b571adf737d5a6/setup.py" } } } } ], "page": 1, "size": 1 ` firstResp := fmt.Sprintf(respTemplate, "file1.txt", "file2.txt") secondResp := fmt.Sprintf(respTemplate, "file2.txt", "file3.txt") var serverURL string testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { // The first request should hit this URL. case "/2.0/repositories/owner/repo/pullrequests/1/diffstat": resp := firstResp + fmt.Sprintf(`,"next": "%s/2.0/repositories/owner/repo/pullrequests/1/diffstat?page=2"}`, serverURL) w.Write([]byte(resp)) // nolint: errcheck return // The second should hit this URL. case "/2.0/repositories/owner/repo/pullrequests/1/diffstat?page=2": w.Write([]byte(secondResp + "}")) // nolint: errcheck default: t.Errorf("got unexpected request at %q", r.RequestURI) http.Error(w, "not found", http.StatusNotFound) return } })) defer testServer.Close() serverURL = testServer.URL client := bitbucketcloud.NewClient(http.DefaultClient, "user", "pass", "runatlantis.io") client.BaseURL = testServer.URL files, err := client.GetModifiedFiles(models.Repo{ FullName: "owner/repo", Owner: "owner", Name: "repo", CloneURL: "", SanitizedCloneURL: "", VCSHost: models.VCSHost{ Type: models.BitbucketCloud, Hostname: "bitbucket.org", }, }, models.PullRequest{ Num: 1, }) Ok(t, err) Equals(t, []string{"file1.txt", "file2.txt", "file3.txt"}, files) }
explode_data.jsonl/53232
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1265 }
[ 2830, 3393, 2959, 13614, 19148, 10809, 44265, 1155, 353, 8840, 836, 8, 341, 34653, 7275, 1669, 22074, 515, 262, 330, 18419, 8398, 788, 220, 16, 345, 262, 330, 3661, 788, 2278, 286, 341, 310, 330, 1313, 788, 330, 13490, 9878, 756, 310,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnmarshalErrors(t *testing.T) { invalidRecords := []string{ "ha", "ha\n", "ha:\n", "ha:_\n", "ha:+32\nma", "ha:+2\nmara", "ha:+los\nma", } // test error paths in UnmarshalRecord for _, s := range invalidRecords { _, err := UnmarshalRecord([]byte(s), nil) assert.Error(t, err, "s: '%s'", s) } }
explode_data.jsonl/44788
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 1806, 27121, 13877, 1155, 353, 8840, 836, 8, 341, 197, 11808, 25876, 1669, 3056, 917, 515, 197, 197, 1, 4223, 756, 197, 197, 1, 4223, 1699, 756, 197, 197, 1, 4223, 7190, 77, 756, 197, 197, 1, 4223, 22035, 59, 77, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIsPotentialMeshErrorResponse(t *testing.T) { for _, test := range []struct { statusCode int expect bool }{{ statusCode: 404, expect: false, }, { statusCode: 200, expect: false, }, { statusCode: 502, expect: false, }, { statusCode: 503, expect: true, }} { t.Run(fmt.Sprintf("statusCode=%d", test.statusCode), func(t *testing.T) { resp := &http.Response{ StatusCode: test.statusCode, } if got := IsPotentialMeshErrorResponse(resp); got != test.expect { t.Errorf("IsPotentialMeshErrorResponse({StatusCode: %d}) = %v, expected %v", resp.StatusCode, got, test.expect) } }) } }
explode_data.jsonl/58835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 3872, 95611, 14194, 55901, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 23847, 2078, 526, 198, 197, 24952, 257, 1807, 198, 197, 15170, 515, 197, 23847, 2078, 25, 220, 19, 15, 19, 345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSessionPoolConfigValidation(t *testing.T) { t.Parallel() sc := testutil.NewMockCloudSpannerClient(t) for _, test := range []struct { spc SessionPoolConfig err error }{ { SessionPoolConfig{}, errNoRPCGetter(), }, { SessionPoolConfig{ getRPCClient: func() (sppb.SpannerClient, error) { return sc, nil }, MinOpened: 10, MaxOpened: 5, }, errMinOpenedGTMaxOpened(5, 10), }, } { if _, err := newSessionPool("mockdb", test.spc, nil); !testEqual(err, test.err) { t.Errorf("want %v, got %v", test.err, err) } } }
explode_data.jsonl/78691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 5283, 10551, 2648, 13799, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 29928, 1669, 1273, 1314, 7121, 11571, 16055, 12485, 1194, 2959, 1155, 340, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 1903, 3992, 9164, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUserUpdateActive(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() Client := th.BasicClient SystemAdminClient := th.SystemAdminClient team := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "test@nowhere.com", Type: model.TEAM_OPEN} team = Client.Must(Client.CreateTeam(team)).Data.(*model.Team) team2 := &model.Team{DisplayName: "Name", Name: "z-z-" + model.NewId() + "a", Email: "test@nowhere.com", Type: model.TEAM_OPEN} team2 = Client.Must(Client.CreateTeam(team2)).Data.(*model.Team) Client.Logout() user := &model.User{Email: "success+" + model.NewId() + "@simulator.amazonses.com", Nickname: "Corey Hulen", Password: "passwd1"} user = Client.Must(Client.CreateUser(user, "")).Data.(*model.User) LinkUserToTeam(user, team) store.Must(app.Srv.Store.User().VerifyEmail(user.Id)) user2 := &model.User{Email: "success+" + model.NewId() + "@simulator.amazonses.com", Nickname: "Corey Hulen", Password: "passwd1"} user2 = Client.Must(Client.CreateUser(user2, "")).Data.(*model.User) LinkUserToTeam(user2, team) store.Must(app.Srv.Store.User().VerifyEmail(user2.Id)) if _, err := Client.UpdateActive(user.Id, false); err == nil { t.Fatal("Should have errored, not logged in") } Client.Login(user2.Email, "passwd1") Client.SetTeamId(team.Id) if _, err := Client.UpdateActive(user.Id, false); err == nil { t.Fatal("Should have errored, not admin") } Client.Must(Client.Logout()) user3 := &model.User{Email: "success+" + model.NewId() + "@simulator.amazonses.com", Nickname: "Corey Hulen", Password: "passwd1"} user3 = Client.Must(Client.CreateUser(user3, "")).Data.(*model.User) LinkUserToTeam(user2, team2) store.Must(app.Srv.Store.User().VerifyEmail(user3.Id)) Client.Login(user3.Email, "passwd1") Client.SetTeamId(team2.Id) if _, err := Client.UpdateActive(user.Id, false); err == nil { t.Fatal("Should have errored, not yourself") } Client.Login(user.Email, "passwd1") Client.SetTeamId(team.Id) if _, err := Client.UpdateActive("junk", false); err == 
nil { t.Fatal("Should have errored, bad id") } if _, err := Client.UpdateActive("12345678901234567890123456", false); err == nil { t.Fatal("Should have errored, bad id") } app.SetStatusOnline(user3.Id, "", false) if _, err := SystemAdminClient.UpdateActive(user3.Id, false); err != nil { t.Fatal(err) } if status, err := app.GetStatus(user3.Id); err != nil { t.Fatal(err) } else if status.Status != model.STATUS_OFFLINE { t.Fatal("status should have been set to offline") } }
explode_data.jsonl/13815
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 975 }
[ 2830, 3393, 1474, 4289, 5728, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 71724, 1669, 270, 48868, 2959, 198, 5816, 7210, 2959, 1669, 270, 16620, 7210, 2959, 271, 197, 9196, 1669, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestDefaultProcessor_AnchorDocument(t *testing.T) { srv := &testingcommons.MockIdentityService{} dp := DefaultProcessor(srv, nil, nil, cfg).(defaultProcessor) ctxh := testingconfig.CreateAccountContext(t, cfg) self, err := contextutil.Account(ctxh) assert.NoError(t, err) sr := utils.RandomSlice(32) sig, err := self.SignMsg(sr) assert.NoError(t, err) // validations failed id := utils.RandomSlice(32) next := utils.RandomSlice(32) model := new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.On("CalculateDocumentRoot").Return(nil, errors.New("error")) model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv err = dp.AnchorDocument(ctxh, model) model.AssertExpectations(t) srv.AssertExpectations(t) assert.Error(t, err) assert.Contains(t, err.Error(), "pre anchor validation failed") // success model = new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.On("CalculateDocumentRoot").Return(utils.RandomSlice(32), nil) model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv repo := mockRepo{} ch := make(chan bool, 1) ch <- true repo.On("CommitAnchor", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(ch, nil).Once() dp.anchorRepository = repo err = dp.AnchorDocument(ctxh, model) model.AssertExpectations(t) srv.AssertExpectations(t) repo.AssertExpectations(t) assert.Nil(t, err) }
explode_data.jsonl/57869
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 718 }
[ 2830, 3393, 3675, 22946, 32699, 6150, 7524, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 609, 8840, 52361, 24664, 18558, 1860, 16094, 55256, 1669, 7899, 22946, 1141, 10553, 11, 2092, 11, 2092, 11, 13286, 68615, 2258, 22946, 340, 20985...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPhp7AbstractClassMethod(t *testing.T) { src := `<? abstract class Foo{ public function bar(): void; }` expected := &node.Root{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 4, EndPos: 53, }, Stmts: []node.Node{ &stmt.Class{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 4, EndPos: 53, }, PhpDocComment: "", ClassName: &node.Identifier{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 19, EndPos: 21, }, Value: "Foo", }, Modifiers: []node.Node{ &node.Identifier{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 4, EndPos: 11, }, Value: "abstract", }, }, Stmts: []node.Node{ &stmt.ClassMethod{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 24, EndPos: 51, }, ReturnsRef: false, PhpDocComment: "", MethodName: &node.Identifier{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 40, EndPos: 42, }, Value: "bar", }, Modifiers: []node.Node{ &node.Identifier{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 24, EndPos: 29, }, Value: "public", }, }, ReturnType: &name.Name{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 47, EndPos: 50, }, Parts: []node.Node{ &name.NamePart{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 47, EndPos: 50, }, Value: "void", }, }, }, Stmt: &stmt.Nop{ Position: &position.Position{ StartLine: 1, EndLine: 1, StartPos: 51, EndPos: 51, }, }, }, }, }, }, } php7parser := php7.NewParser(bytes.NewBufferString(src), "test.php") php7parser.Parse() actual := php7parser.GetRootNode() assert.DeepEqual(t, expected, actual) }
explode_data.jsonl/65932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1318 }
[ 2830, 3393, 50144, 22, 15949, 1957, 3523, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 1565, 1316, 8115, 536, 33428, 90, 584, 729, 3619, 4555, 737, 26, 335, 19324, 42400, 1669, 609, 3509, 45345, 515, 197, 197, 3812, 25, 609, 3487, 21954...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestViewChannel(t *testing.T) { th := Setup().InitBasic().InitSystemAdmin() defer th.TearDown() Client := th.Client view := &model.ChannelView{ ChannelId: th.BasicChannel.Id, } viewResp, resp := Client.ViewChannel(th.BasicUser.Id, view) CheckNoError(t, resp) if viewResp.Status != "OK" { t.Fatal("should have passed") } channel, _ := th.App.GetChannel(th.BasicChannel.Id) if lastViewedAt := viewResp.LastViewedAtTimes[channel.Id]; lastViewedAt != channel.LastPostAt { t.Fatal("LastPostAt does not match returned LastViewedAt time") } view.PrevChannelId = th.BasicChannel.Id _, resp = Client.ViewChannel(th.BasicUser.Id, view) CheckNoError(t, resp) view.PrevChannelId = "" _, resp = Client.ViewChannel(th.BasicUser.Id, view) CheckNoError(t, resp) view.PrevChannelId = "junk" _, resp = Client.ViewChannel(th.BasicUser.Id, view) CheckNoError(t, resp) member, resp := Client.GetChannelMember(th.BasicChannel.Id, th.BasicUser.Id, "") CheckNoError(t, resp) channel, resp = Client.GetChannel(th.BasicChannel.Id, "") CheckNoError(t, resp) if member.MsgCount != channel.TotalMsgCount { t.Fatal("should match message counts") } if member.MentionCount != 0 { t.Fatal("should have no mentions") } _, resp = Client.ViewChannel("junk", view) CheckBadRequestStatus(t, resp) _, resp = Client.ViewChannel(th.BasicUser2.Id, view) CheckForbiddenStatus(t, resp) if r, err := Client.DoApiPost(fmt.Sprintf("/channels/members/%v/view", th.BasicUser.Id), "garbage"); err == nil { t.Fatal("should have errored") } else { if r.StatusCode != http.StatusBadRequest { t.Log("actual: " + strconv.Itoa(r.StatusCode)) t.Log("expected: " + strconv.Itoa(http.StatusBadRequest)) t.Fatal("wrong status code") } } Client.Logout() _, resp = Client.ViewChannel(th.BasicUser.Id, view) CheckUnauthorizedStatus(t, resp) _, resp = th.SystemAdminClient.ViewChannel(th.BasicUser.Id, view) CheckNoError(t, resp) }
explode_data.jsonl/65661
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 733 }
[ 2830, 3393, 851, 9629, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 1005, 3803, 2320, 7210, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 36867, 1669, 609, 2528, 38716, 851, 515, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestContextRemove(t *testing.T) { t.Run("remove current", func(t *testing.T) { c := NewParallelE2eCLI(t, binDir) c.RunDockerCmd("context", "create", "test-context-rm", "--from", "default") res := c.RunDockerCmd("context", "use", "test-context-rm") res.Assert(t, icmd.Expected{Out: "test-context-rm"}) res = c.RunDockerOrExitError("context", "rm", "test-context-rm") res.Assert(t, icmd.Expected{ ExitCode: 1, Err: "cannot delete current context", }) }) t.Run("force remove current", func(t *testing.T) { c := NewParallelE2eCLI(t, binDir) c.RunDockerCmd("context", "create", "test-context-rmf") c.RunDockerCmd("context", "use", "test-context-rmf") res := c.RunDockerCmd("context", "rm", "-f", "test-context-rmf") res.Assert(t, icmd.Expected{Out: "test-context-rmf"}) res = c.RunDockerCmd("context", "ls") res.Assert(t, icmd.Expected{Out: "default *"}) }) }
explode_data.jsonl/5430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 389 }
[ 2830, 3393, 1972, 13021, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 5399, 1482, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 1444, 1669, 1532, 16547, 36, 17, 68, 63959, 1155, 11, 9544, 6184, 692, 197, 1444, 16708, 35, 13659, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServers(t *testing.T) { sc, c, cleanup := createContexts(t) defer cleanup() root, mts, jokes, stopper := createNamespace(t, sc) defer stopper() ns := v23.GetNamespace(c) ns.SetRoots(root.name) // Let's run some non-mount table services for _, j := range []string{j1MP, j2MP, j3MP} { testResolve(t, c, ns, j, jokes[j].name) knockKnock(t, c, j) globalName := naming.JoinAddressName(mts["mt4"].name, j) disp := &dispatcher{} gj := "g_" + j jokes[gj] = runServer(t, c, disp, globalName) testResolve(t, c, ns, "mt4/"+j, jokes[gj].name) knockKnock(t, c, "mt4/"+j) testResolveToMountTable(t, c, ns, "mt4/"+j, globalName) testResolveToMountTable(t, c, ns, "mt4/"+j+"/garbage", globalName+"/garbage") } }
explode_data.jsonl/8227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 78139, 1155, 353, 8840, 836, 8, 341, 29928, 11, 272, 11, 21290, 1669, 83674, 82, 1155, 340, 16867, 21290, 2822, 33698, 11, 296, 2576, 11, 31420, 11, 2936, 712, 1669, 1855, 22699, 1155, 11, 1136, 340, 16867, 2936, 712, 741,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestImportAbsPathAsFile(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/Users/user/project/entry.js": ` import pkg from '/Users/user/project/node_modules/pkg/index' console.log(pkg) `, "/Users/user/project/node_modules/pkg/index.js": ` export default 123 `, }, entryPaths: []string{"/Users/user/project/entry.js"}, options: config.Options{ Mode: config.ModeBundle, AbsOutputDir: "/out", }, }) }
explode_data.jsonl/38530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 11511, 27778, 1820, 2121, 1703, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 7137, 11739, 40118, 14, 4085, 2857, 788, 22074,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainsString(t *testing.T) { assert := require.New(t) input := []string{"one", "three", "five"} assert.True(ContainsString(input, "one")) assert.True(ContainsString(input, "three")) assert.True(ContainsString(input, "five")) assert.False(ContainsString(input, "One")) assert.False(ContainsString(input, "two")) assert.False(ContainsString(input, "Three")) assert.False(ContainsString(input, "four")) assert.False(ContainsString(input, "Five")) assert.False(ContainsString([]string{}, "one")) assert.False(ContainsString([]string{}, "two")) assert.False(ContainsString([]string{}, "")) }
explode_data.jsonl/45560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 23805, 703, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 1373, 7121, 1155, 692, 22427, 1669, 3056, 917, 4913, 603, 497, 330, 27856, 497, 330, 52670, 63159, 6948, 32443, 7, 23805, 703, 5384, 11, 330, 603, 5455, 6948, 32443, 7,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTerragruntBeforeOneArgAction(t *testing.T) { t.Parallel() cleanupTerraformFolder(t, TEST_FIXTURE_HOOKS_ONE_ARG_ACTION_PATH) tmpEnvPath := copyEnvironment(t, TEST_FIXTURE_HOOKS_ONE_ARG_ACTION_PATH) rootPath := util.JoinPath(tmpEnvPath, TEST_FIXTURE_HOOKS_ONE_ARG_ACTION_PATH) var ( stdout bytes.Buffer stderr bytes.Buffer ) err := runTerragruntCommand(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-working-dir %s --terragrunt-log-level debug", rootPath), &stdout, &stderr) output := stderr.String() if err != nil { t.Error("Expected successful execution of terragrunt with 1 before hook execution.") } else { assert.Contains(t, output, "Running command: date") } }
explode_data.jsonl/10070
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 51402, 68305, 3850, 10227, 3966, 2735, 2512, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1444, 60639, 51, 13886, 627, 13682, 1155, 11, 13602, 42635, 41486, 82251, 50, 34727, 13905, 21866, 7944, 340, 20082, 14359, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRPCStream_Concurrency(t *testing.T) { buffer := new(safeBuffer) codec := protoCodec.NewCodec(buffer) streamServer := rpcStream{ codec: codec, request: &rpcRequest{ codec: codec, }, } var wg sync.WaitGroup // Check if race conditions happen for i := 0; i < 10; i++ { wg.Add(2) go func() { for i := 0; i < 50; i++ { msg := protoStruct{Payload: "test"} <-time.After(time.Duration(rand.Intn(50)) * time.Millisecond) if err := streamServer.Send(msg); err != nil { t.Errorf("Unexpected Send error: %s", err) } } wg.Done() }() go func() { for i := 0; i < 50; i++ { <-time.After(time.Duration(rand.Intn(50)) * time.Millisecond) if err := streamServer.Recv(&protoStruct{}); err != nil { t.Errorf("Unexpected Recv error: %s", err) } } wg.Done() }() } wg.Wait() }
explode_data.jsonl/62488
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 391 }
[ 2830, 3393, 29528, 3027, 15100, 15973, 1155, 353, 8840, 836, 8, 341, 31122, 1669, 501, 1141, 5645, 4095, 340, 43343, 66, 1669, 18433, 36913, 7121, 36913, 12584, 340, 44440, 5475, 1669, 35596, 3027, 515, 197, 43343, 66, 25, 34647, 345, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAccDynatraceAlertingProfile_basic(t *testing.T) { rName := fmt.Sprintf("tf-acc-test-%s.dynatrace", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)) name := fmt.Sprintf("%s", rName) resourceName := "dynatrace_alerting_profile.test" resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, IDRefreshName: resourceName, ProviderFactories: testAccProviderFactories, CheckDestroy: testAccCheckDynatraceAlertingProfileDestroy, Steps: []resource.TestStep{ { Config: testAccDynatraceAlertingProfileConfig(name), Check: resource.ComposeAggregateTestCheckFunc( testAccCheckDynatraceAlertingProfileExists(resourceName), resource.TestCheckResourceAttr(resourceName, "display_name", name), resource.TestCheckResourceAttrSet(resourceName, "display_name"), ), }, { Config: testAccDynatraceAlertingProfileConfigModified(name), Check: resource.ComposeAggregateTestCheckFunc( testAccCheckDynatraceAlertingProfileExists(resourceName), resource.TestCheckResourceAttr(resourceName, "display_name", name), resource.TestCheckResourceAttrSet(resourceName, "display_name"), resource.TestCheckResourceAttrSet(resourceName, "rule.0.severity_level"), resource.TestCheckResourceAttrSet(resourceName, "rule.0.tag_filters.0.include_mode"), resource.TestCheckResourceAttrSet(resourceName, "event_type_filter.0.predefined_event_filter.0.negate"), resource.TestCheckResourceAttr(resourceName, "rule.0.severity_level", "ERROR"), ), }, { ResourceName: resourceName, ImportState: true, ImportStateVerify: true, }, }, }) }
explode_data.jsonl/38969
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 642 }
[ 2830, 3393, 14603, 95709, 266, 41183, 9676, 287, 8526, 34729, 1155, 353, 8840, 836, 8, 341, 7000, 675, 1669, 8879, 17305, 445, 8935, 12, 4475, 16839, 11069, 82, 950, 1872, 266, 41183, 497, 1613, 67880, 2013, 437, 703, 3830, 4768, 1649, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertComponentNameToPath(t *testing.T) { val := convertComponentNameToPath("azure.servicebus") assert.Equal(t, "azure/servicebus", val) val = convertComponentNameToPath("a.b.c") assert.Equal(t, "a/b/c", val) val = convertComponentNameToPath("redis") assert.Equal(t, "redis", val) }
explode_data.jsonl/36228
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 12012, 2189, 675, 1249, 1820, 1155, 353, 8840, 836, 8, 341, 19302, 1669, 5508, 2189, 675, 1249, 1820, 445, 39495, 5736, 10338, 1138, 6948, 12808, 1155, 11, 330, 39495, 34186, 10338, 497, 1044, 340, 19302, 284, 5508, 2189, 67...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewBundlerMissingCA(t *testing.T) { badFile := "testdata/no_such_file.pem" _, err := NewBundler(badFile, testIntCaBundle) if err == nil { t.Fatal("Should fail with error code 4001") } // generate a function checking error content errorCheck := ExpectErrorMessage(`"code":4001`) errorCheck(t, err) }
explode_data.jsonl/36876
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 3564, 33, 85365, 25080, 5049, 1155, 353, 8840, 836, 8, 341, 2233, 329, 1703, 1669, 330, 92425, 33100, 643, 1387, 2458, 49373, 698, 197, 6878, 1848, 1669, 1532, 33, 85365, 1883, 329, 1703, 11, 1273, 1072, 22571, 8409, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetUserResponseValuePass(t *testing.T) { assert := assert.New(t) pn := NewPubNub(NewDemoConfig()) opts := &getUserOpts{ pubnub: pn, } jsonBytes := []byte(`{"status":200,"data":{"id":"id0","name":"name","externalId":"extid","profileUrl":"purl","email":"email","custom":{"a":"b","c":"d"},"created":"2019-08-20T13:26:19.140324Z","updated":"2019-08-20T13:26:19.140324Z","eTag":"AbyT4v2p6K7fpQE"}}`) r, _, err := newPNGetUserResponse(jsonBytes, opts, StatusResponse{}) assert.Equal(200, r.Status) assert.Equal("id0", r.Data.ID) assert.Equal("name", r.Data.Name) assert.Equal("extid", r.Data.ExternalID) assert.Equal("purl", r.Data.ProfileURL) assert.Equal("email", r.Data.Email) assert.Equal("2019-08-20T13:26:19.140324Z", r.Data.Created) assert.Equal("2019-08-20T13:26:19.140324Z", r.Data.Updated) assert.Equal("AbyT4v2p6K7fpQE", r.Data.ETag) assert.Equal("b", r.Data.Custom["a"]) assert.Equal("d", r.Data.Custom["c"]) assert.Nil(err) }
explode_data.jsonl/81824
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 1949, 1474, 2582, 1130, 12187, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 3223, 77, 1669, 1532, 29162, 45, 392, 35063, 37413, 2648, 2398, 64734, 1669, 609, 28440, 43451, 515, 197, 62529, 77, 392, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxnEventMarshalWithDatastoreExternal(t *testing.T) { e := sampleTxnEvent e.DatastoreExternalTotals = DatastoreExternalTotals{ externalCallCount: 22, externalDuration: 1122334 * time.Millisecond, datastoreCallCount: 33, datastoreDuration: 5566778 * time.Millisecond, } testTxnEventJSON(t, &e, `[ { "type":"Transaction", "name":"myName", "timestamp":1.488393111e+09, "error":false, "duration":2, "externalCallCount":22, "externalDuration":1122.334, "databaseCallCount":33, "databaseDuration":5566.778, "totalTime":3, "guid":"txn-id", "traceId":"txn-id", "priority":0.500000, "sampled":false }, {}, {}]`) }
explode_data.jsonl/37459
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 31584, 77, 1556, 55438, 80356, 4314, 25913, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 6077, 31584, 77, 1556, 198, 7727, 3336, 4314, 25913, 64602, 1127, 284, 2885, 4314, 25913, 64602, 1127, 515, 197, 197, 20921, 7220, 2507, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_decode_byte_array_from_array(t *testing.T) { should := require.New(t) data := []byte{} err := json.Unmarshal([]byte(`[1,2,3]`), &data) should.Nil(err) should.Equal([]byte{1, 2, 3}, data) err = jsoner.DefaultAPI().Unmarshal([]byte(`[1,2,3]`), &data) should.Nil(err) should.Equal([]byte{1, 2, 3}, data) }
explode_data.jsonl/57887
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 15227, 19737, 3858, 5673, 3858, 1155, 353, 8840, 836, 8, 341, 197, 5445, 1669, 1373, 7121, 1155, 340, 8924, 1669, 3056, 3782, 16094, 9859, 1669, 2951, 38097, 10556, 3782, 5809, 58, 16, 11, 17, 11, 18, 60, 63, 701, 609, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPodSpecWithNilStorageCluster(t *testing.T) { var cluster *corev1.StorageCluster driver := portworx{} nodeName := "testNode" _, err := driver.GetStoragePodSpec(cluster, nodeName) assert.Error(t, err, "Expected an error on GetStoragePodSpec") }
explode_data.jsonl/55453
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 23527, 8327, 2354, 19064, 5793, 28678, 1155, 353, 8840, 836, 8, 341, 2405, 10652, 353, 98645, 16, 43771, 28678, 198, 33652, 1669, 2635, 49710, 87, 16094, 20831, 675, 1669, 330, 1944, 1955, 1837, 197, 6878, 1848, 1669, 5579, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateVLANs(t *testing.T) { t.Parallel() Convey("validateVLANs", t, func() { context := &validation.Context{Context: context.Background()} Convey("empty config", func() { vlans := &config.VLANs{} validateVLANs(context, vlans) So(context.Finalize(), ShouldBeNil) }) Convey("missing VLAN ID", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { CidrBlock: "127.0.0.1/32", }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldErrLike, "must be positive") }) Convey("negative VLAN ID", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { Id: -1, CidrBlock: "127.0.0.1/32", }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldErrLike, "must be positive") }) Convey("zero VLAN ID", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { Id: 0, CidrBlock: "127.0.0.1/32", }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldErrLike, "must be positive") }) Convey("excessive VLAN ID", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { Id: 65536, CidrBlock: "127.0.0.1/32", }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldErrLike, "must not exceed 65535") }) Convey("duplicate VLAN", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { Id: 1, CidrBlock: "127.0.0.1/32", }, { Id: 2, CidrBlock: "127.0.0.1/32", }, { Id: 1, CidrBlock: "127.0.0.1/32", }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldErrLike, "duplicate VLAN") }) Convey("invalid CIDR block", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { Id: 1, CidrBlock: "512.0.0.1/128", }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldErrLike, "invalid CIDR block") }) Convey("CIDR block too long", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { Id: 1, CidrBlock: "127.0.0.1/1", }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldErrLike, "CIDR block suffix must be at least 18") }) Convey("ok", func() { vlans := &config.VLANs{ Vlan: []*config.VLAN{ { Id: 1, CidrBlock: "127.0.0.1/32", 
State: common.State_SERVING, }, { Id: 2, CidrBlock: "127.0.0.1/32", State: common.State_PRERELEASE, }, }, } validateVLANs(context, vlans) So(context.Finalize(), ShouldBeNil) }) }) }
explode_data.jsonl/64111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1527 }
[ 2830, 3393, 17926, 53, 23408, 82, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 93070, 5617, 445, 7067, 53, 23408, 82, 497, 259, 11, 2915, 368, 341, 197, 28413, 1669, 609, 12284, 9328, 90, 1972, 25, 2266, 19047, 368, 630, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDatabase_compress(t *testing.T) { // setup tests tests := []struct { name string failure bool level int data []byte want []byte }{ { name: "compression level -1", failure: false, level: constants.CompressionNegOne, data: []byte("foo"), want: []byte{120, 156, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 0", failure: false, level: constants.CompressionZero, data: []byte("foo"), want: []byte{120, 1, 0, 3, 0, 252, 255, 102, 111, 111, 1, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 1", failure: false, level: constants.CompressionOne, data: []byte("foo"), want: []byte{120, 1, 0, 3, 0, 252, 255, 102, 111, 111, 1, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 2", failure: false, level: constants.CompressionTwo, data: []byte("foo"), want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 3", failure: false, level: constants.CompressionThree, data: []byte("foo"), want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 4", failure: false, level: constants.CompressionFour, data: []byte("foo"), want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 5", failure: false, level: constants.CompressionFive, data: []byte("foo"), want: []byte{120, 94, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 6", failure: false, level: constants.CompressionSix, data: []byte("foo"), want: []byte{120, 156, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 7", failure: false, level: constants.CompressionSeven, data: []byte("foo"), want: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 8", failure: false, level: constants.CompressionEight, data: []byte("foo"), want: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, { name: "compression level 9", 
failure: false, level: constants.CompressionNine, data: []byte("foo"), want: []byte{120, 218, 74, 203, 207, 7, 4, 0, 0, 255, 255, 2, 130, 1, 69}, }, } // run tests for _, test := range tests { t.Run(test.name, func(t *testing.T) { got, err := compress(test.level, test.data) if test.failure { if err == nil { t.Errorf("compress for %s should have returned err", test.name) } return } if err != nil { t.Errorf("compress for %s returned err: %v", test.name, err) } if !reflect.DeepEqual(got, test.want) { t.Errorf("compress for %s is %v, want %v", test.name, string(got), string(test.want)) } }) } }
explode_data.jsonl/80953
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1437 }
[ 2830, 3393, 5988, 87845, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 7032, 198, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 1166, 9373, 1807, 198, 197, 53743, 256, 526, 198, 197, 8924, 262, 3056, 3782, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMemberList(t *testing.T) { h := newHelper(t) helpers.AllowMe(h, types.RoleRbacResource(0), "read") r := h.repoMakeRole(h.randEmail()) h.createRoleMember(id.Next(), r.ID) h.createRoleMember(id.Next(), r.ID) h.apiInit(). Get(fmt.Sprintf("/roles/%d/members", r.ID)). Expect(t). Status(http.StatusOK). Assert(helpers.AssertNoErrors). Assert(jsonpath.Len(`$.response`, 2)). End() }
explode_data.jsonl/8349
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 9366, 852, 1155, 353, 8840, 836, 8, 341, 9598, 1669, 501, 5511, 1155, 340, 197, 21723, 29081, 7823, 3203, 11, 4494, 35955, 49, 55877, 4783, 7, 15, 701, 330, 878, 5130, 7000, 1669, 305, 46169, 8078, 9030, 3203, 15506, 4781,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMacServiceImpl_FindByURL(t *testing.T) { a := assert.New(t) ctrl := gomock.NewController(t) defer ctrl.Finish() mockMr := database.NewMockMacRepository(ctrl) expected := &model.Mac{} url := "https://apple.com" { // success mpi := NewMacServiceImpl(mockMr) if mpi == nil { t.FailNow() } mockMr.EXPECT().FindByURL(url).Return(expected, nil) actual, err := mpi.FindByURL(url) a.Equal(expected, actual) a.NoError(err) } { // failed mpi := NewMacServiceImpl(mockMr) if mpi == nil { t.FailNow() } mockMr.EXPECT().FindByURL(url).Return(nil, fmt.Errorf("error")) actual, err := mpi.FindByURL(url) a.Nil(actual) a.Error(err) } }
explode_data.jsonl/55037
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 19552, 50603, 95245, 1359, 3144, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 340, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 741, 77333, 12275, 1669, 4625, 7121, 11571, 19552...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAWSControllerIdentityController(t *testing.T) { t.Run("should create AWSClusterControllerIdentity when identityRef is not specified", func(t *testing.T) { g := NewWithT(t) ctx := context.Background() instance := &infrav1.AWSCluster{ObjectMeta: metav1.ObjectMeta{Name: "foo", Namespace: "default"}} instance.Default() // Create the AWSCluster object and expect the Reconcile and Deployment to be created g.Expect(testEnv.Create(ctx, instance)).To(Succeed()) t.Log("Ensuring AWSClusterControllerIdentity instance is created") g.Eventually(func() bool { cp := &infrav1.AWSClusterControllerIdentity{} key := client.ObjectKey{ Name: infrav1.AWSClusterControllerIdentityName, } err := testEnv.Get(ctx, key, cp) if err != nil { return false } if reflect.DeepEqual(*cp.Spec.AllowedNamespaces, infrav1.AllowedNamespaces{}) { return true } return false }, 10*time.Second).Should(Equal(true)) }) }
explode_data.jsonl/20378
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 36136, 2051, 18558, 2051, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5445, 1855, 33338, 3540, 75, 4993, 2051, 18558, 979, 9569, 3945, 374, 537, 5189, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3174, 1669, 1532, 235...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrintLogLine(t *testing.T) { testutil.Run(t, "verify lines are not intermixed", func(t *testutil.T) { var buf bytes.Buffer logger := &LogAggregator{ output: &buf, } var wg sync.WaitGroup for i := 0; i < 5; i++ { wg.Add(1) go func() { for i := 0; i < 100; i++ { logger.printLogLine(color.Default, "PREFIX", "TEXT\n") } wg.Done() }() } wg.Wait() lines := strings.Split(buf.String(), "\n") for i := 0; i < 5*100; i++ { t.CheckDeepEqual("PREFIX TEXT", lines[i]) } }) }
explode_data.jsonl/33816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 8994, 2201, 2460, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 16708, 1155, 11, 330, 12446, 5128, 525, 537, 946, 56685, 497, 2915, 1155, 353, 1944, 1314, 836, 8, 341, 197, 2405, 6607, 5820, 22622, 271, 197, 17060, 1669, 609,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNavigation_EnterDoesNotAddSpaceAtStartOfLine(t *testing.T) { f, cleanup := setupNav() defer cleanup() feedInput(f.TTYCtrl, "put [\n") f.TTYCtrl.Inject(term.K('N', ui.Ctrl)) // begin navigation mode f.TTYCtrl.Inject(term.K(ui.Enter)) // insert the "a" file name f.TestTTY(t, filepath.Join("~", "d"), "> ", "put [", Styles, "vvv b", "\n", " a", term.DotHere, ) }
explode_data.jsonl/49814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 16399, 91105, 21468, 2623, 2212, 9914, 1655, 3479, 2124, 2460, 1155, 353, 8840, 836, 8, 341, 1166, 11, 21290, 1669, 6505, 10096, 741, 16867, 21290, 741, 1166, 12051, 2505, 955, 836, 22098, 15001, 11, 330, 628, 93815, 77, 113...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetPublicKey(t *testing.T) { k := newKMS(t, mockstorage.NewMockStoreProvider()) t.Run("successfully getting public key by id", func(t *testing.T) { prov := protocol.MockProvider{CustomKMS: k} ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, transport.MediaTypeRFC0019EncryptedEnvelope) doc, err := ctx.vdRegistry.Create(testMethod, nil) require.NoError(t, err) pubkey, ok := diddoc.LookupPublicKey(doc.DIDDocument.VerificationMethod[0].ID, doc.DIDDocument) require.True(t, ok) require.NotNil(t, pubkey) }) t.Run("failed to get public key", func(t *testing.T) { prov := protocol.MockProvider{CustomKMS: k} ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, transport.MediaTypeRFC0019EncryptedEnvelope) doc, err := ctx.vdRegistry.Create(testMethod, nil) require.NoError(t, err) pubkey, ok := diddoc.LookupPublicKey("invalid-key", doc.DIDDocument) require.False(t, ok) require.Nil(t, pubkey) }) }
explode_data.jsonl/2108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 1949, 61822, 1155, 353, 8840, 836, 8, 341, 16463, 1669, 501, 42, 4826, 1155, 11, 7860, 16172, 7121, 11571, 6093, 5179, 2398, 3244, 16708, 445, 60505, 3709, 584, 1376, 553, 877, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_fsSource_Close_NothingToClose(t *testing.T) { fsys := os.DirFS("test") f := &fsSource{ fsys: fsys, } err := f.Close() if err != nil { t.Fatalf("unable to close source: %v", err) } }
explode_data.jsonl/81879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 34470, 3608, 68185, 36989, 1596, 1249, 7925, 1155, 353, 8840, 836, 8, 341, 1166, 7791, 1669, 2643, 83757, 8485, 445, 1944, 1138, 1166, 1669, 609, 3848, 3608, 515, 197, 1166, 7791, 25, 282, 7791, 345, 197, 630, 9859, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreatePipelineSetPipelineJobsFail(t *testing.T) { tmp, _ := ioutil.TempDir("", "TestCreatePipelineSetPipelineJobsFail") gaia.Cfg = new(gaia.Config) gaia.Cfg.HomePath = tmp gaia.Cfg.PipelinePath = tmp buf := new(bytes.Buffer) gaia.Cfg.Logger = hclog.New(&hclog.LoggerOptions{ Level: hclog.Trace, Output: buf, Name: "Gaia", }) mcp := new(mockCreatePipelineStore) services.MockStorageService(mcp) defer func() { services.MockStorageService(nil) }() cp := new(gaia.CreatePipeline) cp.Pipeline.Name = "test" cp.Pipeline.Type = gaia.PTypeGolang cp.Pipeline.Repo = &gaia.GitRepo{URL: "https://github.com/gaia-pipeline/pipeline-test"} pipelineService := NewGaiaPipelineService(Dependencies{ Scheduler: &mockScheduleService{ err: errors.New("error"), }, }) pipelineService.CreatePipeline(cp) if !strings.Contains(cp.Output, "cannot validate pipeline") { t.Fatalf("error thrown should contain 'cannot validate pipeline' but its %s", cp.Output) } }
explode_data.jsonl/13143
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 4021, 34656, 1649, 34656, 40667, 19524, 1155, 353, 8840, 836, 8, 341, 20082, 11, 716, 1669, 43144, 65009, 6184, 19814, 330, 2271, 4021, 34656, 1649, 34656, 40667, 19524, 1138, 3174, 64, 685, 727, 4817, 284, 501, 3268, 64, 68...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStatusCodeFilter_SetMatcher(t *testing.T) { tests := []struct { name string matcher Matcher wantErr bool }{ {"happy", NewHTTPStatusMatcher(), false}, {"nil", nil, false}, {"sad matcher", &yesMatcher{}, true}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { f := &StatusCodeFilter{} if err := f.SetMatcher(tt.matcher); (err != nil) != tt.wantErr { t.Errorf("SetMatcher() error = %v, wantErr %v", err, tt.wantErr) } }) } }
explode_data.jsonl/61140
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 15872, 5632, 14812, 37554, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 2109, 28058, 60632, 198, 197, 50780, 7747, 1807, 198, 197, 59403, 197, 197, 4913, 56521, 497, 1532, 9230,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCheckBadCardinality(t *testing.T) { tests := []struct { body string exp []types.Type }{ { body: "plus(1)", exp: []types.Type{types.N}, }, { body: "plus(1, 2, 3, 4)", exp: []types.Type{types.N, types.N, types.N, types.N}, }, } for _, test := range tests { body := MustParseBody(test.body) tc := newTypeChecker() env := tc.Env(BuiltinMap) _, err := tc.CheckBody(env, body) if len(err) != 1 || err[0].Code != TypeErr { t.Fatalf("Expected 1 type error from %v but got: %v", body, err) } detail, ok := err[0].Details.(*ArgErrDetail) if !ok { t.Fatalf("Expected argument error details but got: %v", err) } if len(test.exp) != len(detail.Have) { t.Fatalf("Expected arg types %v but got: %v", test.exp, detail.Have) } for i := range test.exp { if types.Compare(test.exp[i], detail.Have[i]) != 0 { t.Fatalf("Expected types for %v to be %v but got: %v", body[0], test.exp, detail.Have) } } } }
explode_data.jsonl/14561
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 437 }
[ 2830, 3393, 3973, 17082, 5770, 80777, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 35402, 914, 198, 197, 48558, 220, 3056, 9242, 10184, 198, 197, 59403, 197, 197, 515, 298, 35402, 25, 330, 7138, 7, 16, 15752, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestListRecipientTransferSchedules_Network(t *testing.T) { testutil.Require(t, "network") client := testutil.NewTestClient(t) var schds omise.ScheduleList list := ListRecipientTransferSchedules{ RecipientID: "reci_1234", List: List{ Limit: 100, From: time.Date(2017, 5, 16, 0, 0, 0, 0, time.Local), }, } client.MustDo(&schds, &list) t.Logf("Schedules Len: %d\n", len(schds.Data)) t.Logf("%#v\n", schds) }
explode_data.jsonl/19943
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 852, 74432, 21970, 50, 49613, 1604, 2349, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 81288, 1155, 11, 330, 17511, 1138, 25291, 1669, 1273, 1314, 7121, 2271, 2959, 1155, 340, 2405, 5699, 5356, 7861, 1064, 87081, 852, 198, 144...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestExtract checks cipherSuiteTLS13.extract (the HKDF-Extract step) against
// the TLS 1.3 trace vectors for the early, master and handshake secret
// derivations; either input may be nil where the trace omits a value.
func TestExtract(t *testing.T) {
	// newSecret is the input keying material (IKM) and currentSecret the
	// salt of the HKDF-Extract step.
	type args struct {
		newSecret     []byte
		currentSecret []byte
	}
	tests := []struct {
		name string
		args args
		want []byte
	}{
		{
			`extract secret "early"`,
			args{
				nil,
				nil,
			},
			parseVector(`secret (32 octets): 33 ad 0a 1c 60 7e c0 3b 09 e6 cd 98 93 68 0c e2 10 ad f3 00 aa 1f 26 60 e1 b2 2e 10 f1 70 f9 2a`),
		},
		{
			`extract secret "master"`,
			args{
				nil,
				parseVector(`salt (32 octets): 43 de 77 e0 c7 77 13 85 9a 94 4d b9 db 25 90 b5 31 90 a6 5b 3e e2 e4 f1 2d d7 a0 bb 7c e2 54 b4`),
			},
			parseVector(`secret (32 octets): 18 df 06 84 3d 13 a0 8b f2 a4 49 84 4c 5f 8a 47 80 01 bc 4d 4c 62 79 84 d5 a4 1d a8 d0 40 29 19`),
		},
		{
			`extract secret "handshake"`,
			args{
				parseVector(`IKM (32 octets): 8b d4 05 4f b5 5b 9d 63 fd fb ac f9 f0 4b 9f 0d 35 e6 d6 3f 53 75 63 ef d4 62 72 90 0f 89 49 2d`),
				parseVector(`salt (32 octets): 6f 26 15 a1 08 c7 02 c5 67 8f 54 fc 9d ba b6 97 16 c0 76 18 9c 48 25 0c eb ea c3 57 6c 36 11 ba`),
			},
			parseVector(`secret (32 octets): 1d c8 26 e9 36 06 aa 6f dc 0a ad c1 2f 74 1b 01 04 6a a6 b9 9f 69 1e d2 21 a9 f0 ca 04 3f be ac`),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Use the first TLS 1.3 cipher suite for the extraction.
			c := cipherSuitesTLS13[0]
			if got := c.extract(tt.args.newSecret, tt.args.currentSecret); !bytes.Equal(got, tt.want) {
				t.Errorf("cipherSuiteTLS13.extract() = % x, want % x", got, tt.want)
			}
		})
	}
}
explode_data.jsonl/71999
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 750 }
[ 2830, 3393, 28959, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 8638, 19773, 257, 3056, 3782, 198, 197, 20121, 19773, 3056, 3782, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestXfrmPolicyFlush(t *testing.T) { setUpNetlinkTest(t)() p1 := getPolicy() if err := XfrmPolicyAdd(p1); err != nil { t.Fatal(err) } p1.Dir = XFRM_DIR_IN s := p1.Src p1.Src = p1.Dst p1.Dst = s if err := XfrmPolicyAdd(p1); err != nil { t.Fatal(err) } policies, err := XfrmPolicyList(FAMILY_ALL) if err != nil { t.Fatal(err) } if len(policies) != 2 { t.Fatalf("unexpected number of policies: %d", len(policies)) } if err := XfrmPolicyFlush(); err != nil { t.Fatal(err) } policies, err = XfrmPolicyList(FAMILY_ALL) if err != nil { t.Fatal(err) } if len(policies) != 0 { t.Fatalf("unexpected number of policies: %d", len(policies)) } }
explode_data.jsonl/30737
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 55, 31636, 13825, 46874, 1155, 353, 8840, 836, 8, 341, 8196, 2324, 6954, 2080, 2271, 1155, 8, 2822, 3223, 16, 1669, 633, 13825, 741, 743, 1848, 1669, 1599, 31636, 13825, 2212, 1295, 16, 1215, 1848, 961, 2092, 341, 197, 324...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestUpsertDevice covers Client.UpsertDevice for the success path and a
// range of failure modes: an invalid connection string, a transport-level
// error, a non-2xx status code and an undecodable response body.
func TestUpsertDevice(t *testing.T) {
	t.Parallel()
	// Valid connection string shared by most of the cases below.
	cs := &model.ConnectionString{
		HostName: "localhost",
		Key:      []byte("secret"),
		Name:     "gimmeAccessPls",
	}
	deviceID := "6c985f61-5093-45eb-8ece-7dfe97a6de7b"
	testCases := []struct {
		Name    string                  // sub-test name
		Updates []*Device               // device documents passed to UpsertDevice
		ConnStr *model.ConnectionString // connection string under test
		RSPCode int                     // HTTP status the fake transport replies with
		RSPBody interface{}             // canned response body ([]byte is written raw)
		RTError error                   // error injected at the RoundTripper
		Error   error                   // expected error; its message is matched as a regexp
	}{{
		Name: "ok",
		Updates: []*Device{{
			Auth: &Auth{
				Type: AuthTypeSymmetric,
				SymmetricKey: &SymmetricKey{
					Primary:   Key("foo"),
					Secondary: Key("bar"),
				},
			},
			ETag: "qwerty",
		}, nil},
		ConnStr: cs,
		RSPCode: http.StatusOK,
	}, {
		Name: "error/invalid connection string",
		ConnStr: &model.ConnectionString{
			Name: "bad",
		},
		Error: errors.New("failed to prepare request: invalid connection string"),
	}, {
		Name:    "error/internal roundtrip error",
		ConnStr: cs,
		RTError: errors.New("idk"),
		Error:   errors.New("failed to execute request:.*idk"),
	}, {
		Name:    "error/bad status code",
		ConnStr: cs,
		RSPCode: http.StatusInternalServerError,
		Error:   common.HTTPError{Code: http.StatusInternalServerError},
	}, {
		Name:    "error/malformed response",
		ConnStr: cs,
		RSPBody: []byte("imagine a device in this reponse pls"),
		RSPCode: http.StatusOK,
		Error:   errors.New("iothub: failed to decode updated device"),
	}}
	for i := range testCases {
		tc := testCases[i]
		t.Run(tc.Name, func(t *testing.T) {
			t.Parallel()
			ctx := context.Background()
			w := httptest.NewRecorder()
			// Fake transport: optionally fail outright, otherwise record the
			// canned status/body and hand back the recorded response.
			httpClient := &http.Client{
				Transport: RoundTripperFunc(func(
					r *http.Request,
				) (*http.Response, error) {
					if tc.RTError != nil {
						return nil, tc.RTError
					}
					w.WriteHeader(tc.RSPCode)
					switch typ := tc.RSPBody.(type) {
					case []byte:
						// Raw bytes are written verbatim (used for the
						// malformed-response case).
						w.Write(typ)
					case nil:
						// Default: echo back the merged device document.
						dev := mergeDevices(tc.Updates...)
						b, _ := json.Marshal(dev)
						w.Write(b)
					default:
						b, _ := json.Marshal(typ)
						w.Write(b)
					}
					return w.Result(), nil
				}),
			}
			client := NewClient(NewOptions(nil).
				SetClient(httpClient))
			dev, err := client.UpsertDevice(ctx, tc.ConnStr, deviceID, tc.Updates...)

			if tc.Error != nil {
				// Expected failure: the error message must match the
				// case's regexp.
				if assert.Error(t, err) {
					assert.Regexp(t, tc.Error.Error(), err.Error())
				}
			} else {
				// Success: the returned device equals the merged updates
				// with the device ID filled in.
				assert.NoError(t, err)
				expected := mergeDevices(tc.Updates...)
				expected.DeviceID = deviceID
				assert.Equal(t, expected, dev)
			}
		})
	}
}
explode_data.jsonl/63458
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1099 }
[ 2830, 3393, 98778, 529, 6985, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 71899, 1669, 609, 2528, 67478, 515, 197, 197, 85305, 25, 330, 8301, 756, 197, 55242, 25, 414, 3056, 3782, 445, 20474, 4461, 197, 21297, 25, 257, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUpsert(t *testing.T) { t.Run("AgentVersions", testAgentVersionsUpsert) t.Run("CrawlProperties", testCrawlPropertiesUpsert) t.Run("Crawls", testCrawlsUpsert) t.Run("IPAddresses", testIPAddressesUpsert) t.Run("Latencies", testLatenciesUpsert) t.Run("MultiAddresses", testMultiAddressesUpsert) t.Run("MultiAddressesSets", testMultiAddressesSetsUpsert) t.Run("MultiAddressesXIPAddresses", testMultiAddressesXIPAddressesUpsert) t.Run("Neighbors", testNeighborsUpsert) t.Run("Peers", testPeersUpsert) t.Run("PegasysConnections", testPegasysConnectionsUpsert) t.Run("PegasysNeighbours", testPegasysNeighboursUpsert) t.Run("Protocols", testProtocolsUpsert) t.Run("ProtocolsSets", testProtocolsSetsUpsert) t.Run("RawVisits", testRawVisitsUpsert) t.Run("Sessions", testSessionsUpsert) t.Run("Visits", testVisitsUpsert) }
explode_data.jsonl/49523
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 341 }
[ 2830, 3393, 98778, 529, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16810, 69015, 497, 1273, 16810, 69015, 98778, 529, 692, 3244, 16708, 445, 34, 33683, 7903, 497, 1273, 34, 33683, 7903, 98778, 529, 692, 3244, 16708, 445, 34, 1041, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPluginYamls(t *testing.T) { var server = mockPluginsServer(t, "testdata/defaults/plugin.yaml", PluginType) defer server.Close() var pluginList Plugins p, err := pluginList.Encode(server.URL, "/test") if err != nil { t.Fatalf("expected nil but got %v", err) } if 1 != len(p.Plugins) { t.Fatalf("expected %d but got %v", 1, len(p.Plugins)) } }
explode_data.jsonl/1912
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 11546, 56, 309, 4730, 1155, 353, 8840, 836, 8, 341, 2405, 3538, 284, 7860, 45378, 5475, 1155, 11, 330, 92425, 14, 26756, 51372, 33406, 497, 21245, 929, 340, 16867, 3538, 10421, 741, 2405, 9006, 852, 62498, 271, 3223, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWaitGroupTimeout_timeout(t *testing.T) { wg := &sync.WaitGroup{} wg.Add(1) timeouted := WaitGroupTimeout(wg, time.Millisecond*10) assert.True(t, timeouted) }
explode_data.jsonl/73889
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 14190, 2808, 7636, 20537, 1155, 353, 8840, 836, 8, 341, 72079, 1669, 609, 12996, 28384, 2808, 16094, 72079, 1904, 7, 16, 692, 78395, 291, 1669, 13824, 2808, 7636, 3622, 70, 11, 882, 71482, 9, 16, 15, 340, 6948, 32443, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_informerWatchFuncWithSelectors(t *testing.T) { ls, fs, err := selectorsFromFilters(Filters{ Fields: []FieldFilter{ { Key: "kk1", Value: "kv1", Op: selection.Equals, }, }, Labels: []FieldFilter{ { Key: "lk1", Value: "lv1", Op: selection.NotEquals, }, }, }) assert.NoError(t, err) c, err := newFakeAPIClientset(k8sconfig.APIConfig{}) assert.NoError(t, err) watchFunc := informerWatchFuncWithSelectors(c, "test-ns", ls, fs) opts := metav1.ListOptions{} obj, err := watchFunc(opts) assert.NoError(t, err) assert.NotNil(t, obj) }
explode_data.jsonl/41507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 1243, 34527, 14247, 9626, 2354, 96995, 1155, 353, 8840, 836, 8, 341, 197, 4730, 11, 8619, 11, 1848, 1669, 56037, 3830, 28351, 7832, 8612, 515, 197, 197, 8941, 25, 3056, 1877, 5632, 515, 298, 197, 515, 571, 55242, 25, 256, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDataResultSetUnfulfilledMergeShardResults verifies the behavior of
// MergedDataBootstrapResult: nil handling on either side, and the per-shard
// union of unfulfilled time ranges when both results are present.
func TestDataResultSetUnfulfilledMergeShardResults(t *testing.T) {
	start := time.Now().Truncate(testBlockSize)
	// First result: shards 0 and 1 unfulfilled.
	rangeOne := shardTimeRanges{
		0: xtime.NewRanges(xtime.Range{
			Start: start,
			End:   start.Add(8 * testBlockSize),
		}),
		1: xtime.NewRanges(xtime.Range{
			Start: start,
			End:   start.Add(testBlockSize),
		}),
	}
	// Second result: shard 0 overlaps rangeOne's shard 0; shard 2 is new.
	rangeTwo := shardTimeRanges{
		0: xtime.NewRanges(xtime.Range{
			Start: start.Add(6 * testBlockSize),
			End:   start.Add(10 * testBlockSize),
		}),
		2: xtime.NewRanges(xtime.Range{
			Start: start.Add(testBlockSize),
			End:   start.Add(2 * testBlockSize),
		}),
	}
	r := NewDataBootstrapResult()
	r.SetUnfulfilled(rangeOne)
	rTwo := NewDataBootstrapResult()
	rTwo.SetUnfulfilled(rangeTwo)

	// Merging two nils yields nil.
	rMerged := MergedDataBootstrapResult(nil, nil)
	assert.Nil(t, rMerged)

	// Merging with nil on either side returns the non-nil result unchanged.
	rMerged = MergedDataBootstrapResult(r, nil)
	assert.True(t, rMerged.Unfulfilled().Equal(rangeOne))
	rMerged = MergedDataBootstrapResult(nil, r)
	assert.True(t, rMerged.Unfulfilled().Equal(rangeOne))

	// Merging both results unions the ranges per shard: the overlapping
	// shard-0 ranges coalesce into a single contiguous range.
	rMerged = MergedDataBootstrapResult(r, rTwo)
	expected := shardTimeRanges{
		0: xtime.NewRanges(xtime.Range{
			Start: start,
			End:   start.Add(10 * testBlockSize),
		}),
		1: xtime.NewRanges(xtime.Range{
			Start: start,
			End:   start.Add(testBlockSize),
		}),
		2: xtime.NewRanges(xtime.Range{
			Start: start.Add(testBlockSize),
			End:   start.Add(testBlockSize * 2),
		})}
	assert.True(t, rMerged.Unfulfilled().Equal(expected))
}
explode_data.jsonl/4665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 629 }
[ 2830, 93200, 33208, 1806, 1262, 27511, 52096, 2016, 567, 9801, 1155, 353, 8840, 836, 8, 341, 21375, 1669, 882, 13244, 1005, 1282, 26900, 8623, 89932, 340, 75087, 3966, 1669, 52069, 1462, 74902, 515, 197, 197, 15, 25, 856, 1678, 7121, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTransformers verifies the number of manifest transformers returned for
// each ingress configuration of a KnativeServing instance: per this table,
// Istio-only yields 1, Kourier-only 2, Contour-only 0, an empty ingress
// spec defaults to Istio (1), and all three enabled together yield 3.
func TestTransformers(t *testing.T) {
	tests := []struct {
		name     string
		instance servingv1alpha1.KnativeServing
		expected int
	}{{
		name: "Available istio ingress",
		instance: servingv1alpha1.KnativeServing{
			Spec: servingv1alpha1.KnativeServingSpec{
				Ingress: &servingv1alpha1.IngressConfigs{
					Istio: servingv1alpha1.IstioIngressConfiguration{
						Enabled: true,
					},
				},
			},
		},
		expected: 1,
	}, {
		name: "Available kourier ingress",
		instance: servingv1alpha1.KnativeServing{
			Spec: servingv1alpha1.KnativeServingSpec{
				Ingress: &servingv1alpha1.IngressConfigs{
					Kourier: servingv1alpha1.KourierIngressConfiguration{
						Enabled: true,
					},
				},
			},
		},
		expected: 2,
	}, {
		name: "Available contour ingress",
		instance: servingv1alpha1.KnativeServing{
			Spec: servingv1alpha1.KnativeServingSpec{
				Ingress: &servingv1alpha1.IngressConfigs{
					Contour: servingv1alpha1.ContourIngressConfiguration{
						Enabled: true,
					},
				},
			},
		},
		expected: 0,
	}, {
		// No Ingress block at all.
		name: "Empty ingress for default istio",
		instance: servingv1alpha1.KnativeServing{
			Spec: servingv1alpha1.KnativeServingSpec{},
		},
		expected: 1,
	}, {
		name: "All ingresses enabled",
		instance: servingv1alpha1.KnativeServing{
			Spec: servingv1alpha1.KnativeServingSpec{
				Ingress: &servingv1alpha1.IngressConfigs{
					Contour: servingv1alpha1.ContourIngressConfiguration{
						Enabled: true,
					},
					Kourier: servingv1alpha1.KourierIngressConfiguration{
						Enabled: true,
					},
					Istio: servingv1alpha1.IstioIngressConfiguration{
						Enabled: true,
					},
				},
			},
		},
		expected: 3,
	}}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			transformers := Transformers(context.TODO(), &tt.instance)
			util.AssertEqual(t, len(transformers), tt.expected)
		})
	}
}
explode_data.jsonl/15441
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 809 }
[ 2830, 3393, 8963, 388, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 56256, 13480, 85, 16, 7141, 16, 11352, 29738, 50, 19505, 198, 197, 42400, 526, 198, 197, 15170, 515, 197, 11609, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestScrape exercises the processes scraper with real system data, faked
// data and injected failures, checking emitted metric counts, partial-scrape
// error reporting and start-time propagation.
func TestScrape(t *testing.T) {
	type testCase struct {
		name         string
		getMiscStats func() (*load.MiscStat, error) // optional override for the misc-stat source
		getProcesses func() ([]proc, error)         // optional override for the process lister
		expectedErr  string                         // expected scrape error message, if any
		validate     func(*testing.T, pdata.MetricSlice)
	}
	testCases := []testCase{{
		name:     "Standard",
		validate: validateRealData,
	}, {
		name:         "FakeData",
		getMiscStats: func() (*load.MiscStat, error) { return &fakeData, nil },
		getProcesses: func() ([]proc, error) { return fakeProcessesData, nil },
		validate:     validateFakeData,
	}, {
		name:         "ErrorFromMiscStat",
		getMiscStats: func() (*load.MiscStat, error) { return &load.MiscStat{}, errors.New("err1") },
		expectedErr:  "err1",
	}, {
		name:         "ErrorFromProcesses",
		getProcesses: func() ([]proc, error) { return nil, errors.New("err2") },
		expectedErr:  "err2",
	}, {
		// A per-process error must not fail the whole scrape.
		name:         "ErrorFromProcessShouldBeIgnored",
		getProcesses: func() ([]proc, error) { return []proc{errProcess{}}, nil },
	}, {
		name:     "Validate Start Time",
		validate: validateStartTime,
	}}
	for _, test := range testCases {
		t.Run(test.name, func(t *testing.T) {
			assert := assert.New(t)
			scraper := newProcessesScraper(context.Background(), &Config{})
			err := scraper.start(context.Background(), componenttest.NewNopHost())
			assert.NoError(err, "Failed to initialize processes scraper: %v", err)

			// Override scraper methods if we are mocking out for this test case
			if test.getMiscStats != nil {
				scraper.getMiscStats = test.getMiscStats
			}
			if test.getProcesses != nil {
				scraper.getProcesses = test.getProcesses
			}
			scraper.startTime = startTime
			md, err := scraper.scrape(context.Background())

			// The number of metrics emitted depends on platform support for
			// the processes count/created metrics.
			expectedMetricCount := 0
			if expectProcessesCountMetric {
				expectedMetricCount++
			}
			if expectProcessesCreatedMetric {
				expectedMetricCount++
			}

			// When metrics are supported and a failure was injected, the
			// scrape must return a partial scrape error whose Failed count
			// equals the number of metrics that could not be produced.
			if (expectProcessesCountMetric || expectProcessesCreatedMetric) && test.expectedErr != "" {
				assert.EqualError(err, test.expectedErr)
				isPartial := scrapererror.IsPartialScrapeError(err)
				assert.Truef(isPartial, "expected partial scrape error, have %+v", err)
				if isPartial {
					assert.Equal(expectedMetricCount,
						err.(scrapererror.PartialScrapeError).Failed)
				}
				return
			}
			if test.expectedErr == "" {
				assert.NoErrorf(err, "Failed to scrape metrics: %v", err)
			}
			assert.Equal(expectedMetricCount, md.MetricCount())
			metrics := md.ResourceMetrics().At(0).InstrumentationLibraryMetrics().At(0).Metrics()
			if test.validate != nil {
				test.validate(t, metrics)
			}
			internal.AssertSameTimeStampForAllMetrics(t, metrics)
		})
	}
}
explode_data.jsonl/18727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1047 }
[ 2830, 3393, 3326, 19842, 1155, 353, 8840, 836, 8, 341, 13158, 54452, 2036, 341, 197, 11609, 260, 914, 198, 197, 10366, 50979, 16635, 2915, 368, 4609, 1078, 95738, 15878, 11, 1465, 340, 197, 10366, 92727, 2915, 368, 34923, 15782, 11, 146...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateFeeReceiverSendsUpdate(t *testing.T) { t.Parallel() // Create a test channel which will be used for the duration of this // unittest. The channel will be funded evenly with Alice having 5 BTC, // and Bob having 5 BTC. aliceChannel, bobChannel, cleanUp, err := CreateTestChannels( channeldb.SingleFunderTweaklessBit, ) if err != nil { t.Fatalf("unable to create test channels: %v", err) } defer cleanUp() // Since Alice is the channel initiator, she should fail when receiving // fee update fee := chainfee.SatPerKWeight(333) err = aliceChannel.ReceiveUpdateFee(fee) if err == nil { t.Fatalf("expected alice to fail receiving fee update") } // Similarly, initiating fee update should fail for Bob. err = bobChannel.UpdateFee(fee) if err == nil { t.Fatalf("expected bob to fail initiating fee update") } }
explode_data.jsonl/72472
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 4289, 41941, 25436, 50, 1412, 4289, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 4230, 264, 1273, 5496, 892, 686, 387, 1483, 369, 279, 8090, 315, 419, 198, 197, 322, 19905, 13, 576, 5496, 686, 387, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestImageAnnotatorBatchAnnotateImagesError(t *testing.T) { errCode := codes.PermissionDenied mockImageAnnotator.err = grpc.Errorf(errCode, "test error") var requests []*visionpb.AnnotateImageRequest = nil var request = &visionpb.BatchAnnotateImagesRequest{ Requests: requests, } c, err := NewImageAnnotatorClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } resp, err := c.BatchAnnotateImages(context.Background(), request) if c := grpc.Code(err); c != errCode { t.Errorf("got error code %q, want %q", c, errCode) } _ = resp }
explode_data.jsonl/81906
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 1906, 2082, 1921, 850, 21074, 2082, 1921, 349, 14228, 1454, 1155, 353, 8840, 836, 8, 341, 9859, 2078, 1669, 13912, 73409, 54481, 198, 77333, 1906, 2082, 1921, 850, 18441, 284, 47900, 13080, 3964, 2078, 11, 330, 1944, 1465, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestquickstartsQuickstartCreateVersion(t *testing.T) { t.Parallel() sha := "d9e925718" v := quickstarts.QuickStartVersion(sha) sv, err := semver.Parse(v) require.NoError(t, err, "failed to parse semantic version %s for quickstart", v) t.Logf("parsed semantic version %s for quickstart", sv.String()) }
explode_data.jsonl/21109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 27763, 65876, 24318, 2468, 4021, 5637, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 15247, 1669, 330, 67, 24, 68, 24, 17, 20, 22, 16, 23, 698, 5195, 1669, 3974, 65876, 13, 24318, 3479, 5637, 7, 15247, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAgentConnectAuthorize_allow checks the agent's connect/authorize
// endpoint end to end: an allow intention authorizes the request (first a
// cache MISS, then a HIT), and after the intention is flipped to deny the
// background-refreshed cache answers with a denial.
func TestAgentConnectAuthorize_allow(t *testing.T) {
	t.Parallel()

	require := require.New(t)
	a := NewTestAgent(t.Name(), "")
	defer a.Shutdown()

	target := "db"

	// Create some intentions
	var ixnId string
	{
		// web -> db, allow.
		req := structs.IntentionRequest{
			Datacenter: "dc1",
			Op:         structs.IntentionOpCreate,
			Intention:  structs.TestIntention(t),
		}
		req.Intention.SourceNS = structs.IntentionDefaultNamespace
		req.Intention.SourceName = "web"
		req.Intention.DestinationNS = structs.IntentionDefaultNamespace
		req.Intention.DestinationName = target
		req.Intention.Action = structs.IntentionActionAllow

		require.Nil(a.RPC("Intention.Apply", &req, &ixnId))
	}

	// Authorize a request presenting web's SPIFFE identity against db.
	args := &structs.ConnectAuthorizeRequest{
		Target:        target,
		ClientCertURI: connect.TestSpiffeIDService(t, "web").URI().String(),
	}
	req, _ := http.NewRequest("POST", "/v1/agent/connect/authorize", jsonReader(args))
	resp := httptest.NewRecorder()
	respRaw, err := a.srv.AgentConnectAuthorize(resp, req)
	require.Nil(err)
	require.Equal(200, resp.Code)
	// First request populates the cache.
	require.Equal("MISS", resp.Header().Get("X-Cache"))

	obj := respRaw.(*connectAuthorizeResp)
	require.True(obj.Authorized)
	require.Contains(obj.Reason, "Matched")

	// Make the request again
	{
		req, _ := http.NewRequest("POST", "/v1/agent/connect/authorize", jsonReader(args))
		resp := httptest.NewRecorder()
		respRaw, err := a.srv.AgentConnectAuthorize(resp, req)
		require.Nil(err)
		require.Equal(200, resp.Code)

		obj := respRaw.(*connectAuthorizeResp)
		require.True(obj.Authorized)
		require.Contains(obj.Reason, "Matched")

		// That should've been a cache hit.
		require.Equal("HIT", resp.Header().Get("X-Cache"))
	}

	// Change the intention
	{
		// Same intention, flipped to deny.
		req := structs.IntentionRequest{
			Datacenter: "dc1",
			Op:         structs.IntentionOpUpdate,
			Intention:  structs.TestIntention(t),
		}
		req.Intention.ID = ixnId
		req.Intention.SourceNS = structs.IntentionDefaultNamespace
		req.Intention.SourceName = "web"
		req.Intention.DestinationNS = structs.IntentionDefaultNamespace
		req.Intention.DestinationName = target
		req.Intention.Action = structs.IntentionActionDeny

		require.Nil(a.RPC("Intention.Apply", &req, &ixnId))
	}

	// Short sleep lets the cache background refresh happen
	time.Sleep(100 * time.Millisecond)

	// Make the request again
	{
		req, _ := http.NewRequest("POST", "/v1/agent/connect/authorize", jsonReader(args))
		resp := httptest.NewRecorder()
		respRaw, err := a.srv.AgentConnectAuthorize(resp, req)
		require.Nil(err)
		require.Equal(200, resp.Code)

		obj := respRaw.(*connectAuthorizeResp)
		require.False(obj.Authorized)
		require.Contains(obj.Reason, "Matched")

		// That should've been a cache hit, too, since it updated in the
		// background.
		require.Equal("HIT", resp.Header().Get("X-Cache"))
	}
}
explode_data.jsonl/33669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1053 }
[ 2830, 3393, 16810, 14611, 37483, 55731, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 17957, 1669, 1373, 7121, 1155, 340, 11323, 1669, 1532, 2271, 16810, 1155, 2967, 1507, 14676, 16867, 264, 10849, 18452, 2822, 28861, 1669, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSetExternalTagEmptyDict runs an embedded Python snippet that calls
// datadog_agent.set_external_tags with entries whose tag dict is empty and
// checks that only the entry with tags shows up in the captured output;
// it also asserts no memory is leaked across the run.
func TestSetExternalTagEmptyDict(t *testing.T) {
	// Reset memory counters
	helpers.ResetMemoryStats()

	// NOTE(review): the raw string's exact leading whitespace was
	// reconstructed from a collapsed source line — confirm against the
	// original file before relying on it.
	code := `
	tags = [
		('hostname', {}),
		('hostname2', {'source_type2': ['tag3', 'tag4']}),
		('hostname', {}),
	]
	datadog_agent.set_external_tags(tags)
	`

	out, err := run(code)
	if err != nil {
		t.Fatal(err)
	}
	if out != "hostname2,source_type2,tag3,tag4" {
		t.Errorf("Unexpected printed value: '%s'", out)
	}

	// Check for leaks
	helpers.AssertMemoryUsage(t)
}
explode_data.jsonl/24558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 1649, 25913, 5668, 3522, 13448, 1155, 353, 8840, 836, 8, 341, 197, 322, 16932, 4938, 31532, 198, 197, 21723, 36660, 10642, 16635, 2822, 43343, 1669, 22074, 3244, 2032, 284, 2278, 197, 197, 492, 27806, 516, 77872, 197, 197, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestLateInitialize verifies that lateInitialize fills in MFAConfiguration
// on the UserPoolParameters from the DescribeUserPoolOutput only when it is
// not already set on the custom resource.
func TestLateInitialize(t *testing.T) {
	type args struct {
		cr   *svcapitypes.UserPoolParameters // CR spec, late-initialized in place
		resp *svcsdk.DescribeUserPoolOutput  // observed state returned by AWS
	}
	type want struct {
		result *svcapitypes.UserPoolParameters // expected CR spec afterwards
		err    error
	}
	cases := map[string]struct {
		args
		want
	}{
		// A value already present on the CR must be preserved even though
		// the response carries a different one.
		"NoLateInitialization": {
			args: args{
				cr: &svcapitypes.UserPoolParameters{
					MFAConfiguration: &testString1,
				},
				resp: &svcsdk.DescribeUserPoolOutput{
					UserPool: &svcsdk.UserPoolType{
						MfaConfiguration: &testString2,
					},
				},
			},
			want: want{
				result: &svcapitypes.UserPoolParameters{
					MFAConfiguration: &testString1,
				},
				err: nil,
			},
		},
		// An unset value is filled in from the response.
		"LateInitializeMFAConfiguration": {
			args: args{
				cr: &svcapitypes.UserPoolParameters{},
				resp: &svcsdk.DescribeUserPoolOutput{
					UserPool: &svcsdk.UserPoolType{
						MfaConfiguration: &testString2,
					},
				},
			},
			want: want{
				result: &svcapitypes.UserPoolParameters{
					MFAConfiguration: &testString2,
				},
				err: nil,
			},
		},
	}
	for name, tc := range cases {
		t.Run(name, func(t *testing.T) {
			// Act
			err := lateInitialize(tc.args.cr, tc.args.resp)
			if diff := cmp.Diff(tc.want.result, tc.args.cr, test.EquateConditions()); diff != "" {
				t.Errorf("r: -want, +got:\n%s", diff)
			}
			if diff := cmp.Diff(tc.want.err, err, test.EquateErrors()); diff != "" {
				t.Errorf("r: -want, +got:\n%s", diff)
			}
		})
	}
}
explode_data.jsonl/18663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 664 }
[ 2830, 3393, 61457, 9928, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 91492, 256, 353, 3492, 11346, 487, 20352, 7344, 10551, 9706, 198, 197, 34653, 353, 3492, 4837, 7584, 23548, 3114, 1474, 10551, 5097, 198, 197, 630, 1315...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRowEncodeAndDecode(t *testing.T) { gtest.C(t, func(t *gtest.T) { t.Assert(gurl.RawEncode(urlStr), rawUrlEncode) res, err := gurl.RawDecode(rawUrlEncode) if err != nil { t.Errorf("decode failed. %v", err) return } t.Assert(res, urlStr) }) }
explode_data.jsonl/52549
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 3102, 32535, 3036, 32564, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 3244, 11711, 3268, 1085, 50575, 32535, 6522, 2580, 701, 7112, 2864, 32535, 692, 197, 10202, 11, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2