text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
// TestBotCommandManager exercises bot-command advertisement end to end for two
// users: a PUBLIC advertisement visible in both users' implicit-team convs,
// then TLFID_CONVS/TLFID_MEMBERS advertisements scoped to a team conversation.
func TestBotCommandManager(t *testing.T) {
	// Force the local (non-remote) command storage for the duration of the test.
	useRemoteMock = false
	defer func() { useRemoteMock = true }()
	ctc := makeChatTestContext(t, "TestBotCommandManager", 2)
	defer ctc.cleanup()
	timeout := 20 * time.Second
	users := ctc.users()
	tc := ctc.world.Tcs[users[0].Username]
	tc.G.UIRouter = dummyUIRouter{}
	tc1 := ctc.world.Tcs[users[1].Username]
	tc1.G.UIRouter = dummyUIRouter{}
	ctx := ctc.as(t, users[0]).startCtx
	ctx1 := ctc.as(t, users[1]).startCtx
	uid := gregor1.UID(users[0].GetUID().ToBytes())
	t.Logf("uid: %s", uid)
	listener0 := newServerChatListener()
	ctc.as(t, users[0]).h.G().NotifyRouter.AddListener(listener0)
	// impConv: user0 alone; impConv1: user1 with user0; teamConv: user0's team.
	impConv := mustCreateConversationForTest(t, ctc, users[0], chat1.TopicType_CHAT,
		chat1.ConversationMembersType_IMPTEAMNATIVE)
	impConv1 := mustCreateConversationForTest(t, ctc, users[1], chat1.TopicType_CHAT,
		chat1.ConversationMembersType_IMPTEAMNATIVE, users[0])
	t.Logf("impconv: %s", impConv.Id)
	t.Logf("impconv1: %s", impConv1.Id)
	teamConv := mustCreateConversationForTest(t, ctc, users[0], chat1.TopicType_CHAT,
		chat1.ConversationMembersType_TEAM)
	t.Logf("teamconv: %x", teamConv.Id.DbShortForm())

	// test public
	alias := "MIKE BOT"
	commands := []chat1.AdvertiseCommandsParam{
		{
			Typ: chat1.BotCommandsAdvertisementTyp_PUBLIC,
			Commands: []chat1.UserBotCommandInput{
				{
					Name:        "status",
					Description: "get status",
					Usage:       "just type it",
				},
			},
		},
	}
	require.NoError(t, tc.Context().BotCommandManager.Advertise(ctx, &alias, commands))
	// Before UpdateCommands runs, the local command list must be empty.
	cmds, err := tc.Context().BotCommandManager.ListCommands(ctx, impConv.Id)
	require.NoError(t, err)
	require.Zero(t, len(cmds))
	errCh, err := tc.Context().BotCommandManager.UpdateCommands(ctx, impConv.Id, nil)
	require.NoError(t, err)
	errCh1, err := tc1.Context().BotCommandManager.UpdateCommands(ctx1, impConv1.Id, nil)
	require.NoError(t, err)
	// readErrCh waits for an async UpdateCommands result, bounded by timeout.
	readErrCh := func(errCh chan error) error {
		select {
		case err := <-errCh:
			return err
		case <-time.After(timeout):
			return errors.New("timeout")
		}
	}
	// The command update should mark user0's conversation thread stale.
	select {
	case items := <-listener0.threadsStale:
		require.Equal(t, 1, len(items))
		require.Equal(t, impConv.Id, items[0].ConvID)
	case <-time.After(timeout):
		require.Fail(t, "no stale")
	}
	// The verified conversation now carries the custom "status" command.
	impConvLocal, err := utils.GetVerifiedConv(ctx, tc.Context(), uid, impConv.Id,
		types.InboxSourceDataSourceAll)
	require.NoError(t, err)
	typ, err := impConvLocal.BotCommands.Typ()
	require.NoError(t, err)
	require.Equal(t, chat1.ConversationCommandGroupsTyp_CUSTOM, typ)
	require.Equal(t, 1, len(impConvLocal.BotCommands.Custom().Commands))
	require.Equal(t, "status", impConvLocal.BotCommands.Custom().Commands[0].Name)
	require.NoError(t, readErrCh(errCh))
	cmds, err = tc.Context().BotCommandManager.ListCommands(ctx, impConv.Id)
	require.NoError(t, err)
	require.Equal(t, 1, len(cmds))
	require.Equal(t, "status", cmds[0].Name)
	require.NoError(t, readErrCh(errCh1))
	// The PUBLIC advertisement is visible to the second user as well.
	cmds, err = tc1.Context().BotCommandManager.ListCommands(ctx1, impConv1.Id)
	require.NoError(t, err)
	require.Equal(t, 1, len(cmds))
	require.Equal(t, "status", cmds[0].Name)

	// test team
	commands = append(commands, chat1.AdvertiseCommandsParam{
		Typ: chat1.BotCommandsAdvertisementTyp_TLFID_CONVS,
		Commands: []chat1.UserBotCommandInput{{
			Name: "teamconvonly",
		}},
		TeamName: &teamConv.TlfName,
	}, chat1.AdvertiseCommandsParam{
		Typ: chat1.BotCommandsAdvertisementTyp_TLFID_MEMBERS,
		Commands: []chat1.UserBotCommandInput{{
			Name: "teammembsonly",
		}},
		TeamName: &teamConv.TlfName,
	})
	require.NoError(t, tc.Context().BotCommandManager.Advertise(ctx, &alias, commands))
	errCh, err = tc.Context().BotCommandManager.UpdateCommands(ctx, impConv.Id, nil)
	require.NoError(t, err)
	errChT, err := tc.Context().BotCommandManager.UpdateCommands(ctx, teamConv.Id, nil)
	require.NoError(t, err)
	// NOTE(review): this call passes ctx (user0's context) with tc1/impConv1,
	// unlike the earlier call that used ctx1 — looks unintentional; confirm.
	errCh1, err = tc1.Context().BotCommandManager.UpdateCommands(ctx, impConv1.Id, nil)
	require.NoError(t, err)
	require.NoError(t, readErrCh(errCh))
	require.NoError(t, readErrCh(errCh1))
	require.NoError(t, readErrCh(errChT))
	// impConv sees PUBLIC + TLFID_MEMBERS (2); teamConv sees all three;
	// user1's conv still sees only the PUBLIC command.
	cmds, err = tc.Context().BotCommandManager.ListCommands(ctx, impConv.Id)
	require.NoError(t, err)
	require.Equal(t, 2, len(cmds))
	cmds, err = tc.Context().BotCommandManager.ListCommands(ctx, teamConv.Id)
	require.NoError(t, err)
	require.Equal(t, 3, len(cmds))
	cmds, err = tc1.Context().BotCommandManager.ListCommands(ctx1, impConv1.Id)
	require.NoError(t, err)
	require.Equal(t, 1, len(cmds))
}
explode_data.jsonl/48897
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1841 }
[ 2830, 3393, 23502, 4062, 2043, 1155, 353, 8840, 836, 8, 341, 41819, 24703, 11571, 284, 895, 198, 16867, 2915, 368, 314, 990, 24703, 11571, 284, 830, 50746, 89216, 66, 1669, 1281, 15672, 2271, 1972, 1155, 11, 330, 2271, 23502, 4062, 2043...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDeposits walks a governance proposal through its deposit lifecycle:
// no deposits, a first deposit, a top-up from the same address, a third
// deposit that trips the voting threshold, iterator order, and refunds.
func TestDeposits(t *testing.T) {
	mapp, keeper, _, addrs, _, _ := getMockApp(t, 2, GenesisState{}, nil)
	SortAddresses(addrs)
	header := abci.Header{Height: mapp.LastBlockHeight() + 1}
	mapp.BeginBlock(abci.RequestBeginBlock{Header: header})
	ctx := mapp.BaseApp.NewContext(false, abci.Header{})
	tp := testProposal()
	proposal, err := keeper.SubmitProposal(ctx, tp)
	require.NoError(t, err)
	proposalID := proposal.GetProposalID()
	fourStake := sdk.NewCoins(sdk.NewCoin(sdk.DefaultBondDenom, sdk.TokensFromTendermintPower(4)))
	fiveStake := sdk.NewCoins(sdk.NewCoin(sdk.DefaultBondDenom, sdk.TokensFromTendermintPower(5)))
	// Snapshot initial balances so deductions/refunds can be asserted exactly.
	addr0Initial := keeper.ck.GetCoins(ctx, addrs[0])
	addr1Initial := keeper.ck.GetCoins(ctx, addrs[1])
	expTokens := sdk.TokensFromTendermintPower(42)
	require.Equal(t, sdk.NewCoins(sdk.NewCoin(sdk.DefaultBondDenom, expTokens)), addr0Initial)
	require.True(t, proposal.GetTotalDeposit().IsEqual(sdk.NewCoins()))

	// Check no deposits at beginning
	deposit, found := keeper.GetDeposit(ctx, proposalID, addrs[1])
	require.False(t, found)
	proposal, ok := keeper.GetProposal(ctx, proposalID)
	require.True(t, ok)
	// Voting has not started, so the start time is still the zero time.
	require.True(t, proposal.GetVotingStartTime().Equal(time.Time{}))

	// Check first deposit
	// NOTE(review): AddDeposit returns (error, bool) — error first — per this
	// keeper's legacy signature.
	err, votingStarted := keeper.AddDeposit(ctx, proposalID, addrs[0], fourStake)
	require.Nil(t, err)
	require.False(t, votingStarted)
	deposit, found = keeper.GetDeposit(ctx, proposalID, addrs[0])
	require.True(t, found)
	require.Equal(t, fourStake, deposit.Amount)
	require.Equal(t, addrs[0], deposit.Depositor)
	proposal, ok = keeper.GetProposal(ctx, proposalID)
	require.True(t, ok)
	require.Equal(t, fourStake, proposal.GetTotalDeposit())
	require.Equal(t, addr0Initial.Sub(fourStake), keeper.ck.GetCoins(ctx, addrs[0]))

	// Check a second deposit from same address
	err, votingStarted = keeper.AddDeposit(ctx, proposalID, addrs[0], fiveStake)
	require.Nil(t, err)
	require.False(t, votingStarted)
	deposit, found = keeper.GetDeposit(ctx, proposalID, addrs[0])
	require.True(t, found)
	// Deposits from the same address accumulate into one record.
	require.Equal(t, fourStake.Add(fiveStake), deposit.Amount)
	require.Equal(t, addrs[0], deposit.Depositor)
	proposal, ok = keeper.GetProposal(ctx, proposalID)
	require.True(t, ok)
	require.Equal(t, fourStake.Add(fiveStake), proposal.GetTotalDeposit())
	require.Equal(t, addr0Initial.Sub(fourStake).Sub(fiveStake), keeper.ck.GetCoins(ctx, addrs[0]))

	// Check third deposit from a new address
	err, votingStarted = keeper.AddDeposit(ctx, proposalID, addrs[1], fourStake)
	require.Nil(t, err)
	// This deposit pushes the total over the threshold, starting voting.
	require.True(t, votingStarted)
	deposit, found = keeper.GetDeposit(ctx, proposalID, addrs[1])
	require.True(t, found)
	require.Equal(t, addrs[1], deposit.Depositor)
	require.Equal(t, fourStake, deposit.Amount)
	proposal, ok = keeper.GetProposal(ctx, proposalID)
	require.True(t, ok)
	require.Equal(t, fourStake.Add(fiveStake).Add(fourStake), proposal.GetTotalDeposit())
	require.Equal(t, addr1Initial.Sub(fourStake), keeper.ck.GetCoins(ctx, addrs[1]))

	// Check that proposal moved to voting period
	proposal, ok = keeper.GetProposal(ctx, proposalID)
	require.True(t, ok)
	require.True(t, proposal.GetVotingStartTime().Equal(ctx.BlockHeader().Time))

	// Test deposit iterator
	depositsIterator := keeper.GetDeposits(ctx, proposalID)
	require.True(t, depositsIterator.Valid())
	keeper.cdc.MustUnmarshalBinaryLengthPrefixed(depositsIterator.Value(), &deposit)
	require.Equal(t, addrs[0], deposit.Depositor)
	require.Equal(t, fourStake.Add(fiveStake), deposit.Amount)
	depositsIterator.Next()
	keeper.cdc.MustUnmarshalBinaryLengthPrefixed(depositsIterator.Value(), &deposit)
	require.Equal(t, addrs[1], deposit.Depositor)
	require.Equal(t, fourStake, deposit.Amount)
	depositsIterator.Next()
	require.False(t, depositsIterator.Valid())
	depositsIterator.Close()

	// Test Refund Deposits
	deposit, found = keeper.GetDeposit(ctx, proposalID, addrs[1])
	require.True(t, found)
	require.Equal(t, fourStake, deposit.Amount)
	keeper.RefundDeposits(ctx, proposalID)
	deposit, found = keeper.GetDeposit(ctx, proposalID, addrs[1])
	require.False(t, found)
	// After the refund, both accounts are back to their initial balances.
	require.Equal(t, addr0Initial, keeper.ck.GetCoins(ctx, addrs[0]))
	require.Equal(t, addr1Initial, keeper.ck.GetCoins(ctx, addrs[1]))
}
explode_data.jsonl/60867
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1578 }
[ 2830, 3393, 7839, 436, 1199, 1155, 353, 8840, 836, 8, 341, 2109, 676, 11, 53416, 11, 8358, 912, 5428, 11, 8358, 716, 1669, 633, 11571, 2164, 1155, 11, 220, 17, 11, 40788, 1397, 22655, 2092, 340, 7568, 371, 52290, 25906, 5428, 692, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetStateValidateSubscribeKey(t *testing.T) { assert := assert.New(t) pn := NewPubNub(NewDemoConfig()) pn.Config.SubscribeKey = "" opts := &getStateOpts{ Channels: []string{"ch1", "ch2", "ch3"}, ChannelGroups: []string{"cg1", "cg2", "cg3"}, pubnub: pn, } assert.Equal("pubnub/validation: pubnub: \v: Missing Subscribe Key", opts.validate().Error()) }
explode_data.jsonl/32500
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 1949, 1397, 17926, 28573, 1592, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 3223, 77, 1669, 1532, 29162, 45, 392, 35063, 37413, 2648, 2398, 3223, 77, 10753, 82628, 1592, 284, 8389, 64734, 1669, 609, 6235...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_cvssScoreWithNilMetadata(t *testing.T) { pres := Presenter{ metadataProvider: &NilMetadataProvider{}, } score := pres.cvssScore(vulnerability.Vulnerability{ ID: "id", Namespace: "namespace", }) assert.Equal(t, float64(-1), score) }
explode_data.jsonl/25495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 43233, 778, 10570, 2354, 19064, 14610, 1155, 353, 8840, 836, 8, 341, 3223, 416, 1669, 96223, 515, 197, 2109, 7603, 5179, 25, 609, 19064, 14610, 5179, 38837, 197, 532, 60425, 1669, 1652, 75948, 778, 10570, 3747, 58401, 2897, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestResMgmtClientQueries drives the resource-management client's query
// surface (orderer config, installed/instantiated chaincodes, channels)
// against a single target peer, for both the main and a Java chaincode.
func TestResMgmtClientQueries(t *testing.T) {
	// Using shared SDK instance to increase test speed.
	sdk := mainSDK
	testSetup := mainTestSetup
	chaincodeID := mainChaincodeID

	// prepare contexts
	org1AdminClientContext := sdk.Context(fabsdk.WithUser(org1AdminUser), fabsdk.WithOrg(org1Name))

	// Resource management client
	client, err := resmgmt.New(org1AdminClientContext)
	if err != nil {
		t.Fatalf("Failed to create new resource management client: %s", err)
	}

	// Our target for queries will be primary peer on this channel
	target := testSetup.Targets[0]

	testQueryConfigFromOrderer(t, testSetup.ChannelID, client)
	testInstalledChaincodes(t, chaincodeID, target, client)
	testInstantiatedChaincodes(t, testSetup.ChannelID, chaincodeID, target, client)
	testQueryChannels(t, testSetup.ChannelID, target, client)

	// test java chaincode installed and instantiated
	// NOTE(review): the java check queries orgChannelID rather than
	// testSetup.ChannelID used above — presumably where the java chaincode
	// was instantiated; confirm against the suite setup.
	javaCCID := integration.GenerateExampleJavaID(false)
	testInstalledChaincodes(t, javaCCID, target, client)
	testInstantiatedChaincodes(t, orgChannelID, javaCCID, target, client)
}
explode_data.jsonl/35753
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 329 }
[ 2830, 3393, 1061, 44, 46063, 2959, 55261, 1155, 353, 8840, 836, 8, 1476, 197, 322, 12091, 6094, 26623, 2867, 311, 5263, 1273, 4628, 624, 1903, 7584, 1669, 1887, 31534, 198, 18185, 21821, 1669, 1887, 2271, 21821, 198, 197, 8819, 1851, 91...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestFormatter_FormatStatusEvent verifies that a status event for a resource
// is rendered as a single human-readable line on the formatter's output.
func TestFormatter_FormatStatusEvent(t *testing.T) {
	testCases := map[string]struct {
		previewStrategy common.DryRunStrategy
		event           event.StatusEvent
		// statusCollector is declared for the table shape but not used by
		// the single case below.
		statusCollector list.Collector
		expected        string
	}{
		"resource update with Current status": {
			previewStrategy: common.DryRunNone,
			event: event.StatusEvent{
				Identifier: object.ObjMetadata{
					GroupKind: schema.GroupKind{
						Group: "apps",
						Kind:  "Deployment",
					},
					Namespace: "foo",
					Name:      "bar",
				},
				PollResourceInfo: &pollevent.ResourceStatus{
					Identifier: object.ObjMetadata{
						GroupKind: schema.GroupKind{
							Group: "apps",
							Kind:  "Deployment",
						},
						Namespace: "foo",
						Name:      "bar",
					},
					Status:  status.CurrentStatus,
					Message: "Resource is Current",
				},
			},
			expected: "deployment.apps/bar is Current: Resource is Current",
		},
	}

	for tn, tc := range testCases {
		t.Run(tn, func(t *testing.T) {
			// Capture stdout of the formatter via test IO streams.
			ioStreams, _, out, _ := genericclioptions.NewTestIOStreams() //nolint:dogsled
			formatter := NewFormatter(ioStreams, tc.previewStrategy)
			err := formatter.FormatStatusEvent(tc.event)
			assert.NoError(t, err)
			// Compare ignoring the trailing newline the formatter emits.
			assert.Equal(t, tc.expected, strings.TrimSpace(out.String()))
		})
	}
}
explode_data.jsonl/58257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 553 }
[ 2830, 3393, 14183, 72999, 2522, 1556, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 197, 27171, 19816, 4185, 909, 884, 6727, 19816, 198, 197, 28302, 1843, 1538, 10538, 1556, 198, 197, 23847, 53694, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStartCmdWithMissingClientSecretArg(t *testing.T) { startCmd := GetStartCmd(&mockServer{}) var args []string args = append(args, hostURLArg()...) args = append(args, endpointAuthURLArg()...) args = append(args, endpointTokenURLArg()...) args = append(args, clientRedirectURLArg()...) args = append(args, clientIDArg()...) startCmd.SetArgs(args) err := startCmd.Execute() require.Contains(t, err.Error(), "Neither client-secret (command line flag) nor OAUTH2_ISSUER_CLIENT_SECRET (environment variable) have been set.") }
explode_data.jsonl/23761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 3479, 15613, 2354, 25080, 2959, 19773, 2735, 1155, 353, 8840, 836, 8, 341, 21375, 15613, 1669, 2126, 3479, 15613, 2099, 16712, 5475, 6257, 692, 2405, 2827, 3056, 917, 198, 31215, 284, 8737, 7356, 11, 3468, 3144, 2735, 368, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_execActive_fail checks that execActive reports failure both when the
// env-handle is absent from the JVM cache and when a cached test JVM is
// present for that handle.
func Test_execActive_fail(t *testing.T) {
	// Fabricate an opaque env handle from the address of a throwaway struct;
	// only the numeric handle value matters to the cache lookup.
	address1 := &address.Address{}
	envHandleUintptr := uintptr(unsafe.Pointer(address1))
	// Reset the package-level JVM cache so the first lookup misses.
	jvmsCached, _ = lru.New(1000)
	// Cache miss: execActive must fail.
	ok := execActive(opener, 100, envHandleUintptr)
	assert.Equal(t, false, ok)
	jvmTestEnv := setupTestEnv()
	jvmsCached.Add(envHandleUintptr, jvmTestEnv.jvm)
	// Cache hit with the test JVM: still expected to fail.
	ok = execActive(opener, 100, envHandleUintptr)
	assert.Equal(t, false, ok)
}
explode_data.jsonl/38983
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 18430, 5728, 22121, 1155, 353, 8840, 836, 8, 341, 63202, 16, 1669, 609, 4995, 26979, 16094, 57538, 6999, 21570, 3505, 1669, 38190, 7, 38157, 41275, 15434, 16, 1171, 12428, 85, 1011, 70293, 11, 716, 284, 326, 2672, 7121, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSimpleQuery exercises Client.GetAll over a fake query backend: valid
// destination slice types must decode the two fake entities and their keys,
// and every invalid destination type must fail with ErrInvalidEntityType
// without ever reaching the backend.
func TestSimpleQuery(t *testing.T) {
	// Expected decodings of the two fake entities in struct, PropertyList,
	// and PropertyMap form.
	struct1 := Gopher{Name: "George", Height: 32}
	struct2 := Gopher{Name: "Rufus"}
	pList1 := PropertyList{
		{
			Name:  "Height",
			Value: int64(32),
		},
		{
			Name:  "Name",
			Value: "George",
		},
	}
	pList2 := PropertyList{
		{
			Name:  "Name",
			Value: "Rufus",
		},
	}
	pMap1 := PropertyMap{
		"Name": Property{
			Name:  "Name",
			Value: "George",
		},
		"Height": Property{
			Name:  "Height",
			Value: int64(32),
		},
	}
	pMap2 := PropertyMap{
		"Name": Property{
			Name:  "Name",
			Value: "Rufus",
		},
	}
	testCases := []struct {
		dst  interface{}
		want interface{} // nil means GetAll must reject dst
	}{
		// The destination must have type *[]P, *[]S or *[]*S, for some non-interface
		// type P such that *P implements PropertyLoadSaver, or for some struct type S.
		{new([]Gopher), &[]Gopher{struct1, struct2}},
		{new([]*Gopher), &[]*Gopher{&struct1, &struct2}},
		{new([]PropertyList), &[]PropertyList{pList1, pList2}},
		{new([]PropertyMap), &[]PropertyMap{pMap1, pMap2}},
		// Any other destination type is invalid.
		{0, nil},
		{Gopher{}, nil},
		{PropertyList{}, nil},
		{PropertyMap{}, nil},
		{[]int{}, nil},
		{[]Gopher{}, nil},
		{[]PropertyList{}, nil},
		{new(int), nil},
		{new(Gopher), nil},
		{new(PropertyList), nil}, // This is a special case.
		{new(PropertyMap), nil},
		{new([]int), nil},
		{new([]map[int]int), nil},
		{new([]map[string]Property), nil},
		{new([]map[string]interface{}), nil},
		{new([]*int), nil},
		{new([]*map[int]int), nil},
		{new([]*map[string]Property), nil},
		{new([]*map[string]interface{}), nil},
		{new([]**Gopher), nil},
		{new([]*PropertyList), nil},
		{new([]*PropertyMap), nil},
	}
	for _, tc := range testCases {
		// nCall counts backend round-trips so the invalid cases can prove
		// the query was rejected before hitting the fake service.
		nCall := 0
		client := &Client{
			client: &fakeClient{
				queryFn: func(req *pb.RunQueryRequest) (*pb.RunQueryResponse, error) {
					nCall++
					return fakeRunQuery(req)
				},
			},
		}
		ctx := context.Background()
		var (
			expectedErr   error
			expectedNCall int
		)
		if tc.want == nil {
			expectedErr = ErrInvalidEntityType
		} else {
			expectedNCall = 1
		}
		keys, err := client.GetAll(ctx, NewQuery("Gopher"), tc.dst)
		if err != expectedErr {
			t.Errorf("dst type %T: got error %v, want %v", tc.dst, err, expectedErr)
			continue
		}
		if nCall != expectedNCall {
			t.Errorf("dst type %T: Context.Call was called an incorrect number of times: got %d want %d", tc.dst, nCall, expectedNCall)
			continue
		}
		if err != nil {
			continue
		}
		// Valid cases: verify the returned keys match the fake backend's two
		// entities (parent key 6, child key 8).
		key1 := IDKey("Gopher", 6, nil)
		expectedKeys := []*Key{
			key1,
			IDKey("Gopher", 8, key1),
		}
		if l1, l2 := len(keys), len(expectedKeys); l1 != l2 {
			t.Errorf("dst type %T: got %d keys, want %d keys", tc.dst, l1, l2)
			continue
		}
		for i, key := range keys {
			if !keysEqual(key, expectedKeys[i]) {
				t.Errorf("dst type %T: got key #%d %v, want %v", tc.dst, i, key, expectedKeys[i])
				continue
			}
		}
		// Make sure we sort any PropertyList items (the order is not deterministic).
		if pLists, ok := tc.dst.(*[]PropertyList); ok {
			for _, p := range *pLists {
				sort.Sort(byName(p))
			}
		}
		if !testutil.Equal(tc.dst, tc.want) {
			t.Errorf("dst type %T: Entities\ngot %+v\nwant %+v", tc.dst, tc.dst, tc.want)
			continue
		}
	}
}
explode_data.jsonl/50243
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1450 }
[ 2830, 3393, 16374, 2859, 1155, 353, 8840, 836, 8, 341, 6472, 16, 1669, 479, 16940, 63121, 25, 330, 38952, 497, 21432, 25, 220, 18, 17, 532, 6472, 17, 1669, 479, 16940, 63121, 25, 330, 49, 1704, 355, 16707, 3223, 852, 16, 1669, 8655,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFeedItem runs each entry of the shared itemTC table as a named subtest.
func TestFeedItem(t *testing.T) {
	for i := range itemTC {
		entry := itemTC[i]
		t.Run(entry.name, func(t *testing.T) {
			testFeedItem(t, entry.name, entry.act, entry.cat, entry.itm, entry.mock)
		})
	}
}
explode_data.jsonl/78671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 28916, 1234, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 1509, 7749, 341, 197, 3244, 16708, 44415, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 18185, 28916, 1234, 1155, 11, 17130, 2644, 11, 17130, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestFiles_SharedWith serves a canned "shared-with" JSON payload for file 1
// and checks that Files.sharedWith decodes both share entries.
func TestFiles_SharedWith(t *testing.T) {
	setup()
	defer teardown()

	// Canned API response: two users the file is shared with.
	fixture := `
{
	"shared-with": [
		{
			"share_id": 1,
			"user_avatar_url": "https://some-valid-avatar-url.com/avatar.jpg",
			"user_name": "spike"
		},
		{
			"share_id": 2,
			"user_avatar_url": "https://some-valid-avatar-url.com/avatar2.jpg",
			"user_name": "edward"
		}
	],
	"status": "OK"
}
`
	mux.HandleFunc("/v2/files/1/shared-with", func(w http.ResponseWriter, r *http.Request) {
		testMethod(t, r, "GET")
		fmt.Fprintln(w, fixture)
	})

	ctx := context.Background()
	files, err := client.Files.sharedWith(ctx, 1)
	if err != nil {
		t.Error(err)
	}
	if len(files) != 2 {
		t.Errorf("got: %v, want: %v", len(files), 2)
	}
}
explode_data.jsonl/48327
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 10809, 36578, 1605, 2354, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 1166, 12735, 1669, 22074, 515, 197, 1, 6100, 26189, 788, 2278, 262, 341, 197, 197, 1, 19368, 842, 788, 220, 16, 345, 197, 197, 76522, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBindIndex checks the type of bound index expressions: plain collection
// indexing yields the element type, while Output/Promise-typed collections or
// keys lift the result into the corresponding Output/Promise type.
func TestBindIndex(t *testing.T) {
	// Environment: a-b plain scalars, c-f Output/Promise scalars, g-i plain
	// collections, j-o Output/Promise-wrapped collections.
	env := environment(map[string]interface{}{
		"a": StringType,
		"b": IntType,
		"c": NewOutputType(StringType),
		"d": NewOutputType(IntType),
		"e": NewPromiseType(StringType),
		"f": NewPromiseType(IntType),
		"g": NewListType(BoolType),
		"h": NewMapType(BoolType),
		"i": NewObjectType(map[string]Type{"foo": BoolType}),
		"j": NewOutputType(NewListType(BoolType)),
		"k": NewOutputType(NewMapType(BoolType)),
		"l": NewOutputType(NewObjectType(map[string]Type{"foo": BoolType})),
		"m": NewPromiseType(NewListType(BoolType)),
		"n": NewPromiseType(NewMapType(BoolType)),
		"o": NewPromiseType(NewObjectType(map[string]Type{"foo": BoolType})),
	})
	scope := env.scope()
	cases := []exprTestCase{
		// Standard operations
		{x: "g[a]", t: BoolType},
		{x: "g[b]", t: BoolType},
		{x: "h[a]", t: BoolType},
		{x: "h[b]", t: BoolType},
		{x: "i[a]", t: BoolType},
		{x: "i[b]", t: BoolType},
		// Lifted operations
		{x: "g[c]", t: NewOutputType(BoolType)},
		{x: "g[d]", t: NewOutputType(BoolType)},
		{x: "h[c]", t: NewOutputType(BoolType)},
		{x: "h[d]", t: NewOutputType(BoolType)},
		{x: "i[c]", t: NewOutputType(BoolType)},
		{x: "i[d]", t: NewOutputType(BoolType)},
		{x: "g[e]", t: NewPromiseType(BoolType)},
		{x: "g[f]", t: NewPromiseType(BoolType)},
		{x: "h[e]", t: NewPromiseType(BoolType)},
		{x: "h[f]", t: NewPromiseType(BoolType)},
		{x: "i[e]", t: NewPromiseType(BoolType)},
		{x: "i[f]", t: NewPromiseType(BoolType)},
		{x: "j[a]", t: NewOutputType(BoolType)},
		{x: "j[b]", t: NewOutputType(BoolType)},
		{x: "k[a]", t: NewOutputType(BoolType)},
		{x: "k[b]", t: NewOutputType(BoolType)},
		{x: "l[a]", t: NewOutputType(BoolType)},
		{x: "l[b]", t: NewOutputType(BoolType)},
		{x: "m[a]", t: NewPromiseType(BoolType)},
		{x: "m[b]", t: NewPromiseType(BoolType)},
		{x: "n[a]", t: NewPromiseType(BoolType)},
		{x: "n[b]", t: NewPromiseType(BoolType)},
		{x: "o[a]", t: NewPromiseType(BoolType)},
		{x: "o[b]", t: NewPromiseType(BoolType)},
	}
	for _, c := range cases {
		t.Run(c.x, func(t *testing.T) {
			expr, diags := BindExpressionText(c.x, scope, hcl.Pos{})
			// Binding must succeed with no diagnostics, produce the expected
			// type and an IndexExpression, and round-trip back to the source.
			assert.Len(t, diags, 0)
			assert.Equal(t, c.t, expr.Type())
			_, ok := expr.(*IndexExpression)
			assert.True(t, ok)
			assert.Equal(t, c.x, fmt.Sprintf("%v", expr))
		})
	}
}
explode_data.jsonl/42567
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1136 }
[ 2830, 3393, 9950, 1552, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 4573, 9147, 14032, 31344, 67066, 197, 197, 56693, 788, 93635, 345, 197, 197, 1, 65, 788, 1333, 929, 345, 197, 197, 96946, 788, 1532, 5097, 929, 2242, 929, 1326, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_MarkDeviceAsMounted_Positive_NewVolume verifies that marking an
// attached volume's device as globally mounted succeeds and that the volume
// then appears in the actual-state-of-world's unmounted and
// globally-mounted sets.
func Test_MarkDeviceAsMounted_Positive_NewVolume(t *testing.T) {
	// Arrange
	volumePluginMgr, plugin := volumetesting.GetTestVolumePluginMgr(t)
	asw := NewActualStateOfWorld("mynode" /* nodeName */, volumePluginMgr)
	// Pod with a single GCE PD volume to derive the unique volume name from.
	pod := &v1.Pod{
		ObjectMeta: metav1.ObjectMeta{
			Name: "pod1",
			UID:  "pod1uid",
		},
		Spec: v1.PodSpec{
			Volumes: []v1.Volume{
				{
					Name: "volume-name",
					VolumeSource: v1.VolumeSource{
						GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{
							PDName: "fake-device1",
						},
					},
				},
			},
		},
	}
	volumeSpec := &volume.Spec{Volume: &pod.Spec.Volumes[0]}
	devicePath := "fake/device/path"
	deviceMountPath := "fake/device/mount/path"
	generatedVolumeName, err := util.GetUniqueVolumeNameFromSpec(plugin, volumeSpec)
	if err != nil {
		t.Fatalf("GetUniqueVolumeNameFromSpec failed. Expected: <no error> Actual: <%v>", err)
	}
	// The volume must be attached before its device can be marked mounted.
	err = asw.MarkVolumeAsAttached(emptyVolumeName, volumeSpec, "" /* nodeName */, devicePath)
	if err != nil {
		t.Fatalf("MarkVolumeAsAttached failed. Expected: <no error> Actual: <%v>", err)
	}

	// Act
	err = asw.MarkDeviceAsMounted(generatedVolumeName, devicePath, deviceMountPath)

	// Assert
	if err != nil {
		t.Fatalf("MarkDeviceAsMounted failed. Expected: <no error> Actual: <%v>", err)
	}
	verifyVolumeExistsAsw(t, generatedVolumeName, true /* shouldExist */, asw)
	verifyVolumeExistsInUnmountedVolumes(t, generatedVolumeName, asw)
	verifyVolumeExistsInGloballyMountedVolumes(t, generatedVolumeName, asw)
}
explode_data.jsonl/28883
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 589 }
[ 2830, 3393, 1245, 838, 6985, 2121, 90789, 44246, 3404, 39582, 18902, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 5195, 4661, 11546, 25567, 11, 9006, 1669, 62820, 57824, 287, 2234, 2271, 18902, 11546, 25567, 1155, 340, 60451, 86, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestBadTrustedCIDRsForRunListener checks that RunListener fails when the
// router is configured with a malformed trusted-proxy CIDR ("hello/world").
func TestBadTrustedCIDRsForRunListener(t *testing.T) {
	router := New()
	router.TrustedProxies = []string{"hello/world"}
	// Bind an ephemeral local port for the listener.
	addr, err := net.ResolveTCPAddr("tcp", "localhost:0")
	assert.NoError(t, err)
	listener, err := net.ListenTCP("tcp", addr)
	assert.NoError(t, err)
	// NOTE(review): the assertion runs in a goroutine and the test only
	// sleeps briefly; if RunListener took longer than the sleep to return,
	// the failure could be reported after the test ends — fragile pattern.
	go func() {
		router.GET("/example", func(c *Context) { c.String(http.StatusOK, "it worked") })
		assert.Error(t, router.RunListener(listener))
	}()
	// have to wait for the goroutine to start and run the server
	// otherwise the main thread will complete
	time.Sleep(5 * time.Millisecond)
}
explode_data.jsonl/1172
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 17082, 1282, 27145, 54146, 42327, 2461, 6727, 2743, 1155, 353, 8840, 836, 8, 341, 67009, 1669, 1532, 741, 67009, 8240, 27145, 1336, 80934, 284, 3056, 917, 4913, 14990, 78892, 63159, 53183, 11, 1848, 1669, 4179, 57875, 49896, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBenchmarkResult_AllKeyValuePairs checks how AllKeyValuePairs merges a
// benchmark's name-embedded tags ("/name=bob"), its Configuration map, and
// trailing "-N" GOMAXPROCS-style suffixes into one ordered key/value map.
func TestBenchmarkResult_AllKeyValuePairs(t *testing.T) {
	// verifyPairs builds a subtest asserting that in.AllKeyValuePairs()
	// equals expected exactly (contents and order).
	verifyPairs := func(in *BenchmarkResult, expected *OrderedStringStringMap) func(t *testing.T) {
		return func(t *testing.T) {
			out := in.AllKeyValuePairs()
			require.Equal(t, expected, out)
		}
	}
	// Bare name: just the benchmark name with an empty value.
	t.Run("case=simple", verifyPairs(&BenchmarkResult{
		Name: "BenchmarkBob",
	}, &OrderedStringStringMap{
		Contents: map[string]string{
			"BenchmarkBob": "",
		},
		Order: []string{
			"BenchmarkBob",
		},
	}))
	// Name tag and a matching Configuration entry merge into one pair.
	t.Run("case=simpleconfig", verifyPairs(&BenchmarkResult{
		Name: "BenchmarkBob/name=bob",
		Configuration: &OrderedStringStringMap{
			Contents: map[string]string{
				"name": "bob",
			},
			Order: []string{
				"name",
			},
		},
	}, &OrderedStringStringMap{
		Contents: map[string]string{
			"BenchmarkBob": "",
			"name":         "bob",
		},
		Order: []string{
			"BenchmarkBob",
			"name",
		},
	}))
	// Name-embedded tag alone, no Configuration.
	t.Run("case=sometags", verifyPairs(&BenchmarkResult{
		Name: "BenchmarkBob/name=bob",
	}, &OrderedStringStringMap{
		Contents: map[string]string{
			"BenchmarkBob": "",
			"name":         "bob",
		},
		Order: []string{
			"BenchmarkBob",
			"name",
		},
	}))
	// When the name tag and Configuration disagree, the name tag wins.
	t.Run("case=configmatches", verifyPairs(&BenchmarkResult{
		Name: "BenchmarkBob/name=bob",
		Configuration: &OrderedStringStringMap{
			Contents: map[string]string{
				"name": "john",
			},
			Order: []string{
				"name",
			},
		},
	}, &OrderedStringStringMap{
		Contents: map[string]string{
			"BenchmarkBob": "",
			"name":         "bob",
		},
		Order: []string{
			"BenchmarkBob",
			"name",
		},
	}))
	// A trailing "-<digits>" suffix is stripped from the tag value.
	t.Run("case=withdashnum", verifyPairs(&BenchmarkResult{
		Name: "BenchmarkBob/name=bob-8",
	}, &OrderedStringStringMap{
		Contents: map[string]string{
			"BenchmarkBob": "",
			"name":         "bob",
		},
		Order: []string{
			"BenchmarkBob",
			"name",
		},
	}))
	// A bare trailing dash is kept as-is.
	t.Run("case=justdash", verifyPairs(&BenchmarkResult{
		Name: "BenchmarkBob/name=bob-",
	}, &OrderedStringStringMap{
		Contents: map[string]string{
			"BenchmarkBob": "",
			"name":         "bob-",
		},
		Order: []string{
			"BenchmarkBob",
			"name",
		},
	}))
	// A dash followed by non-digits is not treated as a numeric suffix.
	t.Run("case=dashmixed", verifyPairs(&BenchmarkResult{
		Name: "BenchmarkBob/name=bob-3n",
	}, &OrderedStringStringMap{
		Contents: map[string]string{
			"BenchmarkBob": "",
			"name":         "bob-3n",
		},
		Order: []string{
			"BenchmarkBob",
			"name",
		},
	}))
}
explode_data.jsonl/39870
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1064 }
[ 2830, 3393, 84971, 2077, 53629, 72082, 54228, 1155, 353, 8840, 836, 8, 341, 93587, 54228, 1669, 2915, 5900, 353, 84971, 2077, 11, 3601, 353, 54384, 703, 703, 2227, 8, 2915, 1155, 353, 8840, 836, 8, 341, 197, 853, 2915, 1155, 353, 8840...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRulesGetValues(t *testing.T) { type tests struct { input Rules want []string } tcs := []tests{ {input: Rules{{Tag: "tag", Value: "value"}, {Tag: "tag2", Value: ""}}, want: []string{"value", ""}}, {input: Rules{{Tag: "tag", Value: "value"}}, want: []string{"value"}}, {input: Rules{}, want: nil}, } for _, tc := range tcs { got := tc.input.GetValues() if !reflect.DeepEqual(tc.want, got) { t.Fatalf("expected: %v, got: %v", tc.want, got) } } }
explode_data.jsonl/69242
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 26008, 1949, 6227, 1155, 353, 8840, 836, 8, 341, 13158, 7032, 2036, 341, 197, 22427, 22847, 198, 197, 50780, 220, 3056, 917, 198, 197, 630, 3244, 4837, 1669, 3056, 23841, 515, 197, 197, 90, 1355, 25, 22847, 2979, 5668, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGlobalStatus(t *testing.T) { globalRegistry = newRegistry() if err := Register(DriverDef{Name: "foo"}); err != nil { t.Errorf("register returned error: %v", err) } expected := State{Installed: true, Healthy: true} bar := DriverDef{ Name: "bar", Default: true, Priority: Default, Status: func() State { return expected }, } if err := Register(bar); err != nil { t.Errorf("register returned error: %v", err) } if diff := cmp.Diff(Status("bar"), expected); diff != "" { t.Errorf("status mismatch (-want +got):\n%s", diff) } }
explode_data.jsonl/15483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 11646, 2522, 1155, 353, 8840, 836, 8, 341, 18842, 15603, 284, 501, 15603, 2822, 743, 1848, 1669, 8451, 5432, 5469, 2620, 63121, 25, 330, 7975, 9207, 1215, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 6343, 5927, 1465, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLchown verifies that Lchown changes the group owner of a symlink
// itself while leaving the link target's ownership untouched.
func TestLchown(t *testing.T) {
	// Use TempDir() to make sure we're on a local file system,
	// so that the group ids returned by Getgroups will be allowed
	// on the file. On NFS, the Getgroups groups are
	// basically useless.
	f := newFile("TestLchown", t)
	defer Remove(f.Name())
	defer f.Close()
	dir, err := f.Stat()
	if err != nil {
		t.Fatalf("stat %s: %s", f.Name(), err)
	}
	linkname := f.Name() + "2"
	if err := Symlink(f.Name(), linkname); err != nil {
		if runtime.GOOS == "android" && IsPermission(err) {
			t.Skip("skipping test on Android; permission error creating symlink")
		}
		t.Fatalf("link %s -> %s: %v", f.Name(), linkname, err)
	}
	defer Remove(linkname)

	// Can't change uid unless root, but can try
	// changing the group id. First try our current group.
	gid := Getgid()
	t.Log("gid:", gid)
	if err = Lchown(linkname, -1, gid); err != nil {
		t.Fatalf("lchown %s -1 %d: %s", linkname, gid, err)
	}
	sys := dir.Sys().(*syscall.Stat_t)
	checkUidGid(t, linkname, int(sys.Uid), gid)

	// Then try all the auxiliary groups.
	groups, err := Getgroups()
	if err != nil {
		t.Fatalf("getgroups: %s", err)
	}
	t.Log("groups: ", groups)
	for _, g := range groups {
		if err = Lchown(linkname, -1, g); err != nil {
			t.Fatalf("lchown %s -1 %d: %s", linkname, g, err)
		}
		checkUidGid(t, linkname, int(sys.Uid), g)

		// Check that link target's gid is unchanged.
		checkUidGid(t, f.Name(), int(sys.Uid), int(sys.Gid))
	}
}
explode_data.jsonl/74927
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 594 }
[ 2830, 3393, 43, 331, 779, 1155, 353, 8840, 836, 8, 341, 197, 322, 5443, 19944, 6184, 368, 311, 1281, 2704, 582, 2299, 389, 264, 2205, 1034, 1849, 345, 197, 322, 773, 429, 279, 1874, 14151, 5927, 553, 2126, 16753, 686, 387, 5420, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
// TestAutoIncIDInRetry checks that auto-increment IDs allocated inside an
// explicit transaction stay sequential when the commit is retried.
func TestAutoIncIDInRetry(t *testing.T) {
	store, clean := testkit.CreateMockStore(t)
	defer clean()
	tk := testkit.NewTestKit(t, store)
	tk.MustExec("use test")
	tk.MustExec("drop table if exists t;")
	tk.MustExec("create table t (id int not null auto_increment primary key)")
	// Transaction auto-retry must be enabled for the commit-retry failpoint
	// below to take effect.
	tk.MustExec("set @@tidb_disable_txn_auto_retry = 0")
	tk.MustExec("begin")
	tk.MustExec("insert into t values ()")
	tk.MustExec("insert into t values (),()")
	tk.MustExec("insert into t values ()")
	// Force a commit retry via the failpoint, then disable it again.
	require.NoError(t, failpoint.Enable("github.com/pingcap/tidb/session/mockCommitRetryForAutoIncID", `return(true)`))
	tk.MustExec("commit")
	require.NoError(t, failpoint.Disable("github.com/pingcap/tidb/session/mockCommitRetryForAutoIncID"))
	tk.MustExec("insert into t values ()")
	// IDs 1..5 must come out contiguous despite the retried commit.
	tk.MustQuery(`select * from t`).Check(testkit.Rows("1", "2", "3", "4", "5"))
}
explode_data.jsonl/38148
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 13253, 39245, 915, 641, 51560, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestNewWatcherCancel verifies that canceling a watch removes its key from
// the store's synced watcher group.
func TestNewWatcherCancel(t *testing.T) {
	b, tmpPath := backend.NewDefaultTmpBackend()
	s := newWatchableStore(b, &lease.FakeLessor{}, nil)
	defer func() {
		s.store.Close()
		os.Remove(tmpPath)
	}()
	testKey := []byte("foo")
	testValue := []byte("bar")
	s.Put(testKey, testValue, lease.NoLease)

	w := s.NewWatchStream()
	wt := w.Watch(testKey, nil, 0)

	if err := w.Cancel(wt); err != nil {
		t.Error(err)
	}

	if s.synced.contains(string(testKey)) {
		// the key should have been deleted by the Cancel above
		t.Errorf("existence = true, want false")
	}
}
explode_data.jsonl/12640
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 3564, 47248, 9269, 1155, 353, 8840, 836, 8, 341, 2233, 11, 4174, 1820, 1669, 19163, 7121, 3675, 35986, 29699, 741, 1903, 1669, 501, 14247, 480, 6093, 1883, 11, 609, 1623, 991, 726, 43, 8309, 22655, 2092, 692, 16867, 2915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDrainHTTPRequestBody(t *testing.T) { var someTestBody []byte = []byte(`SOME TEST BODY`) req, err := http.NewRequest("POST", "/", bytes.NewReader(someTestBody)) if err != nil { t.Errorf("cannot create new request: %v", err) return } // drain shouldn't affect further reads, do it multiple times and check result drainAndCheckEqual(t, req, someTestBody) drainAndCheckEqual(t, req, someTestBody) drainAndCheckEqual(t, req, someTestBody) drainAndCheckEqual(t, req, someTestBody) }
explode_data.jsonl/42543
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 8847, 466, 9230, 33334, 1155, 353, 8840, 836, 8, 341, 2405, 1045, 2271, 5444, 3056, 3782, 284, 3056, 3782, 5809, 50, 11408, 13602, 68583, 24183, 24395, 11, 1848, 1669, 1758, 75274, 445, 2946, 497, 64657, 5820, 68587, 1141, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSupportedSelectWhere(t *testing.T) { c := newQueryConverter(nil, nil) for sql, expectedJson := range supportedWhereCases { query, _, err := c.ConvertWhereOrderBy(sql) assert.NoError(t, err) actualMap, _ := query.Source() actualJson, _ := json.Marshal(actualMap) assert.Equal(t, expectedJson, string(actualJson), fmt.Sprintf("sql: %s", sql)) } }
explode_data.jsonl/1320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 34636, 3379, 9064, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 501, 2859, 14920, 27907, 11, 2092, 692, 2023, 5704, 11, 3601, 5014, 1669, 2088, 7248, 9064, 37302, 341, 197, 27274, 11, 8358, 1848, 1669, 272, 36179, 9064, 34605, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestClientOneway exercises the Thrift oneway client wrapper: request
// bodies are envelope-encoded (or not) depending on the client options, and
// an envelope-type mismatch surfaces as an encode error before any call.
func TestClientOneway(t *testing.T) {
	caller, service, procedureName := "caller", "MyService", "someMethod"
	tests := []struct {
		desc            string
		giveRequestBody envelope.Enveloper // outgoing request body
		clientOptions   []ClientOption
		expectCall      bool // whether outbound.Call is expected

		wantRequestEnvelope *wire.Envelope // expect EncodeEnveloped(x)
		wantRequestBody     *wire.Value    // expect Encode(x)
		wantError           string         // whether an error is expected
	}{
		{
			desc:            "happy case",
			giveRequestBody: fakeEnveloper(wire.Call),
			clientOptions:   []ClientOption{Enveloped},
			expectCall:      true,
			wantRequestEnvelope: &wire.Envelope{
				Name:  procedureName,
				SeqID: 1,
				Type:  wire.Call,
				Value: wire.NewValueStruct(wire.Struct{}),
			},
		},
		{
			desc:            "happy case without enveloping",
			giveRequestBody: fakeEnveloper(wire.Call),
			expectCall:      true,
			wantRequestBody: valueptr(wire.NewValueStruct(wire.Struct{})),
		},
		{
			desc:            "wrong envelope type for request",
			giveRequestBody: fakeEnveloper(wire.Reply),
			clientOptions:   []ClientOption{Enveloped},
			wantError: `failed to encode "thrift" request body for procedure ` +
				`"MyService::someMethod" of service "MyService": unexpected envelope type: Reply`,
		},
	}

	for _, tt := range tests {
		mockCtrl := gomock.NewController(t)
		// NOTE(review): deferred inside the loop, so every controller is
		// finished only when the test function returns, not per iteration.
		defer mockCtrl.Finish()

		proto := thrifttest.NewMockProtocol(mockCtrl)

		// The mock protocol writes this fixed payload in place of a real
		// encoding; the request matcher below expects exactly these bytes.
		bodyBytes := []byte("irrelevant")
		if tt.wantRequestEnvelope != nil {
			proto.EXPECT().EncodeEnveloped(*tt.wantRequestEnvelope, gomock.Any()).
				Do(func(_ wire.Envelope, w io.Writer) {
					_, err := w.Write(bodyBytes)
					require.NoError(t, err, "Write() failed")
				}).Return(nil)
		}
		if tt.wantRequestBody != nil {
			proto.EXPECT().Encode(*tt.wantRequestBody, gomock.Any()).
				Do(func(_ wire.Value, w io.Writer) {
					_, err := w.Write(bodyBytes)
					require.NoError(t, err, "Write() failed")
				}).Return(nil)
		}

		ctx := context.Background()
		onewayOutbound := transporttest.NewMockOnewayOutbound(mockCtrl)

		// The outbound must receive a request whose metadata and body match
		// exactly what the client is expected to produce.
		requestMatcher := transporttest.NewRequestMatcher(t, &transport.Request{
			Caller:    caller,
			Service:   service,
			Encoding:  Encoding,
			Procedure: procedure.ToName(service, procedureName),
			Body:      bytes.NewReader(bodyBytes),
		})

		if tt.expectCall {
			if tt.wantError != "" {
				onewayOutbound.
					EXPECT().
					CallOneway(ctx, requestMatcher).
					Return(nil, errors.New(tt.wantError))
			} else {
				onewayOutbound.
					EXPECT().
					CallOneway(ctx, requestMatcher).
					Return(&successAck{}, nil)
			}
		}

		opts := tt.clientOptions
		opts = append(opts, Protocol(proto))
		c := New(Config{
			Service: service,
			ClientConfig: clientconfig.MultiOutbound(caller, service, transport.Outbounds{
				Oneway: onewayOutbound,
			}),
		}, opts...)

		ack, err := c.CallOneway(ctx, tt.giveRequestBody)
		if tt.wantError != "" {
			if assert.Error(t, err, "%v: expected failure", tt.desc) {
				assert.Contains(t, err.Error(), tt.wantError, "%v: error mismatch", tt.desc)
			}
		} else {
			assert.NoError(t, err, "%v: expected success", tt.desc)
			assert.Equal(t, "success", ack.String())
		}
	}
}
explode_data.jsonl/20933
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1378 }
[ 2830, 3393, 2959, 46, 931, 352, 1155, 353, 8840, 836, 8, 341, 1444, 13956, 11, 2473, 11, 10324, 675, 1669, 330, 56126, 497, 330, 5050, 1860, 497, 330, 14689, 3523, 1837, 78216, 1669, 3056, 1235, 341, 197, 41653, 310, 914, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouter_NewRouter_WithAutoMethodHeadEnabled(t *testing.T) { mainRouter := NewRouter(RouterConfig{ EnableAutoMethodHead: true, }) _ = mainRouter.Register(http.MethodGet, "/with/slash", testHandlerFunc, MatchingOptions{Name: "test_name"}) req, _ := http.NewRequest(http.MethodGet, "/with/slash", nil) getResponse := httptest.NewRecorder() mainRouter.ServeHTTP(getResponse, req) assertEqual(t, http.StatusOK, getResponse.Code) req, _ = http.NewRequest(http.MethodHead, "/with/slash", nil) headResponse := httptest.NewRecorder() mainRouter.ServeHTTP(headResponse, req) assertEqual(t, http.StatusOK, headResponse.Code) }
explode_data.jsonl/31746
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 9523, 39582, 9523, 62, 2354, 13253, 3523, 12346, 5462, 1155, 353, 8840, 836, 8, 341, 36641, 9523, 1669, 1532, 9523, 2785, 2676, 2648, 515, 197, 197, 11084, 13253, 3523, 12346, 25, 830, 345, 197, 8824, 197, 62, 284, 1887, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_PostConfig_MultipleUsers(t *testing.T) { setup(t) defer cleanup(t) userID1 := makeUserID() userID2 := makeUserID() for _, c := range allClients { config1 := c.post(t, userID1, makeConfig()) config2 := c.post(t, userID2, makeConfig()) foundConfig1 := c.get(t, userID1) assert.Equal(t, config1, foundConfig1) foundConfig2 := c.get(t, userID2) assert.Equal(t, config2, foundConfig2) assert.True(t, config2.ID > config1.ID, "%v > %v", config2.ID, config1.ID) } }
explode_data.jsonl/37312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 66726, 2648, 1245, 12229, 7137, 1155, 353, 8840, 836, 8, 341, 84571, 1155, 340, 16867, 21290, 1155, 692, 19060, 915, 16, 1669, 1281, 36899, 741, 19060, 915, 17, 1669, 1281, 36899, 741, 2023, 8358, 272, 1669, 2088, 678, 47174...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGitService_GetBlob(t *testing.T) { client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/repos/o/r/git/blobs/s", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testHeader(t, r, "Accept", mediaTypeGraphQLNodeIDPreview) fmt.Fprint(w, `{ "sha": "s", "content": "blob content" }`) }) blob, _, err := client.Git.GetBlob(context.Background(), "o", "r", "s") if err != nil { t.Errorf("Git.GetBlob returned error: %v", err) } want := Blob{ SHA: String("s"), Content: String("blob content"), } if !reflect.DeepEqual(*blob, want) { t.Errorf("Blob.Get returned %+v, want %+v", *blob, want) } }
explode_data.jsonl/11494
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 46562, 1860, 13614, 37985, 1155, 353, 8840, 836, 8, 341, 25291, 11, 59807, 11, 8358, 49304, 1669, 6505, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 68354, 20271, 7382, 60590, 3470, 68164, 2687, 497, 2915, 3622, 1758, 375...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListServerCerts(t *testing.T) { thisTime := time.Now() nowString := thisTime.Format("2006-01-02 15:04:05 Monday") t.Log("Starting unit test at " + nowString) mockSvc := &mockIAMClient{} _, err := GetServerCerts(mockSvc) if err != nil { t.Fatal(err) } t.Log("Retrieved the server certificates") }
explode_data.jsonl/72889
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 852, 5475, 34, 15546, 1155, 353, 8840, 836, 8, 341, 262, 419, 1462, 1669, 882, 13244, 741, 262, 1431, 703, 1669, 419, 1462, 9978, 445, 17, 15, 15, 21, 12, 15, 16, 12, 15, 17, 220, 16, 20, 25, 15, 19, 25, 15, 20, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFixExamplesHeaders(t *testing.T) { p := &tfMarkdownParser{} codeFence := "```" t.Run("WithCodeFences", func(t *testing.T) { markdown := ` # digitalocean\_cdn Provides a DigitalOcean CDN Endpoint resource for use with Spaces. ## Example Usage #### Basic Example ` + codeFence + `typescript // Some code. ` + codeFence + ` ## Argument Reference` var processedMarkdown string groups := splitGroupLines(markdown, "## ") for _, lines := range groups { p.fixExampleTitles(lines) for _, line := range lines { processedMarkdown += line } } assert.NotContains(t, processedMarkdown, "#### Basic Example") assert.Contains(t, processedMarkdown, "### Basic Example") }) t.Run("WithoutCodeFences", func(t *testing.T) { markdown := ` # digitalocean\_cdn Provides a DigitalOcean CDN Endpoint resource for use with Spaces. ## Example Usage #### Basic Example Misleading example title without any actual code fences. We should not modify the title. ## Argument Reference` var processedMarkdown string groups := splitGroupLines(markdown, "## ") for _, lines := range groups { p.fixExampleTitles(lines) for _, line := range lines { processedMarkdown += line } } assert.Contains(t, processedMarkdown, "#### Basic Example") }) }
explode_data.jsonl/33536
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 442 }
[ 2830, 3393, 25958, 40381, 10574, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 609, 8935, 68005, 6570, 31483, 43343, 37, 763, 1669, 330, 73594, 698, 3244, 16708, 445, 2354, 2078, 37, 2380, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 210...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMakeXRegistryConfigHeaderGetCredentialsRoundtrip(t *testing.T) { for _, tc := range []struct { name string fileContents string username, password string expectedOverride *types.DockerAuthConfig expectedFileValues map[string]types.DockerAuthConfig }{ { name: "no data", fileContents: "", username: "", password: "", expectedOverride: nil, expectedFileValues: nil, }, { name: "file data", fileContents: largeAuthFile, username: "", password: "", expectedOverride: nil, expectedFileValues: largeAuthFileValues, }, { name: "file data + override", fileContents: largeAuthFile, username: "override-user", password: "override-pass", expectedOverride: &types.DockerAuthConfig{Username: "override-user", Password: "override-pass"}, expectedFileValues: largeAuthFileValues, }, } { sys, cleanup := systemContextForAuthFile(t, tc.fileContents) defer cleanup() headers, err := MakeXRegistryConfigHeader(sys, tc.username, tc.password) require.NoError(t, err) req, err := http.NewRequest(http.MethodPost, "/", nil) require.NoError(t, err, tc.name) for k, v := range headers { req.Header.Set(k, v) } override, resPath, err := GetCredentials(req) require.NoError(t, err, tc.name) defer RemoveAuthfile(resPath) if tc.expectedOverride == nil { assert.Nil(t, override, tc.name) } else { require.NotNil(t, override, tc.name) assert.Equal(t, *tc.expectedOverride, *override, tc.name) } for key, expectedAuth := range tc.expectedFileValues { auth, err := config.GetCredentials(&types.SystemContext{AuthFilePath: resPath}, key) require.NoError(t, err, tc.name) assert.Equal(t, expectedAuth, auth, "%s, key %s", tc.name, key) } } }
explode_data.jsonl/33430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 819 }
[ 2830, 3393, 8078, 55, 15603, 2648, 4047, 1949, 27025, 27497, 32981, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 2290, 914, 198, 197, 17661, 14803, 981, 914, 198, 197, 72358, 11, 3552, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestServer_Push_RejectRecursivePush verifies that a handler serving a
// pushed request cannot itself push: Push must return ErrRecursivePush.
func TestServer_Push_RejectRecursivePush(t *testing.T) {
	// Expect two requests, but might get three if there's a bug and the second push succeeds.
	errc := make(chan error, 3)
	handler := func(w http.ResponseWriter, r *http.Request) error {
		baseURL := "https://" + r.Host
		switch r.URL.Path {
		case "/":
			// Pushing from the original request's handler must succeed.
			if err := w.(http.Pusher).Push(baseURL+"/push1", nil); err != nil {
				return fmt.Errorf("first Push()=%v, want nil", err)
			}
			return nil
		case "/push1":
			// Pushing from a pushed request's handler must be rejected.
			if got, want := w.(http.Pusher).Push(baseURL+"/push2", nil), ErrRecursivePush; got != want {
				return fmt.Errorf("Push()=%v, want %v", got, want)
			}
			return nil
		default:
			return fmt.Errorf("unexpected path: %q", r.URL.Path)
		}
	}
	st := newServerTester(t, func(w http.ResponseWriter, r *http.Request) {
		errc <- handler(w, r)
	})
	defer st.Close()
	st.greet()
	getSlash(st)
	// Collect the results of the "/" handler and the "/push1" handler.
	if err := <-errc; err != nil {
		t.Errorf("First request failed: %v", err)
	}
	if err := <-errc; err != nil {
		t.Errorf("Second request failed: %v", err)
	}
}
explode_data.jsonl/1970
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 415 }
[ 2830, 3393, 5475, 1088, 1116, 50693, 583, 78542, 16644, 1155, 353, 8840, 836, 8, 341, 197, 322, 32085, 1378, 7388, 11, 714, 2578, 633, 2326, 421, 1052, 594, 264, 9876, 323, 279, 2086, 4484, 50081, 624, 9859, 66, 1669, 1281, 35190, 146...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIPSetList(t *testing.T) { // NOTE: IPv4-in-IPv6 addresses are represented as IPv4 in its string value s, err := ParseIPSet("3.0.0.0", "1.0.0.0", "2.0.0.0", "::ffff:1.2.3.4") if err != nil { t.Errorf("error parsing IPSet: %v", err) } l := s.StringSlice() sort.Strings(l) if !reflect.DeepEqual(l, []string{"1.0.0.0", "1.2.3.4", "2.0.0.0", "3.0.0.0"}) { t.Errorf("List gave unexpected result: %#v", l) } }
explode_data.jsonl/15552
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 3298, 1649, 852, 1155, 353, 8840, 836, 8, 341, 197, 322, 16743, 25, 31560, 19, 3419, 12, 58056, 21, 14230, 525, 15251, 438, 31560, 19, 304, 1181, 914, 897, 198, 1903, 11, 1848, 1669, 14775, 3298, 1649, 445, 18, 13, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestFixSetTiDBSnapshotTS checks that setting @@tidb_snapshot to a historical
// timestamp restores the infoschema of that moment (a since-dropped database
// becomes usable again) and that it survives later session-variable updates.
func TestFixSetTiDBSnapshotTS(t *testing.T) {
	store, clean := realtikvtest.CreateMockStoreAndSetup(t)
	defer clean()
	tk := testkit.NewTestKit(t, store)
	// A GC safe point must exist before snapshot reads are permitted.
	safePointName := "tikv_gc_safe_point"
	safePointValue := "20160102-15:04:05 -0700"
	safePointComment := "All versions after safe point can be accessed. (DO NOT EDIT)"
	updateSafePoint := fmt.Sprintf(`INSERT INTO mysql.tidb VALUES ('%[1]s', '%[2]s', '%[3]s') ON DUPLICATE KEY UPDATE variable_value = '%[2]s', comment = '%[3]s'`, safePointName, safePointValue, safePointComment)
	tk.MustExec(updateSafePoint)
	tk.MustExec("create database t123")
	// Sleep on both sides of the captured timestamp so ts falls strictly
	// between the create above and the drop below.
	time.Sleep(time.Second)
	ts := time.Now().Format("2006-1-2 15:04:05")
	time.Sleep(time.Second)
	tk.MustExec("drop database t123")
	tk.MustMatchErrMsg("use t123", ".*Unknown database.*")
	tk.MustExec(fmt.Sprintf("set @@tidb_snapshot='%s'", ts))
	tk.MustExec("use t123")
	// update any session variable and assert whether infoschema is changed
	tk.MustExec("SET SESSION sql_mode = 'STRICT_TRANS_TABLES,NO_AUTO_CREATE_USER';")
	tk.MustExec("use t123")
}
explode_data.jsonl/5718
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 405 }
[ 2830, 3393, 25958, 1649, 45351, 3506, 15009, 9951, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1931, 83, 1579, 85, 1944, 7251, 11571, 6093, 3036, 21821, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_write_array_of_interface_in_struct(t *testing.T) { should := require.New(t) type TestObject struct { Field []interface{} Field2 string } val := TestObject{[]interface{}{1, 2}, ""} str, err := jsoner.DefaultAPI().MarshalToString(val) should.Nil(err) should.Contains(str, `"Field":[1,2]`) should.Contains(str, `"Field2":""`) }
explode_data.jsonl/57881
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 9165, 3858, 3575, 20546, 1243, 15126, 1155, 353, 8840, 836, 8, 341, 197, 5445, 1669, 1373, 7121, 1155, 340, 13158, 3393, 1190, 2036, 341, 197, 94478, 220, 3056, 4970, 16094, 197, 94478, 17, 914, 198, 197, 532, 19302, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStack cross-checks two stack implementations that support GetMin:
// random values are pushed into both, and each stack's reported minimum is
// compared against the true minimum of the generated values.
func TestStack(t *testing.T) {
	t.Log("具有getMin 功能的栈表组测试")
	var tests = []struct {
		randAmount int // how many random numbers to generate
		randMax    int // exclusive upper bound for the random numbers
	}{
		{
			30,
			100,
		},
		{
			50,
			200,
		},
		{
			500,
			1000,
		},
	}
	for i, test := range tests {
		t.Logf("正在进行第%d组测试, 共%d组", i+1, len(tests))
		stack_1 := stack1.New() // implementation 1: create an empty stack
		stack_2 := stack2.New() // implementation 2: create an empty stack
		rand.Seed(time.Now().UnixNano())
		t.Logf("生成一组随机数,并依次插入栈中,随机数个数为 %d,随机数最大值为 %d", test.randAmount, test.randMax)
		var randNums []int // slice keeping every generated random number
		for i := 0; i < test.randAmount; i++ {
			randNum := rand.Intn(test.randMax)
			randNums = append(randNums, randNum)
			stack_1.Push(randNum)
			stack_2.Push(randNum)
		}
		t.Logf("随机数插入顺序为:%v", randNums)
		sort.Ints(randNums)
		t.Logf("随机数从小到大排列为:%v", randNums)
		min := randNums[0] // true minimum of the generated numbers
		getMin1, ok1 := stack_1.GetMin()
		getMin2, ok2 := stack_2.GetMin()
		// Implementation 1: GetMin must report the true minimum.
		if !ok1 {
			t.Fatal("栈无数据!")
		} else if getMin1 != min {
			t.Fatalf("方案一栈GetMin()返回值 %d 与随机数最小值 %d不匹配", getMin1, min)
		} else {
			t.Logf("方案一栈GetMin()返回值 %d 与随机数最小值 %d匹配", getMin1, min)
		}
		// Implementation 2: same check.
		if !ok2 {
			t.Fatal("栈无数据!")
		} else if getMin2 != min {
			t.Fatalf("方案二栈GetMin()返回值 %d 与随机数最小值 %d不匹配", getMin2, min)
		} else {
			t.Logf("方案二栈GetMin()返回值 %d 与随机数最小值 %d匹配", getMin2, min)
		}
	}
}
explode_data.jsonl/66063
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1018 }
[ 2830, 3393, 4336, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 445, 100629, 455, 6217, 54599, 96808, 9370, 103642, 20742, 40027, 81705, 1138, 2405, 7032, 284, 3056, 1235, 341, 197, 7000, 437, 10093, 526, 220, 442, 18137, 248, 237, 32648, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestStepsOnExitTimeout checks that when a workflow hits its timeout, an
// onExit handler node is scheduled (present in Pending phase).
func TestStepsOnExitTimeout(t *testing.T) {
	cancel, controller := newController()
	defer cancel()
	wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("")

	// Test list expansion
	ctx := context.Background()
	wf := unmarshalWF(onExitTimeout)
	wf, err := wfcset.Create(ctx, wf, metav1.CreateOptions{})
	assert.Nil(t, err)
	woc := newWorkflowOperationCtx(wf, controller)
	woc.operate(ctx)

	// Operate a second time on the updated workflow so the timeout/onExit
	// transition is processed.
	woc = newWorkflowOperationCtx(woc.wf, controller)
	woc.operate(ctx)

	wf, err = wfcset.Get(ctx, wf.ObjectMeta.Name, metav1.GetOptions{})
	assert.Nil(t, err)
	onExitNodeIsPresent := false
	for _, node := range wf.Status.Nodes {
		if strings.Contains(node.Name, "onExit") && node.Phase == wfv1.NodePending {
			onExitNodeIsPresent = true
			break
		}
	}
	assert.True(t, onExitNodeIsPresent)
}
explode_data.jsonl/70995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 33951, 1925, 15339, 7636, 1155, 353, 8840, 836, 8, 341, 84441, 11, 6461, 1669, 501, 2051, 741, 16867, 9121, 741, 6692, 8316, 746, 1669, 6461, 1418, 69, 2972, 746, 18979, 45926, 73, 53, 16, 7141, 16, 1005, 6776, 38140, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestPaginatedVotesQuery seeds 20 votes on one proposal and checks that the
// legacy querier paginates them consistently: the full list, per-page
// chunks, and an invalid page yielding no votes.
func TestPaginatedVotesQuery(t *testing.T) {
	app := simapp.Setup(t, false)
	ctx := app.BaseApp.NewContext(false, tmproto.Header{})
	legacyQuerierCdc := app.LegacyAmino()

	proposal := v1beta2.Proposal{
		Id:     100,
		Status: v1beta2.StatusVotingPeriod,
	}
	app.GovKeeper.SetProposal(ctx, proposal)

	votes := make([]v1beta2.Vote, 20)
	random := rand.New(rand.NewSource(time.Now().UnixNano()))
	addrMap := make(map[string]struct{})
	// genAddr produces a fresh random 20-byte bech32 address, retrying on
	// the (unlikely) chance of a duplicate.
	genAddr := func() string {
		addr := make(sdk.AccAddress, 20)
		for {
			random.Read(addr)
			addrStr := addr.String()
			if _, ok := addrMap[addrStr]; !ok {
				addrMap[addrStr] = struct{}{}
				return addrStr
			}
		}
	}
	for i := range votes {
		vote := v1beta2.Vote{
			ProposalId: proposal.Id,
			Voter:      genAddr(),
			Options:    v1beta2.NewNonSplitVoteOption(v1beta2.OptionYes),
		}
		votes[i] = vote
		app.GovKeeper.SetVote(ctx, vote)
	}

	querier := keeper.NewQuerier(app.GovKeeper, legacyQuerierCdc)

	// keeper preserves consistent order for each query, but this is not the insertion order
	all := getQueriedVotes(t, ctx, legacyQuerierCdc, querier, proposal.Id, 1, 0)
	require.Equal(t, len(all), len(votes))

	type testCase struct {
		description string
		page        int
		limit       int
		votes       []v1beta2.Vote // expected slice of the full query result
	}
	for _, tc := range []testCase{
		{
			description: "SkipAll",
			page:        2,
			limit:       len(all),
		},
		{
			description: "GetFirstChunk",
			page:        1,
			limit:       10,
			votes:       all[:10],
		},
		{
			description: "GetSecondsChunk",
			page:        2,
			limit:       10,
			votes:       all[10:],
		},
		{
			description: "InvalidPage",
			page:        -1,
		},
	} {
		tc := tc
		t.Run(tc.description, func(t *testing.T) {
			votes := getQueriedVotes(t, ctx, legacyQuerierCdc, querier, proposal.Id, tc.page, tc.limit)
			require.Equal(t, len(tc.votes), len(votes))
			for i := range votes {
				require.Equal(t, tc.votes[i], votes[i])
			}
		})
	}
}
explode_data.jsonl/60180
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 907 }
[ 2830, 3393, 47712, 15479, 75535, 2859, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1643, 676, 39820, 1155, 11, 895, 340, 20985, 1669, 906, 13018, 2164, 7121, 1972, 3576, 11, 17333, 15110, 15753, 37790, 197, 39884, 2183, 261, 1268, 34, 76...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNasTypeGetSetPDUSESSIONMODIFICATIONCOMPLETEMessageIdentityMessageType(t *testing.T) { a := nasType.NewPDUSESSIONMODIFICATIONCOMPLETEMessageIdentity() for _, table := range nasTypePDUSESSIONMODIFICATIONCOMPLETEMessageIdentityMessageTypeTable { a.SetMessageType(table.in) assert.Equal(t, table.out, a.GetMessageType()) } }
explode_data.jsonl/48474
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 45, 300, 929, 1949, 1649, 47, 21547, 6302, 26459, 22872, 81969, 2052, 18558, 82107, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 17141, 929, 7121, 47, 21547, 6302, 26459, 22872, 81969, 2052, 18558, 741, 2023, 8358, 1965, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestColor16 exercises the 4-bit (16-color) API end to end: render helpers
// that return strings, print helpers that write to the forced color buffer,
// Darken/Light conversions, and the exported color-name lookup maps.
func TestColor16(t *testing.T) {
	// Force color rendering into a capture buffer regardless of terminal support.
	buf := forceOpenColorRender()
	defer resetColorRender()
	at := assert.New(t)
	at.True(Bold.IsValid())
	// Render/Text/Sprint/Sprintf return the wrapped string without printing.
	r := Bold.Render("text")
	at.Equal("\x1b[1mtext\x1b[0m", r)
	r = LightYellow.Text("text")
	at.Equal("\x1b[93mtext\x1b[0m", r)
	r = LightWhite.Sprint("text")
	at.Equal("\x1b[97mtext\x1b[0m", r)
	// Render concatenates args without separator; Renderln joins with spaces.
	r = White.Render("test", "spaces")
	at.Equal("\x1b[37mtestspaces\x1b[0m", r)
	r = Black.Renderln("test", "spaces")
	at.Equal("\x1b[30mtest spaces\x1b[0m", r)
	str := Red.Sprintf("A %s", "MSG")
	at.Equal("\x1b[31mA MSG\x1b[0m", str)
	// Color.Print
	FgGray.Print("MSG")
	str = buf.String()
	at.Equal("\x1b[90mMSG\x1b[0m", str)
	buf.Reset()
	// Color.Printf
	BgGray.Printf("A %s", "MSG")
	str = buf.String()
	at.Equal("\x1b[100mA MSG\x1b[0m", str)
	buf.Reset()
	// Color.Println
	LightMagenta.Println("MSG")
	str = buf.String()
	at.Equal("\x1b[95mMSG\x1b[0m\n", str)
	buf.Reset()
	// Println with no args emits only the newline — no escape codes.
	LightMagenta.Println()
	str = buf.String()
	at.Equal("\n", str)
	buf.Reset()
	// Consecutive prints accumulate in the buffer in call order.
	LightCyan.Print("msg")
	LightRed.Printf("m%s", "sg")
	LightGreen.Println("msg")
	str = buf.String()
	at.Equal("\x1b[96mmsg\x1b[0m\x1b[91mmsg\x1b[0m\x1b[92mmsg\x1b[0m\n", str)
	buf.Reset()
	// Color.Darken: light variant maps back to the base color; values outside
	// the light range are returned unchanged.
	blue := LightBlue.Darken()
	at.Equal(94, int(LightBlue))
	at.Equal(34, int(blue))
	c := Color(120).Darken()
	at.Equal(120, int(c))
	// Color.Light: base color maps to its light variant; out-of-range unchanged.
	lightCyan := Cyan.Light()
	at.Equal(36, int(Cyan))
	at.Equal(96, int(lightCyan))
	c = Bit4(120).Light()
	at.Equal(120, int(c))
	// Colors vars
	_, ok := FgColors["red"]
	at.True(ok)
	_, ok = ExFgColors["lightRed"]
	at.True(ok)
	_, ok = BgColors["red"]
	at.True(ok)
	_, ok = ExBgColors["lightRed"]
	at.True(ok)
}
explode_data.jsonl/7004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 843 }
[ 2830, 3393, 1636, 16, 21, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 5344, 5002, 1636, 6750, 741, 16867, 7585, 1636, 6750, 741, 35447, 1669, 2060, 7121, 1155, 692, 35447, 32443, 5349, 813, 28992, 2398, 7000, 1669, 46002, 27386, 445, 131...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Enrich_Models verifies that enrichSpec back-links each model in a
// parsed spec to the version that owns it.
func Test_Enrich_Models(t *testing.T) {
	// NOTE(review): YAML indentation reconstructed from a whitespace-collapsed
	// source view — confirm against the original fixture.
	data := `
models:
  Model1:
    field1: Model3
    field2: Model2
`
	old, err := unmarshalSpec([]byte(data))
	assert.Equal(t, err, nil)
	enrichSpec(old)
	ver := &old.Versions[0]
	// After enrichment the model's Version pointer must refer to its owning version.
	assert.Equal(t, ver.Models[0].Version, ver)
}
explode_data.jsonl/79857
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 62, 1702, 13851, 21626, 82, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 22074, 6507, 510, 220, 4903, 16, 510, 262, 2070, 16, 25, 4903, 18, 198, 262, 2070, 17, 25, 4903, 17, 198, 3989, 61828, 11, 1848, 1669, 650, 27121, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestArenaWebsocketApi(t *testing.T) { web := setupTestWeb(t) server, wsUrl := web.startTestServer() defer server.Close() conn, _, err := gorillawebsocket.DefaultDialer.Dial(wsUrl+"/api/arena/websocket", nil) assert.Nil(t, err) defer conn.Close() ws := websocket.NewTestWebsocket(conn) // Should get a few status updates right after connection. readWebsocketType(t, ws, "matchTiming") readWebsocketType(t, ws, "matchLoad") readWebsocketType(t, ws, "matchTime") }
explode_data.jsonl/75532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 93937, 5981, 9556, 6563, 1155, 353, 8840, 836, 8, 341, 97250, 1669, 6505, 2271, 5981, 1155, 692, 41057, 11, 17624, 2864, 1669, 3482, 4962, 2271, 5475, 741, 16867, 3538, 10421, 741, 32917, 11, 8358, 1848, 1669, 45198, 483, 67...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintOrg(t *testing.T) { factory.InitFactories(nil) config := configtxgentest.LoadTopLevel() assert.NoError(t, doPrintOrg(config, genesisconfig.SampleOrgName), "Good org to print") err := doPrintOrg(config, genesisconfig.SampleOrgName+".wrong") assert.Error(t, err, "Bad org name") assert.Regexp(t, "organization [^ ]* not found", err.Error()) config.Organizations[0] = &genesisconfig.Organization{Name: "FakeOrg", ID: "FakeOrg"} err = doPrintOrg(config, "FakeOrg") assert.Error(t, err, "Fake org") assert.Regexp(t, "bad org definition", err.Error()) }
explode_data.jsonl/14595
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 8994, 42437, 1155, 353, 8840, 836, 8, 341, 1166, 2919, 26849, 17417, 2433, 27907, 340, 25873, 1669, 2193, 3998, 15772, 477, 13969, 5366, 4449, 2822, 6948, 35699, 1155, 11, 653, 8994, 42437, 8754, 11, 59366, 1676, 76266, 42437,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSpanProcessor_setStatusCode(t *testing.T) { factory := NewFactory() cfg := factory.CreateDefaultConfig() oCfg := cfg.(*Config) oCfg.SetStatus = &Status{ Code: "Error", Description: "Set custom error message", } tp, err := factory.CreateTracesProcessor(context.Background(), componenttest.NewNopProcessorCreateSettings(), oCfg, consumertest.NewNop()) require.Nil(t, err) require.NotNil(t, tp) td := generateTraceDataSetStatus(pdata.StatusCodeUnset, "foobar", nil) assert.NoError(t, tp.ConsumeTraces(context.Background(), td)) assert.EqualValues(t, generateTraceDataSetStatus(pdata.StatusCodeError, "Set custom error message", nil), td) }
explode_data.jsonl/51045
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 12485, 22946, 2602, 15872, 1155, 353, 8840, 836, 8, 341, 1166, 2919, 1669, 1532, 4153, 741, 50286, 1669, 8633, 7251, 3675, 2648, 741, 22229, 42467, 1669, 13286, 41399, 2648, 340, 22229, 42467, 4202, 2522, 284, 609, 2522, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseFlagsGood(t *testing.T) { c, err := parseFlags([]string{ "-dataDir=.", "-logLevel=INFO", "-serverAddress=127.0.0.1", "-serverPort=8081", "-socketPath=/tmp/agent.sock", "-trustBundle=conf/agent/dummy_root_ca.crt", "-trustDomain=example.org", }) require.NoError(t, err) assert.Equal(t, c.DataDir, ".") assert.Equal(t, c.LogLevel, "INFO") assert.Equal(t, c.ServerAddress, "127.0.0.1") assert.Equal(t, c.ServerPort, 8081) assert.Equal(t, c.SocketPath, "/tmp/agent.sock") assert.Equal(t, c.TrustBundlePath, "conf/agent/dummy_root_ca.crt") assert.Equal(t, c.TrustDomain, "example.org") }
explode_data.jsonl/69509
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 14463, 9195, 15216, 1155, 353, 8840, 836, 8, 341, 1444, 11, 1848, 1669, 4715, 9195, 10556, 917, 515, 197, 197, 34294, 691, 6184, 28, 10346, 197, 197, 34294, 839, 4449, 28, 6637, 756, 197, 197, 34294, 4030, 4286, 28, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContextQueryParam(t *testing.T) { q := make(url.Values) q.Set("name", "Jon Snow") q.Set("email", "jon@labstack.com") req := test.NewRequest(GET, "/?"+q.Encode(), nil) e := New() c := e.NewContext(req, nil) // QueryParam assert.Equal(t, "Jon Snow", c.QueryParam("name")) assert.Equal(t, "jon@labstack.com", c.QueryParam("email")) // QueryParams assert.Equal(t, map[string][]string{ "name": []string{"Jon Snow"}, "email": []string{"jon@labstack.com"}, }, c.QueryParams()) }
explode_data.jsonl/35874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 1972, 84085, 1155, 353, 8840, 836, 8, 341, 18534, 1669, 1281, 6522, 35145, 340, 18534, 4202, 445, 606, 497, 330, 37152, 18901, 1138, 18534, 4202, 445, 2332, 497, 330, 34165, 31, 14380, 7693, 905, 1138, 24395, 1669, 1273, 752...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMakeMultipartUserData(t *testing.T) { populatedUserData, err := newUserData(userdata.Options{ Directive: userdata.ShellScript + "/bin/bash", Content: "echo foo", }) require.NoError(t, err) fileOne := "1.txt" fileTwo := "2.txt" multipart, err := makeMultipartUserData(map[string]*userData{}) require.NoError(t, err) assert.NotEmpty(t, multipart) multipart, err = makeMultipartUserData(map[string]*userData{ fileOne: populatedUserData, fileTwo: populatedUserData, }) require.NoError(t, err) assert.Contains(t, multipart, fileOne) assert.Contains(t, multipart, fileTwo) assert.Equal(t, 2, strings.Count(multipart, string(populatedUserData.Directive))) assert.Equal(t, 2, strings.Count(multipart, populatedUserData.Content)) }
explode_data.jsonl/3811
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 8078, 44, 18204, 39485, 1155, 353, 8840, 836, 8, 341, 74813, 7757, 39485, 11, 1848, 1669, 501, 39485, 4277, 691, 22179, 515, 197, 10957, 1226, 533, 25, 66874, 10849, 613, 5910, 488, 3521, 6863, 17148, 756, 197, 197, 2762, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShouldReturnNormalizedValueForTagFilterOfApplicationConfigWhenStateFuncIsCalledAndValueCanBeNormalized(t *testing.T) { resourceHandle := NewApplicationConfigResourceHandle() schema := resourceHandle.MetaData().Schema expectedValue := expressionEntityTypeDestEqValue newValue := validTagFilter require.Equal(t, expectedValue, schema[ApplicationConfigFieldTagFilter].StateFunc(newValue)) }
explode_data.jsonl/64929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 14996, 5598, 79082, 1130, 2461, 5668, 5632, 2124, 4988, 2648, 4498, 1397, 9626, 3872, 20960, 3036, 1130, 69585, 79082, 1155, 353, 8840, 836, 8, 341, 50346, 6999, 1669, 1532, 4988, 2648, 4783, 6999, 741, 1903, 3416, 1669, 5101,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIterator(t *testing.T) { // 实现带快照的迭代器 vector := NewVector() iter := NewIterator(vector) for i := 0; i < 20; i++ { vector.Add(i) } iter.Init() vector.Remove(0) vector.Remove(11) vector.Add(100) for iter.hasNext() { fmt.Printf("%v\t", iter.currentItem()) } fmt.Println() fmt.Println("========================================") iter.Init() for iter.hasNext() { fmt.Printf("%v\t", iter.currentItem()) } fmt.Println() }
explode_data.jsonl/5876
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 11951, 1155, 353, 8840, 836, 8, 341, 197, 322, 92293, 46451, 99278, 99234, 99331, 9370, 113862, 31548, 198, 20365, 1669, 1532, 3781, 741, 79924, 1669, 1532, 11951, 19066, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestShutdown verifies graceful shutdown: a request that is in flight when
// Shutdown is called still receives its full response, and the listener stops
// accepting new connections once Serve returns.
func TestShutdown(t *testing.T) {
	ln := fasthttputil.NewInmemoryListener()
	// The handler sleeps so the request is still being served when Shutdown starts.
	h := func(ctx *RequestCtx) {
		time.Sleep(time.Millisecond * 500)
		ctx.Success("aaa/bbb", []byte("real response"))
	}
	s := &Server{
		Handler: h,
	}
	serveCh := make(chan struct{})
	go func() {
		if err := s.Serve(ln); err != nil {
			t.Fatalf("unexepcted error: %s", err)
		}
		// After Serve returns the listener must refuse new connections.
		_, err := ln.Dial()
		if err == nil {
			t.Fatalf("server is still listening")
		}
		serveCh <- struct{}{}
	}()
	clientCh := make(chan struct{})
	go func() {
		conn, err := ln.Dial()
		if err != nil {
			t.Fatalf("unexepcted error: %s", err)
		}
		if _, err = conn.Write([]byte("GET / HTTP/1.1\r\nHost: google.com\r\n\r\n")); err != nil {
			t.Fatalf("unexpected error: %s", err)
		}
		br := bufio.NewReader(conn)
		// The in-flight request must complete despite the shutdown below.
		verifyResponse(t, br, StatusOK, "aaa/bbb", "real response")
		clientCh <- struct{}{}
	}()
	// Give the client goroutine time to get its request in flight before shutting down.
	time.Sleep(time.Millisecond * 100)
	shutdownCh := make(chan struct{})
	go func() {
		if err := s.Shutdown(); err != nil {
			t.Fatalf("unexepcted error: %s", err)
		}
		shutdownCh <- struct{}{}
	}()
	// Wait for all three goroutines, failing if any single wait exceeds a second.
	done := 0
	for {
		select {
		case <-time.After(time.Second):
			t.Fatalf("shutdown took too long")
		case <-serveCh:
			done++
		case <-clientCh:
			done++
		case <-shutdownCh:
			done++
		}
		if done == 3 {
			return
		}
	}
}
explode_data.jsonl/73314
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 586 }
[ 2830, 3393, 62004, 1155, 353, 8840, 836, 8, 341, 197, 2261, 1669, 4937, 96336, 628, 321, 7121, 641, 17269, 2743, 741, 9598, 1669, 2915, 7502, 353, 1900, 23684, 8, 341, 197, 21957, 31586, 9730, 71482, 353, 220, 20, 15, 15, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetFullName(t *testing.T) { name, err := GetFullContainerName("testing") if err != nil { t.Fatal(err) } if name != "/testing" { t.Fatalf("Expected /testing got %s", name) } if _, err := GetFullContainerName(""); err == nil { t.Fatal("Error should not be nil") } }
explode_data.jsonl/21111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 1949, 36217, 1155, 353, 8840, 836, 8, 341, 11609, 11, 1848, 1669, 2126, 9432, 4502, 675, 445, 8840, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 743, 829, 961, 3521, 8840, 1, 341, 197, 3244, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestGetBase drives GetBase through a table of ELF file types (EXEC, DYN,
// REL), load segments, and mapping ranges — including kernel, ASLR'd kernel,
// PPC64, and ChromeOS layouts — checking the computed base or expected error.
func TestGetBase(t *testing.T) {
	fhExec := &elf.FileHeader{
		Type: elf.ET_EXEC,
	}
	fhRel := &elf.FileHeader{
		Type: elf.ET_REL,
	}
	fhDyn := &elf.FileHeader{
		Type: elf.ET_DYN,
	}
	// Load segment whose virtual address differs from its file offset.
	lsOffset := &elf.ProgHeader{
		Vaddr: 0x400000,
		Off:   0x200000,
	}
	// Canonical x86-64 kernel text segment address.
	kernelHeader := &elf.ProgHeader{
		Vaddr: 0xffffffff81000000,
	}
	kernelAslrHeader := &elf.ProgHeader{
		Vaddr: 0xffffffff80200000,
		Off:   0x1000,
	}
	ppc64KernelHeader := &elf.ProgHeader{
		Vaddr: 0xc000000000000000,
	}
	// Each case: label, file header, load segment, optional _stext symbol
	// offset, mapping start/limit/offset, expected base, expect-error flag.
	testcases := []struct {
		label                string
		fh                   *elf.FileHeader
		loadSegment          *elf.ProgHeader
		stextOffset          *uint64
		start, limit, offset uint64
		want                 uint64
		wanterr              bool
	}{
		{"exec", fhExec, nil, nil, 0x400000, 0, 0, 0, false},
		{"exec offset", fhExec, lsOffset, nil, 0x400000, 0x800000, 0, 0x200000, false},
		{"exec offset 2", fhExec, lsOffset, nil, 0x200000, 0x600000, 0, 0, false},
		{"exec nomap", fhExec, nil, nil, 0, 0, 0, 0, false},
		{"exec kernel", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0xffffffff82000198, 0xffffffff83000198, 0, 0x1000000, false},
		{"exec kernel", fhExec, kernelHeader, uint64p(0xffffffff810002b8), 0xffffffff81000000, 0xffffffffa0000000, 0x0, 0x0, false},
		{"exec kernel ASLR", fhExec, kernelHeader, uint64p(0xffffffff810002b8), 0xffffffff81000000, 0xffffffffa0000000, 0xffffffff81000000, 0x0, false},
		// TODO(aalexand): Figure out where this test case exactly comes from and
		// whether it's still relevant.
		{"exec kernel ASLR 2", fhExec, kernelAslrHeader, nil, 0xffffffff83e00000, 0xfffffffffc3fffff, 0x3c00000, 0x3c00000, false},
		{"exec PPC64 kernel", fhExec, ppc64KernelHeader, uint64p(0xc000000000000000), 0xc000000000000000, 0xd00000001a730000, 0x0, 0x0, false},
		{"exec chromeos kernel", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0, 0x10197, 0, 0x7efffe68, false},
		{"exec chromeos kernel 2", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0, 0x10198, 0, 0x7efffe68, false},
		{"exec chromeos kernel 3", fhExec, kernelHeader, uint64p(0xffffffff81000198), 0x198, 0x100000, 0, 0x7f000000, false},
		{"exec chromeos kernel 4", fhExec, kernelHeader, uint64p(0xffffffff81200198), 0x198, 0x100000, 0, 0x7ee00000, false},
		{"exec chromeos kernel unremapped", fhExec, kernelHeader, uint64p(0xffffffff810001c8), 0xffffffff834001c8, 0xffffffffc0000000, 0xffffffff834001c8, 0x2400000, false},
		{"dyn", fhDyn, nil, nil, 0x200000, 0x300000, 0, 0x200000, false},
		{"dyn map", fhDyn, lsOffset, nil, 0x0, 0x300000, 0, 0xFFFFFFFFFFE00000, false},
		{"dyn nomap", fhDyn, nil, nil, 0x0, 0x0, 0, 0, false},
		{"dyn map+offset", fhDyn, lsOffset, nil, 0x900000, 0xa00000, 0x200000, 0x500000, false},
		{"rel", fhRel, nil, nil, 0x2000000, 0x3000000, 0, 0x2000000, false},
		{"rel nomap", fhRel, nil, nil, 0x0, ^uint64(0), 0, 0, false},
		{"rel offset", fhRel, nil, nil, 0x100000, 0x200000, 0x1, 0, true},
	}
	for _, tc := range testcases {
		base, err := GetBase(tc.fh, tc.loadSegment, tc.stextOffset, tc.start, tc.limit, tc.offset)
		if err != nil {
			if !tc.wanterr {
				t.Errorf("%s: want no error, got %v", tc.label, err)
			}
			continue
		}
		if tc.wanterr {
			t.Errorf("%s: want error, got nil", tc.label)
			continue
		}
		if base != tc.want {
			t.Errorf("%s: want 0x%x, got 0x%x", tc.label, tc.want, base)
		}
	}
}
explode_data.jsonl/65949
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1502 }
[ 2830, 3393, 1949, 3978, 1155, 353, 8840, 836, 8, 1476, 1166, 71, 10216, 1669, 609, 490, 8576, 4047, 515, 197, 27725, 25, 40745, 13, 1348, 38235, 345, 197, 532, 1166, 71, 6740, 1669, 609, 490, 8576, 4047, 515, 197, 27725, 25, 40745, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestMSI(t *testing.T) { val := map[string]interface{}(map[string]interface{}{"name": "Tyler"}) m := map[string]interface{}{"value": val, "nothing": nil} assert.Equal(t, val, New(m).Get("value").MSI()) assert.Equal(t, val, New(m).Get("value").MustMSI()) assert.Equal(t, map[string]interface{}(nil), New(m).Get("nothing").MSI()) assert.Equal(t, val, New(m).Get("nothing").MSI(map[string]interface{}{"name": "Tyler"})) assert.Panics(t, func() { New(m).Get("age").MustMSI() }) }
explode_data.jsonl/23384
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 4826, 40, 1155, 353, 8840, 836, 8, 1476, 19302, 1669, 2415, 14032, 31344, 6257, 7, 2186, 14032, 31344, 6257, 4913, 606, 788, 330, 99124, 23625, 2109, 1669, 2415, 14032, 31344, 6257, 4913, 957, 788, 1044, 11, 330, 41212, 788,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDetect(t *testing.T) { md := &mockMetadata{} md.On("Hostname").Return("hostname", nil) md.On("OSType").Return("darwin", nil) detector := &Detector{provider: md, logger: zap.NewNop()} res, schemaURL, err := detector.Detect(context.Background()) require.NoError(t, err) assert.Equal(t, conventions.SchemaURL, schemaURL) md.AssertExpectations(t) res.Attributes().Sort() expected := internal.NewResource(map[string]interface{}{ conventions.AttributeHostName: "hostname", conventions.AttributeOSType: "darwin", }) expected.Attributes().Sort() assert.Equal(t, expected, res) }
explode_data.jsonl/32915
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 57193, 1155, 353, 8840, 836, 8, 341, 84374, 1669, 609, 16712, 14610, 16094, 84374, 8071, 445, 88839, 1827, 5598, 445, 27806, 497, 2092, 340, 84374, 8071, 445, 4233, 499, 1827, 5598, 445, 98765, 497, 2092, 692, 2698, 295, 125...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCalculateSeek_SeekEnd_Zero(t *testing.T) { finalOffset, err := CalculateSeek(11, 0, os.SEEK_END, 100) log.PanicIf(err) if finalOffset != 100 { t.Fatalf("Offset not correct: (%d)", finalOffset) } }
explode_data.jsonl/45048
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 47866, 39350, 26920, 1225, 3727, 97672, 1155, 353, 8840, 836, 8, 341, 14213, 6446, 11, 1848, 1669, 20517, 39350, 7, 16, 16, 11, 220, 15, 11, 2643, 808, 32262, 10898, 11, 220, 16, 15, 15, 340, 6725, 1069, 31270, 2679, 396...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStockCountLinesEndpoint_GetPrimary(t *testing.T) { var want types.GUID n := &StockCountLines{ID: &want} if got := n.GetPrimary(); !reflect.DeepEqual(*got, want) { t.Errorf("StockCountLinesEndpoint.GetPrimary() failed, got: %v, want: %v", *got, want) } }
explode_data.jsonl/22763
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 19369, 2507, 16794, 27380, 13614, 15972, 1155, 353, 8840, 836, 8, 341, 2405, 1366, 4494, 1224, 6463, 198, 9038, 1669, 609, 19369, 2507, 16794, 90, 915, 25, 609, 52657, 630, 743, 2684, 1669, 308, 2234, 15972, 2129, 753, 34913...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestRateLimiting_DefaultLessThanOverride applies a Redis-backed rate-limit
// config to the bookinfo productpage, sends traffic through the ingress, and
// asserts (with generous slack) that roughly the allowed number of requests
// succeed and the excess is rejected with 429s.
func TestRateLimiting_DefaultLessThanOverride(t *testing.T) {
	framework.
		NewTest(t).
		// TODO(https://github.com/istio/istio/issues/15686) deflake and remove label
		Label(label.Flaky).
		RequiresEnvironment(environment.Kube).
		Run(func(ctx framework.TestContext) {
			destinationService := "productpage"
			bookInfoNameSpaceStr := bookinfoNs.Name()
			config := setupConfigOrFail(t, bookinfo.ProductPageRedisRateLimit, bookInfoNameSpaceStr, red, g, ctx)
			defer deleteConfigOrFail(t, config, g, ctx)
			// Wait for the rate-limit rule to propagate before generating load.
			util.AllowRuleSync(t)
			res := util.SendTraffic(ing, t, "Sending traffic...", "", "", 300)
			totalReqs := float64(res.DurationHistogram.Count)
			succReqs := float64(res.RetCodes[http.StatusOK])
			got429s := float64(res.RetCodes[http.StatusTooManyRequests])
			actualDuration := res.ActualDuration.Seconds() // can be a bit more than requested
			// Sending 600 requests at 10qps, and limit allowed is 50 for 30s, so we should see approx 100 requests go
			// through.
			want200s := 50.0
			// everything in excess of 200s should be 429s (ideally)
			want429s := totalReqs - want200s
			t.Logf("Expected Totals: 200s: %f (%f rps), 429s: %f (%f rps)", want200s, want200s/actualDuration, want429s, want429s/actualDuration)
			// As rate limit is applied at ingressgateway itself, fortio should see the limits too.
			// Accept 90% of the expected successes to absorb flakiness.
			want := math.Floor(want200s * 0.90)
			if succReqs < want {
				// On failure, dump the relevant prometheus series to aid debugging.
				attributes := []string{fmt.Sprintf("%s=\"%s\"", util.GetDestinationLabel(), util.Fqdn(destinationService, bookInfoNameSpaceStr)),
					fmt.Sprintf("%s=\"%d\"", util.GetResponseCodeLabel(), 200),
					fmt.Sprintf("%s=\"%s\"", util.GetReporterCodeLabel(), "destination")}
				t.Logf("prometheus values for istio_requests_total for 200's:\n%s", util.PromDumpWithAttributes(prom, "istio_requests_total", attributes))
				t.Errorf("Bad metric value for successful requests (200s): got %f, want at least %f", succReqs, want)
			}
			// check resource exhausted
			// TODO: until https://github.com/istio/istio/issues/3028 is fixed, use 50% - should be only 5% or so
			want429s = math.Floor(want429s * 0.50)
			if got429s < want429s {
				attributes := []string{fmt.Sprintf("%s=\"%s\"", util.GetDestinationLabel(), util.Fqdn(destinationService, bookInfoNameSpaceStr)),
					fmt.Sprintf("%s=\"%d\"", util.GetResponseCodeLabel(), 429),
					fmt.Sprintf("%s=\"%s\"", util.GetReporterCodeLabel(), "destination")}
				t.Logf("prometheus values for istio_requests_total for 429's:\n%s", util.PromDumpWithAttributes(prom, "istio_requests_total", attributes))
				t.Errorf("Bad metric value for rate-limited requests (429s): got %f, want at least %f", got429s, want429s)
			}
		})
}
explode_data.jsonl/37965
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1032 }
[ 2830, 3393, 11564, 16527, 287, 60336, 27451, 26067, 2177, 1155, 353, 8840, 836, 8, 341, 1166, 5794, 624, 197, 197, 3564, 2271, 1155, 4292, 197, 197, 322, 5343, 7, 2428, 1110, 5204, 905, 14, 380, 815, 14, 380, 815, 38745, 14, 16, 20,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestResolveTerraformModulesMultipleModulesWithNestedExternalDependencies
// builds a fixture dependency graph (J -> I -> H, K -> H) where H and I are
// external "assume already applied" modules, then verifies that resolving the
// configs for J and K yields all four modules with correct dependency links.
func TestResolveTerraformModulesMultipleModulesWithNestedExternalDependencies(t *testing.T) {
	t.Parallel()
	// Module H: external root of the dependency chain, assumed applied.
	moduleH := &TerraformModule{
		Path:                 canonical(t, "../test/fixture-modules/module-h"),
		Dependencies:         []*TerraformModule{},
		Config:               config.TerragruntConfig{IsPartial: true},
		TerragruntOptions:    mockOptions.Clone(canonical(t, "../test/fixture-modules/module-h/"+config.DefaultTerragruntConfigPath)),
		AssumeAlreadyApplied: true,
	}
	// Module I: external module depending on H, also assumed applied.
	moduleI := &TerraformModule{
		Path:         canonical(t, "../test/fixture-modules/module-i"),
		Dependencies: []*TerraformModule{moduleH},
		Config: config.TerragruntConfig{
			Dependencies: &config.ModuleDependencies{Paths: []string{"../module-h"}},
			IsPartial:    true,
		},
		TerragruntOptions:    mockOptions.Clone(canonical(t, "../test/fixture-modules/module-i/"+config.DefaultTerragruntConfigPath)),
		AssumeAlreadyApplied: true,
	}
	// Module J: directly requested, depends on external module I.
	moduleJ := &TerraformModule{
		Path:         canonical(t, "../test/fixture-modules/module-j"),
		Dependencies: []*TerraformModule{moduleI},
		Config: config.TerragruntConfig{
			Dependencies: &config.ModuleDependencies{Paths: []string{"../module-i"}},
			Terraform:    &config.TerraformConfig{Source: ptr("temp")},
			IsPartial:    true,
		},
		TerragruntOptions: mockOptions.Clone(canonical(t, "../test/fixture-modules/module-j/"+config.DefaultTerragruntConfigPath)),
	}
	// Module K: directly requested, depends on external module H.
	moduleK := &TerraformModule{
		Path:         canonical(t, "../test/fixture-modules/module-k"),
		Dependencies: []*TerraformModule{moduleH},
		Config: config.TerragruntConfig{
			Dependencies: &config.ModuleDependencies{Paths: []string{"../module-h"}},
			Terraform:    &config.TerraformConfig{Source: ptr("fire")},
			IsPartial:    true,
		},
		TerragruntOptions: mockOptions.Clone(canonical(t, "../test/fixture-modules/module-k/"+config.DefaultTerragruntConfigPath)),
	}
	// Only J and K are requested; H and I must be pulled in as dependencies.
	configPaths := []string{"../test/fixture-modules/module-j/" + config.DefaultTerragruntConfigPath, "../test/fixture-modules/module-k/" + config.DefaultTerragruntConfigPath}
	expected := []*TerraformModule{moduleH, moduleI, moduleJ, moduleK}
	actualModules, actualErr := ResolveTerraformModules(configPaths, mockOptions, mockHowThesePathsWereFound)
	require.NoError(t, actualErr)
	assertModuleListsEqual(t, expected, actualModules)
}
explode_data.jsonl/26645
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 913 }
[ 2830, 3393, 56808, 51, 13886, 627, 28201, 32089, 28201, 2354, 71986, 25913, 48303, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 54020, 39, 1669, 609, 51, 13886, 627, 3332, 515, 197, 69640, 25, 338, 42453, 1155, 11, 7005, 1944,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLogPassesContext(t *testing.T) { t.Parallel() l1 := &testLogger{} config := mustParseConfig(t, os.Getenv("PGX_TEST_DATABASE")) config.Logger = l1 conn := mustConnect(t, config) defer closeConn(t, conn) l1.logs = l1.logs[0:0] // Clear logs written when establishing connection ctx := context.WithValue(context.Background(), "ctxdata", "foo") if _, err := conn.Exec(ctx, ";"); err != nil { t.Fatal(err) } if len(l1.logs) != 1 { t.Fatal("Expected logger to be called once, but it wasn't") } if l1.logs[0].data["ctxdata"] != "foo" { t.Fatal("Expected context data to be passed to logger, but it wasn't") } }
explode_data.jsonl/40023
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 2201, 12187, 288, 1972, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 8810, 16, 1669, 609, 1944, 7395, 16094, 25873, 1669, 1969, 14463, 2648, 1155, 11, 2643, 64883, 445, 11383, 55, 11641, 45510, 5455, 25873, 12750, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_VerifyEmail(t *testing.T) { tests := []struct { name string prep func(*testSetup) jwt string userId string wantResCode int }{ { name: "should return 400 when user service returns ErrInvalidID", userId: testUserId.Hex(), prep: func(setup *testSetup) { setup.mockUService.EXPECT().GetUserWithID(setup.testCtx, testUserId.Hex()). Return(nil, services.ErrInvalidID).Times(1) }, wantResCode: http.StatusBadRequest, }, { name: "should return 404 when user service returns ErrNotFound", userId: testUserId.Hex(), prep: func(setup *testSetup) { setup.mockUService.EXPECT().GetUserWithID(setup.testCtx, testUserId.Hex()). Return(nil, services.ErrNotFound).Times(1) }, wantResCode: http.StatusNotFound, }, { name: "should return 500 when user service returns unknown error", userId: testUserId.Hex(), prep: func(setup *testSetup) { setup.mockUService.EXPECT().GetUserWithID(setup.testCtx, testUserId.Hex()). Return(nil, errors.New("service err")).Times(1) }, wantResCode: http.StatusInternalServerError, }, { name: "should return 400 when user's email is already verified", userId: testUserId.Hex(), prep: func(setup *testSetup) { setup.mockUService.EXPECT().GetUserWithID(setup.testCtx, testUserId.Hex()). Return(&entities.User{Role: role.Applicant}, nil).Times(1) }, wantResCode: http.StatusBadRequest, }, { name: "should return 500 when updating user fails", userId: testUserId.Hex(), prep: func(setup *testSetup) { setup.mockUService.EXPECT().GetUserWithID(setup.testCtx, testUserId.Hex()). Return(&entities.User{ID: testUserId, Role: role.Unverified}, nil).Times(1) setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()). Return(errors.New("service err")).Times(1) }, wantResCode: http.StatusInternalServerError, }, { name: "should return 200 when invalidating service token fails", userId: testUserId.Hex(), jwt: testAuthToken, prep: func(setup *testSetup) { setup.mockUService.EXPECT().GetUserWithID(setup.testCtx, testUserId.Hex()). 
Return(&entities.User{ID: testUserId, Role: role.Unverified}, nil).Times(1) setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()). Return(nil).Times(1) setup.mockAuthorizer.EXPECT().InvalidateServiceToken(setup.testCtx, testAuthToken). Return(errors.New("authorizer err")).Times(1) }, wantResCode: http.StatusOK, }, { name: "should return 200", userId: testUserId.Hex(), jwt: testAuthToken, prep: func(setup *testSetup) { setup.mockUService.EXPECT().GetUserWithID(setup.testCtx, testUserId.Hex()). Return(&entities.User{ID: testUserId, Role: role.Unverified}, nil).Times(1) setup.mockUService.EXPECT().UpdateUserWithID(setup.testCtx, testUserId.Hex(), gomock.Any()). Return(nil).Times(1) setup.mockAuthorizer.EXPECT().InvalidateServiceToken(setup.testCtx, testAuthToken). Return(nil).Times(1) }, wantResCode: http.StatusOK, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { setup := setupTest(t, map[string]string{ environment.JWTSecret: "test", }) defer setup.ctrl.Finish() mockRenderPageCall(setup) if tt.prep != nil { tt.prep(setup) } req := httptest.NewRequest(http.MethodGet, fmt.Sprintf("/test?token=%s&userId=%s", tt.jwt, tt.userId), nil) setup.testCtx.Request = req setup.router.VerifyEmail(setup.testCtx) assert.Equal(t, tt.wantResCode, setup.w.Code) }) } }
explode_data.jsonl/32966
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1562 }
[ 2830, 3393, 2334, 261, 1437, 4781, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 40346, 79, 286, 2915, 4071, 1944, 21821, 340, 197, 12428, 9306, 260, 914, 198, 197, 197, 13508, 414, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_valuesToSubmissionRecords(t *testing.T) { got := valuesToSubmissionRecords([][]interface{}{ {"Credited Name", "Instrument", "Bottom Text"}, {"Calem Destiny", "Soprano", "1"}, {"Cheryl Carr", "Soprano", "1"}, {"Chris Erickson", "Soprano", "1"}, }) want := Submissions{ {CreditedName: "Calem Destiny", Instrument: "Soprano", BottomText: "1"}, {CreditedName: "Cheryl Carr", Instrument: "Soprano", BottomText: "1"}, {CreditedName: "Chris Erickson", Instrument: "Soprano", BottomText: "1"}, } assert.Equal(t, want, got) }
explode_data.jsonl/30733
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 9146, 1249, 86621, 25876, 1155, 353, 8840, 836, 8, 341, 3174, 354, 1669, 2750, 1249, 86621, 25876, 10556, 1294, 4970, 67066, 197, 197, 4913, 34, 1151, 1608, 3988, 497, 330, 56324, 497, 330, 11279, 2918, 7115, 197, 197, 4913,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiModuleV2(t *testing.T) { const multiModule = ` -- moda/a/go.mod -- module a.com require b.com/v2 v2.1.9 -- moda/a/a.go -- package a import ( "b.com/v2/b" ) func main() { var x int _ = b.Hi() } -- modb/go.mod -- module b.com -- modb/b/b.go -- package b func Hello() int { var x int } -- modb/v2/go.mod -- module b.com/v2 -- modb/v2/b/b.go -- package b func Hi() int { var x int } -- modc/go.mod -- module gopkg.in/yaml.v1 // test gopkg.in versions -- modc/main.go -- package main func main() { var x int } ` WithOptions( Modes(Experimental), ).Run(t, multiModule, func(t *testing.T, env *Env) { env.Await( env.DiagnosticAtRegexp("moda/a/a.go", "x"), env.DiagnosticAtRegexp("modb/b/b.go", "x"), env.DiagnosticAtRegexp("modb/v2/b/b.go", "x"), env.DiagnosticAtRegexp("modc/main.go", "x"), ) }) }
explode_data.jsonl/37370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 20358, 3332, 53, 17, 1155, 353, 8840, 836, 8, 341, 4777, 7299, 3332, 284, 22074, 313, 90668, 14186, 25525, 10929, 39514, 4352, 264, 905, 271, 4310, 293, 905, 5457, 17, 348, 17, 13, 16, 13, 24, 198, 313, 90668, 14186, 141...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUserHandler(t *testing.T) { db, _ := initTest() accountRepository := models.AccountRepository{DB: db, Debug: false} createdAccount, _ := accountRepository.Create(&models.Account{ Email: "test@test.test", Password: "secret", }) accountSerialized, err := requests.AccessTokenRequest(&models.AccountSerialized{ID: createdAccount.ID}) if err != nil { t.Fatalf("access token request error: %s", err) } requestUrl := fmt.Sprintf("/api/account/user/?userId=%d", accountSerialized.ID) request, _ := http.NewRequest("POST", requestUrl, nil) request.Header = http.Header{ "Content-Type": []string{"application/json"}, "Authorization": []string{accountSerialized.AccessToken}, "Expires": []string{accountSerialized.ExpirationTime}, } response := httptest.NewRecorder() userHandler := NewUserHandler(db) userHandler.ServeHTTP(response, request) if response.Code != 200 { t.Fatalf("non-expected status code %d", response.Code) } body, err := ioutil.ReadAll(response.Body) if err != nil { t.Fatalf("unable to read response body: %s", err.Error()) } account := &models.Account{} err = json.Unmarshal(body, &account) if err != nil { t.Fatalf("unable to unmarshal response body: %s", err.Error()) } if account.Email != account.Email { t.Fatalf("emails doesn's match") } verifiedPassword := account.VerifyPassword("secret") if verifiedPassword != nil { t.Errorf("[func (model *Account) VerifyPassword(password string) error] -> %s", verifiedPassword) } }
explode_data.jsonl/16538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 517 }
[ 2830, 3393, 1474, 3050, 1155, 353, 8840, 836, 8, 341, 20939, 11, 716, 1669, 2930, 2271, 2822, 86866, 4624, 1669, 4119, 30877, 4624, 90, 3506, 25, 2927, 11, 11091, 25, 895, 532, 197, 7120, 7365, 11, 716, 1669, 2692, 4624, 7251, 2099, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAuthzCheckAuthFailed(t *testing.T) { t.Parallel() checker := NewMiddleware() check := func(ctx context.Context, req interface{}) (interface{}, error) { return nil, fleet.NewAuthFailedError("failed") } check = checker.AuthzCheck()(check) _, err := check(context.Background(), struct{}{}) assert.Error(t, err) assert.Contains(t, err.Error(), "failed") }
explode_data.jsonl/24358
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 5087, 89, 3973, 5087, 9408, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 25157, 261, 1669, 1532, 24684, 2822, 25157, 1669, 2915, 7502, 2266, 9328, 11, 4232, 3749, 28875, 320, 4970, 22655, 1465, 8, 341, 197, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateTxWithStateBasedEndorsement(t *testing.T) { // SCENARIO: we validate a transaction that writes to key "key". This key // has a state-based endorsement policy that cannot be satisfied, while // the chaincode endorseemnt policy is satisfied by this transaction. // When we run with the 1.2 capability we expect the transaction to be // successfully validated, while when we run with the 1.3 capability, // validation is expected to fail owing to the unsatisfiable SBEP. // Notice that this corner case should never occur in practice, since // capabilities also determine whether honest peers will endorse // chaincodes that set state-based endorsement policies. Still, the test // is valuable as it shows how transactions that are affected by state-based // endorsement policies are handled by the 1.3 validation, and simply // ignored by 1.2. t.Run("1.2Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV12Capabilities(t) defer cleanup() err, b := validateTxWithStateBasedEndorsement(t, l, v) assert.NoError(t, err) assertValid(b, t) }) t.Run("1.3Capability", func(t *testing.T) { l, v, cleanup := setupLedgerAndValidatorWithV13Capabilities(t) defer cleanup() err, b := validateTxWithStateBasedEndorsement(t, l, v) assert.NoError(t, err) assertInvalid(b, t, peer.TxValidationCode_ENDORSEMENT_POLICY_FAILURE) }) }
explode_data.jsonl/47816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 425 }
[ 2830, 3393, 17926, 31584, 2354, 1397, 28715, 3727, 10836, 478, 1155, 353, 8840, 836, 8, 1476, 197, 322, 7531, 94274, 25, 582, 9593, 264, 7745, 429, 13914, 311, 1376, 330, 792, 3263, 1096, 1376, 198, 197, 322, 702, 264, 1584, 5980, 410...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMVCCComputeStatsError(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) for _, engineImpl := range mvccEngineImpls { t.Run(engineImpl.name, func(t *testing.T) { engine := engineImpl.create() defer engine.Close() // Write a MVCC metadata key where the value is not an encoded MVCCMetadata // protobuf. if err := engine.Put(mvccKey(roachpb.Key("garbage")), []byte("garbage")); err != nil { t.Fatal(err) } iter := engine.NewMVCCIterator(MVCCKeyAndIntentsIterKind, IterOptions{UpperBound: roachpb.KeyMax}) defer iter.Close() for _, mvccStatsTest := range mvccStatsTests { t.Run(mvccStatsTest.name, func(t *testing.T) { _, err := mvccStatsTest.fn(iter, roachpb.KeyMin, roachpb.KeyMax, 100) if e := "unable to decode MVCCMetadata"; !testutils.IsError(err, e) { t.Fatalf("expected %s, got %v", e, err) } }) } }) } }
explode_data.jsonl/70088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 399 }
[ 2830, 3393, 66626, 3706, 46254, 16635, 1454, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 2023, 8358, 4712, 9673, 1669, 2088, 23164, 638, 4571, 9673, 82, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAuthMethodWrongPassword(t *testing.T) { config := &ClientConfig{ User: "testuser", Auth: []AuthMethod{ Password("wrong"), PublicKeys(testSigners["rsa"]), }, HostKeyCallback: InsecureIgnoreHostKey(), } if err := tryAuth(t, config); err != nil { t.Fatalf("unable to dial remote side: %s", err) } }
explode_data.jsonl/6934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 5087, 3523, 29185, 4876, 1155, 353, 8840, 836, 8, 341, 25873, 1669, 609, 2959, 2648, 515, 197, 31672, 25, 330, 1944, 872, 756, 197, 197, 5087, 25, 3056, 5087, 3523, 515, 298, 197, 4876, 445, 34870, 4461, 298, 73146, 8850, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_tokenLimiter_allow(t *testing.T) { t.Parallel() qps, oneTokenDuration := 10, time.Second/10 lim := newTokenLimiter(qps) lim.token = 0 assert.True(t, lim.allow()) // cover exceed revoke time.Sleep(oneTokenDuration * 2) var wg sync.WaitGroup wg.Add(qps) for i := 0; i < qps; i++ { go func() { defer wg.Done() assert.True(t, lim.allow()) }() } wg.Wait() assert.False(t, lim.allow()) time.Sleep(oneTokenDuration) assert.True(t, lim.allow()) }
explode_data.jsonl/12310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 6458, 43, 17700, 55731, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18534, 1690, 11, 825, 3323, 12945, 1669, 220, 16, 15, 11, 882, 32435, 14, 16, 15, 198, 197, 4659, 1669, 501, 3323, 43, 17700, 10583, 1690, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewOrQuery_Next_check(t *testing.T) { sl := datastruct.NewSkipList(datastruct.DefaultMaxLevel) sl.Add(document.DocId(1), 1) sl.Add(document.DocId(3), 1) sl.Add(document.DocId(6), 2) sl.Add(document.DocId(10), 2) sl1 := datastruct.NewSkipList(datastruct.DefaultMaxLevel) sl1.Add(document.DocId(1), 1) sl1.Add(document.DocId(4), 1) sl1.Add(document.DocId(6), 2) sl1.Add(document.DocId(9), 2) sl2 := datastruct.NewSkipList(datastruct.DefaultMaxLevel) sl2.Add(document.DocId(1), 1) sl2.Add(document.DocId(4), 1) sl2.Add(document.DocId(6), 2) sl2.Add(document.DocId(9), 2) Convey("or query with check", t, func() { a := NewOrQuery([]Query{ NewTermQuery(sl.Iterator()), NewTermQuery(sl1.Iterator()), }, []check.Checker{ check.NewChecker(sl2.Iterator(), 2, operation.EQ, nil, false), }) testCase := []document.DocId{6, 9} for _, expect := range testCase { v, e := a.Current() a.Next() So(v, ShouldEqual, expect) So(e, ShouldBeNil) } v, e := a.Current() a.Next() So(v, ShouldEqual, 0) So(e, ShouldNotBeNil) }) Convey("or query with In checke", t, func() { a := NewOrQuery([]Query{ NewTermQuery(sl.Iterator()), NewTermQuery(sl1.Iterator()), }, []check.Checker{ check.NewInChecker(sl2.Iterator(), []int{1, 2, 3}, nil, false), }) testCase := []document.DocId{1, 4, 6, 9} for _, expect := range testCase { v, e := a.Current() a.Next() So(v, ShouldEqual, expect) So(e, ShouldBeNil) } v, e := a.Current() a.Next() So(v, ShouldEqual, 0) So(e, ShouldNotBeNil) }) Convey("or query with or check", t, func() { a := NewOrQuery([]Query{ NewTermQuery(sl.Iterator()), NewTermQuery(sl1.Iterator()), }, []check.Checker{ check.NewOrChecker([]check.Checker{ check.NewInChecker(sl2.Iterator(), []int{1, 2, 3}, nil, false), check.NewChecker(sl2.Iterator(), 2, operation.EQ, nil, false), }), check.NewInChecker(sl2.Iterator(), []int{1, 2, 3}, nil, false), }) testCase := []document.DocId{1, 4, 6, 9} for _, expect := range testCase { v, e := a.Current() a.Next() So(v, ShouldEqual, expect) So(e, ShouldBeNil) } v, e := 
a.Current() a.Next() So(v, ShouldEqual, 0) So(e, ShouldNotBeNil) }) Convey("or query check4", t, func() { a := NewOrQuery([]Query{ NewTermQuery(sl.Iterator()), NewTermQuery(sl1.Iterator()), }, []check.Checker{ check.NewOrChecker([]check.Checker{ check.NewInChecker(sl2.Iterator(), []int{1, 2, 3}, nil, false), check.NewChecker(sl2.Iterator(), 2, operation.EQ, nil, false), }), check.NewInChecker(sl2.Iterator(), []int{1, 2, 3}, nil, false), }) testCase := []document.DocId{1, 4, 6, 9} a.SetDebug(1) for _, expect := range testCase { v, e := a.Current() a.Next() So(v, ShouldEqual, expect) So(e, ShouldBeNil) } v, e := a.Current() a.Next() So(v, ShouldEqual, 0) So(e, ShouldNotBeNil) So(a.DebugInfo().String(), ShouldNotBeNil) }) }
explode_data.jsonl/43262
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1362 }
[ 2830, 3393, 3564, 2195, 2859, 1604, 427, 7200, 1155, 353, 8840, 836, 8, 341, 78626, 1669, 821, 1235, 7121, 35134, 852, 2592, 1235, 13275, 5974, 4449, 692, 78626, 1904, 15290, 42452, 764, 7, 16, 701, 220, 16, 340, 78626, 1904, 15290, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFullClusterBackupDroppedTables(t *testing.T) { defer leaktest.AfterTest(t)() const numAccounts = 10 _, _, sqlDB, tempDir, cleanupFn := backupRestoreTestSetup(t, singleNode, numAccounts, initNone) _, _, sqlDBRestore, cleanupEmptyCluster := backupRestoreTestSetupEmpty(t, singleNode, tempDir, initNone) defer cleanupFn() defer cleanupEmptyCluster() _, tablesToCheck := generateInterleavedData(sqlDB, t, numAccounts) sqlDB.Exec(t, `BACKUP TO $1`, localFoo) sqlDBRestore.Exec(t, `RESTORE FROM $1`, localFoo) for _, table := range tablesToCheck { query := fmt.Sprintf("SELECT * FROM data.%s", table) sqlDBRestore.CheckQueryResults(t, query, sqlDB.QueryStr(t, query)) } }
explode_data.jsonl/48476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 9432, 28678, 56245, 35, 41716, 21670, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 4777, 1629, 41369, 284, 220, 16, 15, 198, 197, 6878, 8358, 5704, 3506, 11, 2730, 6184, 11, 21290, 24911, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestClient_CreateHeader_validation(t *testing.T) { var err error _, err = testClient.CreateHeader(&CreateHeaderInput{ ServiceID: "", }) if err != ErrMissingServiceID { t.Errorf("bad error: %s", err) } _, err = testClient.CreateHeader(&CreateHeaderInput{ ServiceID: "foo", ServiceVersion: 0, }) if err != ErrMissingServiceVersion { t.Errorf("bad error: %s", err) } }
explode_data.jsonl/3296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 2959, 34325, 4047, 19416, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 197, 6878, 1848, 284, 1273, 2959, 7251, 4047, 2099, 4021, 4047, 2505, 515, 197, 91619, 915, 25, 8324, 197, 3518, 743, 1848, 961, 15495, 25080, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetNodeName(t *testing.T) { tests := map[string]struct { expectedOutput string Volume VolumeInfo }{ "Fetching Node Name from openebs.io/node-names": { Volume: VolumeInfo{ Volume: v1alpha1.CASVolume{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ "openebs.io/node-names": "gke-ashish-dev-default-pool-1fe155b7-rvqd,gke-ashish-dev-default-pool-1fe155b7-qv7v,gke-ashish-dev-default-pool-1fe155b7-w75t", }, }, }, }, expectedOutput: "gke-ashish-dev-default-pool-1fe155b7-rvqd,gke-ashish-dev-default-pool-1fe155b7-qv7v,gke-ashish-dev-default-pool-1fe155b7-w75t", }, "Fetching Node Name when no key is present": { Volume: VolumeInfo{ Volume: v1alpha1.CASVolume{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{}, }, }, }, expectedOutput: "", }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { got := tt.Volume.GetNodeName() if got != tt.expectedOutput { t.Fatalf("Test: %v Expected: %v but got: %v", name, tt.expectedOutput, got) } }) } }
explode_data.jsonl/78057
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 530 }
[ 2830, 3393, 1949, 1955, 675, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 42400, 5097, 914, 198, 197, 17446, 4661, 260, 20265, 1731, 198, 197, 59403, 197, 197, 1, 52416, 6018, 3988, 504, 1787, 68, 1279, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestModeFromRSAMod(t *testing.T) { tests := []struct { rsaMod int want int wantErr bool }{ {rsaMod: 53, want: crypto.Rsa2048, wantErr: false}, {rsaMod: 1029, want: crypto.Rsa2048, wantErr: false}, {rsaMod: 3000, want: crypto.Rsa3072, wantErr: false}, {rsaMod: 4000, want: crypto.Rsa4096, wantErr: false}, {rsaMod: 5000, want: 0x1300, wantErr: false}, {rsaMod: 7000, want: 0x1500, wantErr: false}, {rsaMod: 15361, want: 0x1e00, wantErr: false}, {rsaMod: 160000, want: crypto.None, wantErr: true}, } for _, tt := range tests { t.Run(strconv.Itoa(tt.rsaMod), func(t *testing.T) { got, err := ModeFromRSAMod(tt.rsaMod) if (err != nil) != tt.wantErr { t.Errorf("ModeFromRSAMod() error = %v, wantErr %v", err, tt.wantErr) return } if got != tt.want { t.Errorf("ModeFromRSAMod() got = 0x%x, want 0x%x", got, tt.want) } }) } }
explode_data.jsonl/45155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 431 }
[ 2830, 3393, 3636, 3830, 11451, 1402, 347, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 41231, 64, 4459, 220, 526, 198, 197, 50780, 262, 526, 198, 197, 50780, 7747, 1807, 198, 197, 59403, 197, 197, 90, 60869, 4459, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHostNetworkPortIncrease(t *testing.T) { clientset := test.New(1) node, err := clientset.CoreV1().Nodes().Get("node0", metav1.GetOptions{}) assert.Nil(t, err) assert.NotNil(t, node) node.Status = v1.NodeStatus{} node.Status.Addresses = []v1.NodeAddress{ { Type: v1.NodeExternalIP, Address: "1.1.1.1", }, } configDir, _ := ioutil.TempDir("", "") defer os.RemoveAll(configDir) c := New(&clusterd.Context{Clientset: clientset, ConfigDir: configDir}, "ns", "", "myversion", cephv1beta1.CephVersionSpec{}, cephv1beta1.MonSpec{Count: 3, AllowMultiplePerNode: true}, rookalpha.Placement{}, true, v1.ResourceRequirements{}, metav1.OwnerReference{}) c.clusterInfo = test.CreateConfigDir(0) mons := []*monConfig{ { ResourceName: "rook-ceph-mon-a", DaemonName: "a", Port: mondaemon.DefaultPort, }, { ResourceName: "rook-ceph-mon-b", DaemonName: "b", Port: mondaemon.DefaultPort, }, } c.maxMonID = 1 err = c.assignMons(mons) assert.Nil(t, err) err = c.initMonIPs(mons) assert.Nil(t, err) assert.Equal(t, node.Name, c.mapping.Node["a"].Name) assert.Equal(t, node.Name, c.mapping.Node["b"].Name) sEndpoint := strings.Split(c.clusterInfo.Monitors["a"].Endpoint, ":") assert.Equal(t, strconv.Itoa(mondaemon.DefaultPort), sEndpoint[1]) sEndpoint = strings.Split(c.clusterInfo.Monitors["b"].Endpoint, ":") assert.Equal(t, strconv.Itoa(mondaemon.DefaultPort+1), sEndpoint[1]) }
explode_data.jsonl/39537
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 648 }
[ 2830, 3393, 9296, 12320, 7084, 69556, 1155, 353, 8840, 836, 8, 341, 25291, 746, 1669, 1273, 7121, 7, 16, 340, 20831, 11, 1848, 1669, 2943, 746, 12777, 53, 16, 1005, 12288, 1005, 1949, 445, 3509, 15, 497, 77520, 16, 2234, 3798, 37790, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAppContainers(t *testing.T) { assert := asrt.New(t) containers, err := GetAppContainers(testContainerName) assert.NoError(err) assert.Contains(containers[0].Image, version.WebImg) }
explode_data.jsonl/41377
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 1949, 2164, 74632, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 438, 3342, 7121, 1155, 340, 197, 39399, 11, 1848, 1669, 2126, 2164, 74632, 8623, 4502, 675, 340, 6948, 35699, 3964, 340, 6948, 11545, 37846, 20568, 58, 15, 936, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGetExchangeHighestPriceByCurrencyPair(t *testing.T) { CreateTestBot(t) p, err := currency.NewPairFromStrings("BTC", "USD") if err != nil { t.Fatal(err) } err = stats.Add("Bitfinex", p, asset.Spot, 1000, 10000) if err != nil { t.Error(err) } err = stats.Add(testExchange, p, asset.Spot, 1337, 10000) if err != nil { t.Error(err) } exchangeName, err := GetExchangeHighestPriceByCurrencyPair(p, asset.Spot) if err != nil { t.Error(err) } if exchangeName != testExchange { t.Error("Unexpected result") } btcaud, err := currency.NewPairFromStrings("BTC", "AUD") if err != nil { t.Fatal(err) } _, err = GetExchangeHighestPriceByCurrencyPair(btcaud, asset.Spot) if err == nil { t.Error("Unexpected result") } }
explode_data.jsonl/59238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 312 }
[ 2830, 3393, 1949, 31564, 96329, 6972, 1359, 26321, 12443, 1155, 353, 8840, 836, 8, 341, 75569, 2271, 23502, 1155, 692, 3223, 11, 1848, 1669, 11413, 7121, 12443, 3830, 20859, 445, 59118, 497, 330, 26749, 1138, 743, 1848, 961, 2092, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func Test_MessageSerDeser(t *testing.T) { req := require.New(t) data := make([]byte, 4192) rand.Read(data) hash := sha512.Sum512(data) block := &RandHashedBlock{ Type: BlockTypePlain, Sequence: 10, Hash: hash[:], Data: data, } testBuf := &testPeer{} p := &protocol{ peer: testBuf, test: &loop3_pb.Test{ Name: "test", }, } req.NoError(block.Tx(p)) readBlock := &RandHashedBlock{} req.NoError(readBlock.Rx(p)) req.True(reflect.DeepEqual(block, readBlock), cmp.Diff(block, readBlock)) data = make([]byte, 4192) rand.Read(data) hash = sha512.Sum512(data) block = &RandHashedBlock{ Type: BlockTypeLatencyRequest, Sequence: 10, Hash: hash[:], Data: data, } req.NoError(block.Tx(p)) readBlock = &RandHashedBlock{} req.NoError(readBlock.Rx(p)) req.Equal("", cmp.Diff(block, readBlock)) }
explode_data.jsonl/13431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 50974, 31745, 4896, 261, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 1373, 7121, 1155, 340, 8924, 1669, 1281, 10556, 3782, 11, 220, 19, 16, 24, 17, 340, 7000, 437, 6503, 2592, 692, 50333, 1669, 15870, 20, 16, 17, 41676, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCallAfterLoopPanic(t *testing.T) { _, ctrl := createFixtures(t) subject := new(Subject) firstCall := ctrl.RecordCall(subject, "FooMethod", "1") secondCall := ctrl.RecordCall(subject, "FooMethod", "2") thirdCall := ctrl.RecordCall(subject, "FooMethod", "3") gomock.InOrder(firstCall, secondCall, thirdCall) defer func() { err := recover() if err == nil { t.Error("Call.After creation of dependency loop did not panic.") } }() // This should panic due to dependency loop. firstCall.After(thirdCall) }
explode_data.jsonl/17291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 7220, 6025, 14620, 47, 31270, 1155, 353, 8840, 836, 8, 341, 197, 6878, 23743, 1669, 1855, 25958, 18513, 1155, 692, 28624, 583, 1669, 501, 7, 13019, 692, 42190, 7220, 1669, 23743, 49959, 7220, 29128, 11, 330, 40923, 3523, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWriterRoundTrip(t *testing.T) { blockSizes := []int{100, 1000, 2048, 4096, math.MaxInt32} for _, blockSize := range blockSizes { for _, indexBlockSize := range blockSizes { for name, fp := range map[string]FilterPolicy{ "none": nil, "bloom10bit": bloom.FilterPolicy(10), } { t.Run(fmt.Sprintf("bloom=%s", name), func(t *testing.T) { f, err := build(DefaultCompression, fp, TableFilter, nil, nil, blockSize, indexBlockSize) require.NoError(t, err) // Check that we can read a freshly made table. require.NoError(t, check(f, nil, nil)) }) } } } }
explode_data.jsonl/40340
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 268 }
[ 2830, 3393, 6492, 27497, 56352, 1155, 353, 8840, 836, 8, 341, 47996, 34930, 1669, 3056, 396, 90, 16, 15, 15, 11, 220, 16, 15, 15, 15, 11, 220, 17, 15, 19, 23, 11, 220, 19, 15, 24, 21, 11, 6888, 14535, 1072, 18, 17, 532, 2023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAssetServiceGetAssetByID(t *testing.T) { // case 1 AssetRepo.On("GetAssetByID", int64(1)).Once().Return(&model.Asset{Code: "123"}, nil) testutil.Play(t, AssetSrv, "GetAssetByID", int64(1)).Match(&model.Asset{Code: "123"}, nil) // case 2 AssetRepo.On("GetAssetByID", int64(2)).Once().Return(nil, gorm.ErrRecordNotFound) testutil.Play(t, AssetSrv, "GetAssetByID", int64(2)).Match(nil, gorm.ErrRecordNotFound) }
explode_data.jsonl/24293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 16604, 1860, 1949, 16604, 60572, 1155, 353, 8840, 836, 8, 341, 197, 322, 1142, 220, 16, 198, 197, 16604, 25243, 8071, 445, 1949, 16604, 60572, 497, 526, 21, 19, 7, 16, 4579, 12522, 1005, 5598, 2099, 2528, 88161, 90, 2078, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSSGen validates a canned ssgen (vote) transaction with both
// stake.CheckSSGen and stake.IsSSGen, then deep-copies it, swaps the
// second output's script for a maximum-size (75-byte) OP_RETURN VoteBits
// push, and checks the transaction still validates.
func TestSSGen(t *testing.T) { var ssgen = dcrutil.NewTx(ssgenMsgTx) ssgen.SetTree(wire.TxTreeStake) ssgen.SetIndex(0) err := stake.CheckSSGen(ssgen.MsgTx()) if err != nil { t.Errorf("IsSSGen: unexpected err: %v", err) } if !stake.IsSSGen(ssgen.MsgTx()) { t.Errorf("IsSSGen claimed a valid ssgen is invalid") } // Test for an OP_RETURN VoteBits push of the maximum size biggestPush := []byte{ 0x6a, 0x4b, // OP_RETURN Push 75-bytes 0x14, 0x94, 0x8c, 0x76, 0x5a, 0x69, 0x14, 0xd4, // 75 bytes 0x3f, 0x2a, 0x7a, 0xc1, 0x77, 0xda, 0x2c, 0x2f, 0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c, 0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c, 0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c, 0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c, 0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c, 0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c, 0x6b, 0x52, 0xde, 0x3d, 0x7c, 0x7c, 0x7c, 0x7c, 0x6b, 0x52, 0xde, } ssgen = dcrutil.NewTxDeep(ssgenMsgTx) ssgen.SetTree(wire.TxTreeStake) ssgen.SetIndex(0) ssgen.MsgTx().TxOut[1].PkScript = biggestPush err = stake.CheckSSGen(ssgen.MsgTx()) if err != nil { t.Errorf("IsSSGen: unexpected err: %v", err) } if !stake.IsSSGen(ssgen.MsgTx()) { t.Errorf("IsSSGen claimed a valid ssgen is invalid") } }
explode_data.jsonl/70508
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 726 }
[ 2830, 3393, 1220, 9967, 1155, 353, 8840, 836, 8, 341, 2405, 274, 1991, 268, 284, 294, 5082, 1314, 7121, 31584, 30678, 4370, 6611, 31584, 340, 34472, 4370, 4202, 6533, 3622, 554, 81362, 6533, 623, 726, 340, 34472, 4370, 4202, 1552, 7, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTSVReadAndValidateHeader(t *testing.T) { testtype.SkipUnlessTestType(t, testtype.UnitTestType) Convey("With a TSV input reader", t, func() { Convey("setting the header should read the first line of the TSV", func() { contents := "extraHeader1\textraHeader2\textraHeader3\n" colSpecs := []ColumnSpec{} r := NewTSVInputReader(colSpecs, bytes.NewReader([]byte(contents)), os.Stdout, 1, false) So(r.ReadAndValidateHeader(), ShouldBeNil) So(len(r.colSpecs), ShouldEqual, 3) }) }) }
explode_data.jsonl/69022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 9951, 53, 4418, 3036, 17926, 4047, 1155, 353, 8840, 836, 8, 341, 18185, 1313, 57776, 35587, 2271, 929, 1155, 11, 1273, 1313, 25159, 2271, 929, 340, 93070, 5617, 445, 2354, 264, 350, 17803, 1946, 6604, 497, 259, 11, 2915, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRecorderWithDanglingNotString records the arguments (1, 2, 3) —
// none of which is a string key — and checks that Fields() still reports
// three fields with a matching index, and that lookups under "key0" and
// "key2" return 1 and 3 respectively.
// NOTE(review): the mapping from the dangling non-string args to the
// "key0"/"key2" names is defined by Recorder.With, which is not visible
// here — confirm against the Recorder implementation.
func TestRecorderWithDanglingNotString(t *testing.T) { r := &Recorder{} fields, index := r.With(1, 2, 3).Fields() if got, want := len(fields), 3; got != want { t.Errorf(" got %d", got) t.Errorf(" want %d", want) } else if got = len(index); got != want { t.Errorf(" got index %d", got) t.Errorf(" want index %d", want) } k, v := "key0", 1 if got, ok := fields[k]; !ok { t.Errorf("Missing key %q", k) } else if v != got { t.Errorf(" got %q", got) t.Errorf(" want %q", v) } k, v = "key2", 3 if got, ok := fields[k]; !ok { t.Errorf("Missing key %q", k) } else if v != got { t.Errorf(" got %q", got) t.Errorf(" want %q", v) } }
explode_data.jsonl/71114
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 312 }
[ 2830, 3393, 47023, 2354, 35, 90104, 2623, 703, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 609, 47023, 31483, 55276, 11, 1922, 1669, 435, 26124, 7, 16, 11, 220, 17, 11, 220, 18, 568, 8941, 741, 743, 2684, 11, 1366, 1669, 2422, 37701, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestJSONLogger installs a JSON-encoding log handler on an iris app,
// fires 500 concurrent GET /ping requests, and verifies the captured
// output consists of the expected JSON log line repeated once per
// request. The suffix fallback tolerates differing source-path prefixes
// across file systems (see inline comment).
func TestJSONLogger(t *testing.T) { iters := 500 out := new(bytes.Buffer) app := iris.New() app.Logger().SetTimeFormat("") // disable timestamps. app.Logger().SetStacktraceLimit(1) // limit debug stacktrace to 1, show only the first caller. app.Logger().SetOutput(out) app.Logger().Handle(func(l *golog.Log) bool { enc := json.NewEncoder(l.Logger.Printer) // you can change the output to a file as well. err := enc.Encode(l) return err == nil }) app.Get("/ping", ping) const expectedLogStr = `{"level":"debug","message":"Request path: /ping","fields":{"request_id":null},"stacktrace":[{"function":"json-logger/ping","source":"/home/runner/work/iris/iris/_examples/logging/json-logger/main.go:78"}]}` // gh actions-specific. e := httptest.New(t, app, httptest.LogLevel("debug")) wg := new(sync.WaitGroup) wg.Add(iters) for i := 0; i < iters; i++ { go func() { e.GET("/ping").Expect().Status(httptest.StatusOK).Body().Equal("pong") wg.Done() }() } wg.Wait() expected := "" for i := 0; i < iters; i++ { expected += expectedLogStr + "\n" } got := out.String() got = got[strings.Index(got, "{"):] // take only the json we care and after. if expected != got { if !strings.HasSuffix(got, expected) { // C:/mygopath vs /home/travis vs any file system, // pure check but it does the job. t.Fatalf("expected:\n%s\nbut got:\n%s", expected, got) } } }
explode_data.jsonl/58509
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 547 }
[ 2830, 3393, 5370, 7395, 1155, 353, 8840, 836, 8, 341, 23374, 388, 1669, 220, 20, 15, 15, 271, 13967, 1669, 501, 23158, 22622, 692, 28236, 1669, 63942, 7121, 741, 28236, 12750, 1005, 1649, 1462, 4061, 39047, 257, 442, 11156, 48781, 624, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStopAgentDatabaseFailure(t *testing.T) { logger := zap.NewNop().Sugar() pipeline := &testutil.Pipeline{} pipeline.On("Stop").Return(nil) database := &testutil.Database{} failure := fmt.Errorf("failed to close database") database.On("Close").Return(failure) agent := LogAgent{ SugaredLogger: logger, pipeline: pipeline, database: database, } err := agent.Stop() require.Error(t, err, failure) pipeline.AssertCalled(t, "Stop") database.AssertCalled(t, "Close") }
explode_data.jsonl/19973
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 10674, 16810, 5988, 17507, 1155, 353, 8840, 836, 8, 341, 17060, 1669, 32978, 7121, 45, 453, 1005, 83414, 741, 3223, 8790, 1669, 609, 1944, 1314, 1069, 8790, 16094, 3223, 8790, 8071, 445, 10674, 1827, 5598, 27907, 340, 2698, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFirestoreListDocuments primes the mock Firestore server with a
// single-document ListDocumentsResponse, issues a ListDocuments call, and
// verifies both that the request recorded by the mock equals the one
// sent and that the first page item matches the canned document.
func TestFirestoreListDocuments(t *testing.T) { var nextPageToken string = "" var documentsElement *firestorepb.Document = &firestorepb.Document{} var documents = []*firestorepb.Document{documentsElement} var expectedResponse = &firestorepb.ListDocumentsResponse{ NextPageToken: nextPageToken, Documents: documents, } mockFirestore.err = nil mockFirestore.reqs = nil mockFirestore.resps = append(mockFirestore.resps[:0], expectedResponse) var formattedParent string = fmt.Sprintf("projects/%s/databases/%s/documents/%s/%s", "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]") var collectionId string = "collectionId-821242276" var request = &firestorepb.ListDocumentsRequest{ Parent: formattedParent, CollectionId: collectionId, } c, err := NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } resp, err := c.ListDocuments(context.Background(), request).Next() if err != nil { t.Fatal(err) } if want, got := request, mockFirestore.reqs[0]; !proto.Equal(want, got) { t.Errorf("wrong request %q, want %q", got, want) } want := (interface{})(expectedResponse.Documents[0]) got := (interface{})(resp) var ok bool switch want := (want).(type) { case proto.Message: ok = proto.Equal(want, got.(proto.Message)) default: ok = want == got } if !ok { t.Errorf("wrong response %q, want %q)", got, want) } }
explode_data.jsonl/27372
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 505 }
[ 2830, 3393, 48513, 852, 27143, 1155, 353, 8840, 836, 8, 341, 2405, 83595, 3323, 914, 284, 8389, 2405, 9293, 1691, 353, 10796, 4314, 16650, 26256, 284, 609, 10796, 4314, 16650, 26256, 16094, 2405, 9293, 284, 29838, 10796, 4314, 16650, 2625...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestNodeMetadata(t *testing.T) { labels := map[string]string{ "l1": "v1", "l2": "v2", "istio": "sidecar", } anno := map[string]string{ "istio.io/enable": "{\"abc\": 20}", } _, envs := createEnv(t, labels, nil) nm := getNodeMetaData(envs) if !reflect.DeepEqual(nm, labels) { t.Fatalf("Maps are not equal.\ngot: %v\nwant: %v", nm, labels) } merged, envs := createEnv(t, labels, anno) nm = getNodeMetaData(envs) if !reflect.DeepEqual(nm, merged) { t.Fatalf("Maps are not equal.\ngot: %v\nwant: %v", nm, merged) } t.Logf("envs => %v\nnm=> %v", envs, nm) // encode string incorrectly, // a warning is logged, but everything else works. envs = envEncode(anno, IstioMetaJSONPrefix, func(s string) string { return s }, envs) nm = getNodeMetaData(envs) if !reflect.DeepEqual(nm, merged) { t.Fatalf("Maps are not equal.\ngot: %v\nwant: %v", nm, merged) } }
explode_data.jsonl/35255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 399 }
[ 2830, 3393, 1955, 14610, 1155, 353, 8840, 836, 8, 341, 95143, 1669, 2415, 14032, 30953, 515, 197, 197, 1, 75, 16, 788, 262, 330, 85, 16, 756, 197, 197, 1, 75, 17, 788, 262, 330, 85, 17, 756, 197, 197, 1, 380, 815, 788, 330, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsWindowsService(t *testing.T) { isSvc, err := svc.IsWindowsService() if err != nil { t.Fatal(err) } if isSvc { t.Error("IsWindowsService retuns true when not running in a service.") } }
explode_data.jsonl/18603
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 3872, 13164, 1860, 1155, 353, 8840, 836, 8, 341, 19907, 92766, 11, 1848, 1669, 46154, 4506, 13164, 1860, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 743, 374, 92766, 341, 197, 3244, 6141, 445, 38...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
// TestRangeCacheUseIntents checks that after evicting a stale range
// descriptor, the cache serves the next lookup from intents (a
// speculative descriptor with Generation 0) without hitting the
// descriptor DB again; lookup counts assert each step's DB traffic.
func TestRangeCacheUseIntents(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) db := initTestDescriptorDB(t) ctx := context.Background() // A request initially looks up the range descriptor ["a"-"b"). abDesc, evictToken := doLookup(ctx, db.cache, "aa") db.assertLookupCountEq(t, 2, "aa") // Perform a lookup now that the cache is populated. abDescLookup, _ := doLookup(ctx, db.cache, "aa") db.assertLookupCountEq(t, 0, "aa") // The descriptors should be the same. if !reflect.DeepEqual(abDesc, abDescLookup) { t.Errorf("expected initial range descriptor to be returned from lookup, found %v", abDescLookup) } // The current descriptor is found to be stale, so it is evicted. The next cache // lookup should return the descriptor from the intents, without performing another // db lookup. evictToken.Evict(ctx) abDescIntent, _ := doLookup(ctx, db.cache, "aa") db.assertLookupCountEq(t, 0, "aa") // The descriptors should be different. if reflect.DeepEqual(abDesc, abDescIntent) { t.Errorf("expected initial range descriptor to be different from the one from intents, found %v", abDesc) } // Check that the intent had been inserted into the cache with Generation=0, // signifying a speculative descriptor. require.Equal(t, roachpb.RangeGeneration(0), abDescIntent.Generation) }
explode_data.jsonl/28190
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 442 }
[ 2830, 3393, 6046, 8233, 10253, 1072, 805, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 20939, 1669, 2930, 2271, 11709, 3506, 1155, 340, 20985, 1669, 2266, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLevel_CtxPrintf(t *testing.T) { OnlyConsole() ctx := context.Background() ctx = NewContext(ctx, []zapcore.Field{ {Key: "test1", Type: zapcore.StringType, String: "test123456"}, }...) ctx = NewContext(ctx, []zapcore.Field{ {Key: "test2", Type: zapcore.StringType, String: "test1234567"}, }...) DebugLevel.CtxPrintf(ctx, "a is %s", errors.New("zui le")) InfoLevel.CtxPrintf(ctx, "a is %s", errors.New("zui le")) WarnLevel.CtxPrintf(ctx, "a is %s", errors.New("zui le")) ErrorLevel.CtxPrintf(ctx, "a is %s", errors.New("zui le")) FatalLevel.CtxPrintf(ctx, "a is %s", errors.New("zui le")) }
explode_data.jsonl/7430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 4449, 920, 3998, 42736, 1155, 353, 8840, 836, 8, 341, 197, 7308, 12372, 741, 20985, 1669, 2266, 19047, 741, 20985, 284, 1532, 1972, 7502, 11, 3056, 92371, 2153, 17087, 515, 197, 197, 90, 1592, 25, 330, 1944, 16, 497, 3990,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCrConflictMoveAndSetexWrittenFile is a conflict-resolution DSL
// scenario: alice writes "a/b" while bob is offline; bob renames it to
// "a/c" and marks it executable before syncing. After bob reenables
// updates, both users must see a single "a/c" entry with EXEC permissions
// containing alice's later content "world".
func TestCrConflictMoveAndSetexWrittenFile(t *testing.T) { test(t, users("alice", "bob"), as(alice, mkdir("a"), write("a/b", "hello"), ), as(bob, disableUpdates(), ), as(alice, write("a/b", "world"), ), as(bob, noSync(), rename("a/b", "a/c"), setex("a/c", true), reenableUpdates(), lsdir("a/", m{"c$": "EXEC"}), read("a/c", "world"), ), as(alice, lsdir("a/", m{"c$": "EXEC"}), read("a/c", "world"), ), ) }
explode_data.jsonl/31373
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 16001, 57974, 9860, 3036, 1649, 327, 35624, 1703, 1155, 353, 8840, 836, 8, 341, 18185, 1155, 345, 197, 90896, 445, 63195, 497, 330, 47086, 4461, 197, 60451, 17643, 558, 345, 298, 88650, 445, 64, 4461, 298, 24945, 445, 64, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInodeIdsReuseCheck shortens the inode-id reuse delay and GC period,
// creates files to pin down sequential id assignment, waits until garbage
// collection lowers the allocator's high mark below the last issued id,
// and then verifies newly created files continue the sequence — i.e. ids
// keep incrementing rather than being handed out of order after GC.
func TestInodeIdsReuseCheck(t *testing.T) { runTestCustomConfig(t, dirtyDelay100Ms, func(test *testHelper) { workspace := test.NewWorkspace() func() { defer test.qfs.inodeIds.lock.Lock().Unlock() test.qfs.inodeIds.reusableDelay = time.Millisecond * 2 test.qfs.inodeIds.gcPeriod = time.Millisecond * 20 }() test.AssertNoErr(os.MkdirAll(workspace+"/dirA/dirB", 0777)) test.MakeFile(workspace + "/dirA/dirB/fileA") test.AssertNoErr(os.MkdirAll(workspace+"/dirA/dirC", 0777)) test.MakeFile(workspace + "/dirA/dirC/fileB") fileA := test.getInodeNum(workspace + "/dirA/dirB/fileA") dirC := test.getInodeNum(workspace + "/dirA/dirC") fileB := test.getInodeNum(workspace + "/dirA/dirC/fileB") test.Assert(dirC == fileA+1, "inode id not simply incremented") test.Assert(fileB == dirC+1, "inode id not simply incremented") c := test.newCtx() // wait for garbage collection to happen at least once test.WaitFor("inode ids to be garbage collected", func() bool { defer test.qfs.inodeIds.lock.Lock().Unlock() test.qfs.inodeIds.testHighmark_(c) return test.qfs.inodeIds.highMark < uint64(fileB) }) test.MakeFile(workspace + "/dirA/fileC") test.MakeFile(workspace + "/dirA/fileD") fileC := test.getInodeNum(workspace + "/dirA/fileC") fileD := test.getInodeNum(workspace + "/dirA/fileD") test.Assert(fileC == fileB+1, "inode id not incremented after GC") test.Assert(fileD == fileC+1, "inode id not incremented after GC") }) }
explode_data.jsonl/1828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 641, 534, 12701, 38081, 3973, 1155, 353, 8840, 836, 8, 341, 56742, 2271, 10268, 2648, 1155, 11, 18595, 20039, 16, 15, 15, 21634, 11, 2915, 8623, 353, 1944, 5511, 8, 341, 197, 197, 42909, 1669, 1273, 7121, 45981, 2822, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFirestoreGetDocumentError(t *testing.T) { errCode := codes.PermissionDenied mockFirestore.err = gstatus.Error(errCode, "test error") var formattedName string = fmt.Sprintf("projects/%s/databases/%s/documents/%s/%s", "[PROJECT]", "[DATABASE]", "[DOCUMENT]", "[ANY_PATH]") var request = &firestorepb.GetDocumentRequest{ Name: formattedName, } c, err := NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } resp, err := c.GetDocument(context.Background(), request) if st, ok := gstatus.FromError(err); !ok { t.Errorf("got error %v, expected grpc error", err) } else if c := st.Code(); c != errCode { t.Errorf("got error code %q, want %q", c, errCode) } _ = resp }
explode_data.jsonl/27371
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 268 }
[ 2830, 3393, 48513, 1949, 7524, 1454, 1155, 353, 8840, 836, 8, 341, 9859, 2078, 1669, 13912, 73409, 54481, 198, 77333, 48513, 18441, 284, 342, 2829, 6141, 3964, 2078, 11, 330, 1944, 1465, 5130, 2405, 23126, 675, 914, 284, 8879, 17305, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPrintingDoesNotLeakKey(t *testing.T) { v := fmt.Sprintf("%v", k) assert.Equal(t, v+"\n", fmt.Sprintln(k)) assert.Regexp(t, pkr, v) assert.NotContains(t, v, fmt.Sprintf("%x", sk)) // Other verbs just give the corresponding encoding of .String() assert.Equal(t, fmt.Sprintf("%x", k), hex.EncodeToString([]byte(v))) }
explode_data.jsonl/70242
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 38528, 21468, 2623, 2304, 585, 1592, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 8879, 17305, 4430, 85, 497, 595, 340, 6948, 12808, 1155, 11, 348, 36269, 77, 497, 8879, 808, 33655, 5969, 1171, 6948, 8989, 4580, 1155, 11, 281...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestServerDiscover runs a CoAP/UDP server joined to a multicast group
// on every local interface and checks that Discover reaches it under each
// multicast-interface option (any, first multicast-capable, all). Each
// discovered response must carry codes.BadRequest as set by the handler.
// NOTE(review): relies on the host having at least one up,
// multicast-capable interface and on multicast loopback being permitted.
func TestServerDiscover(t *testing.T) { ifs, err := net.Interfaces() require.NoError(t, err) var iface net.Interface for _, i := range ifs { if i.Flags&net.FlagMulticast == net.FlagMulticast && i.Flags&net.FlagUp == net.FlagUp { iface = i break } } require.NotEmpty(t, iface) type args struct { opts []coapNet.MulticastOption } tests := []struct { name string args args wantErr bool }{ { name: "valid any interface", args: args{ opts: []coapNet.MulticastOption{coapNet.WithAnyMulticastInterface()}, }, }, { name: "valid first interface", args: args{ opts: []coapNet.MulticastOption{coapNet.WithMulticastInterface(iface)}, }, }, { name: "valid all interfaces", args: args{ opts: []coapNet.MulticastOption{coapNet.WithAllMulticastInterface()}, }, }, } timeout := time.Millisecond * 500 multicastAddr := "224.0.1.187:9999" path := "/oic/res" l, err := coapNet.NewListenUDP("udp4", multicastAddr) require.NoError(t, err) defer func() { err := l.Close() require.NoError(t, err) }() ifaces, err := net.Interfaces() require.NoError(t, err) a, err := net.ResolveUDPAddr("udp4", multicastAddr) require.NoError(t, err) for _, iface := range ifaces { err := l.JoinGroup(&iface, a) if err != nil { t.Logf("cannot JoinGroup(%v, %v): %v", iface, a, err) } } err = l.SetMulticastLoopback(true) require.NoError(t, err) var wg sync.WaitGroup defer wg.Wait() s := udp.NewServer(udp.WithHandlerFunc(func(w *client.ResponseWriter, r *pool.Message) { err := w.SetResponse(codes.BadRequest, message.TextPlain, bytes.NewReader(make([]byte, 5330))) require.NoError(t, err) require.NotNil(t, w.ClientConn()) })) defer s.Stop() wg.Add(1) go func() { defer wg.Done() err := s.Serve(l) require.NoError(t, err) }() ld, err := coapNet.NewListenUDP("udp4", "") require.NoError(t, err) defer func() { err := ld.Close() require.NoError(t, err) }() sd := udp.NewServer() defer sd.Stop() wg.Add(1) go func() { defer wg.Done() err := sd.Serve(ld) require.NoError(t, err) }() for _, tt := range tests { t.Run(tt.name, func(t *testing.T) 
{ ctx, cancel := context.WithTimeout(context.Background(), timeout) defer cancel() recv := &mcastreceiver{} err = sd.Discover(ctx, multicastAddr, path, recv.process, tt.args.opts...) require.NoError(t, err) got := recv.pop() require.Greater(t, len(got), 0) require.Equal(t, codes.BadRequest, got[0].Code()) }) } }
explode_data.jsonl/34011
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1123 }
[ 2830, 3393, 5475, 50002, 1155, 353, 8840, 836, 8, 341, 743, 82, 11, 1848, 1669, 4179, 28159, 741, 17957, 35699, 1155, 11, 1848, 340, 2405, 49313, 4179, 41065, 198, 2023, 8358, 600, 1669, 2088, 84073, 341, 197, 743, 600, 51887, 5, 4711...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Int64(t *testing.T) { a := assert.New(t) a.Equal(Int64(0, 1), int64(1)) a.Equal(Int64(2, 1), int64(2)) var s int64 a.Equal(Int64(s, 1), int64(1)) }
explode_data.jsonl/12072
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 32054, 21, 19, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 692, 11323, 12808, 24123, 21, 19, 7, 15, 11, 220, 16, 701, 526, 21, 19, 7, 16, 1171, 11323, 12808, 24123, 21, 19, 7, 17, 11, 220, 16, 701, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestU8_Eq checks U8 equality: equal U8 values compare true, and a U8
// never equals a value of a different type (here a Bool).
func TestU8_Eq(t *testing.T) {
	cases := []eqAssert{
		{NewU8(23), NewU8(23), true},
		{NewU8(23), NewBool(false), false},
	}
	assertEq(t, cases)
}
explode_data.jsonl/18390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 52, 23, 2089, 80, 1155, 353, 8840, 836, 8, 341, 6948, 27312, 1155, 11, 3056, 11006, 8534, 515, 197, 197, 90, 3564, 52, 23, 7, 17, 18, 701, 1532, 52, 23, 7, 17, 18, 701, 830, 1583, 197, 197, 90, 3564, 52, 23, 7, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFloatListDecoderReuseRecords(t *testing.T) { b1 := NewFloatListEncoder().Encode([]float64{1.2, 2.3}) b2 := NewFloatListEncoder().Encode([]float64{3, -4, 5}) d := NewFloatListDecoder(false) sl1 := d.Decode(b1) sl2 := d.Decode(b2) assert.Equal(t, []float64{1.2, 2.3}, sl1) assert.Equal(t, []float64{3, -4, 5}, sl2) d = NewFloatListDecoder(true) sl1 = d.Decode(b1) sl2 = d.Decode(b2) assert.Equal(t, []float64{3, -4}, sl1) assert.Equal(t, []float64{3, -4, 5}, sl2) }
explode_data.jsonl/60191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 5442, 852, 20732, 38081, 25876, 1155, 353, 8840, 836, 8, 341, 2233, 16, 1669, 1532, 5442, 852, 19921, 1005, 32535, 10556, 3649, 21, 19, 90, 16, 13, 17, 11, 220, 17, 13, 18, 3518, 2233, 17, 1669, 1532, 5442, 852, 19921, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRecordIterator_Next covers the record iterator's Next: propagation
// of store.ErrNotFound from both the position accessor and the record
// accessor, a successful same-pulse read (position advanced to 2), and
// the cross-pulse edges — a failing Forwards lookup, and a successful
// switch to the next pulse where reading restarts at position 1.
func TestRecordIterator_Next(t *testing.T) { ctx := inslogger.TestContext(t) t.Run("returns err, if AtPosition returns err", func(t *testing.T) { pn := gen.PulseNumber() positionAccessor := object.NewRecordPositionAccessorMock(t) positionAccessor.LastKnownPositionMock.Expect(pn).Return(10, nil) positionAccessor.AtPositionMock.Expect(pn, uint32(2)).Return(insolar.ID{}, store.ErrNotFound) iter := newRecordIterator(pn, 1, 0, positionAccessor, nil, nil, nil) _, err := iter.Next(ctx) require.Error(t, err) require.Contains(t, err.Error(), store.ErrNotFound.Error()) }) t.Run("returns err, if ForID returns err", func(t *testing.T) { pn := gen.PulseNumber() id := gen.ID() positionAccessor := object.NewRecordPositionAccessorMock(t) positionAccessor.LastKnownPositionMock.Expect(pn).Return(10, nil) positionAccessor.AtPositionMock.Expect(pn, uint32(2)).Return(id, nil) recordsAccessor := object.NewRecordAccessorMock(t) recordsAccessor.ForIDMock.Expect(ctx, id).Return(record.Material{}, store.ErrNotFound) iter := newRecordIterator(pn, 1, 0, positionAccessor, recordsAccessor, nil, nil) _, err := iter.Next(ctx) require.Error(t, err) require.Contains(t, err.Error(), store.ErrNotFound.Error()) }) t.Run("reading data works", func(t *testing.T) { pn := gen.PulseNumber() id := gen.IDWithPulse(pn) positionAccessor := object.NewRecordPositionAccessorMock(t) positionAccessor.LastKnownPositionMock.Expect(pn).Return(10, nil) positionAccessor.AtPositionMock.Expect(pn, uint32(2)).Return(id, nil) record := record.Material{ JetID: gen.JetID(), ID: id, } recordsAccessor := object.NewRecordAccessorMock(t) recordsAccessor.ForIDMock.Expect(ctx, id).Return(record, nil) iter := newRecordIterator(pn, 1, 0, positionAccessor, recordsAccessor, nil, nil) next, err := iter.Next(ctx) require.NoError(t, err) require.Equal(t, uint32(1), iter.read) require.Equal(t, pn, next.Record.ID.Pulse()) require.Equal(t, uint32(2), next.RecordNumber) require.Equal(t, id, next.Record.ID) require.Equal(t, record, 
next.Record) }) t.Run("cross-pulse edges", func(t *testing.T) { t.Run("Forwards returns error", func(t *testing.T) { pn := gen.PulseNumber() positionAccessor := object.NewRecordPositionAccessorMock(t) positionAccessor.LastKnownPositionMock.Expect(pn).Return(1, nil) pulseCalculator := network.NewPulseCalculatorMock(t) pulseCalculator.ForwardsMock.Expect(ctx, pn, 1).Return(insolar.Pulse{}, store.ErrNotFound) iter := newRecordIterator(pn, 1, 0, positionAccessor, nil, nil, pulseCalculator) _, err := iter.Next(ctx) require.Error(t, err) require.Contains(t, err.Error(), store.ErrNotFound.Error()) }) t.Run("Changing pulse works successfully", func(t *testing.T) { firstPN := gen.PulseNumber() nextPN := firstPN + 10 id := gen.IDWithPulse(nextPN) jetKeeper := executor.NewJetKeeperMock(t) jetKeeper.TopSyncPulseMock.Return(nextPN) positionAccessor := object.NewRecordPositionAccessorMock(t) positionAccessor.LastKnownPositionMock.When(firstPN).Then(5, nil) positionAccessor.LastKnownPositionMock.When(nextPN).Then(1, nil) positionAccessor.AtPositionMock.Expect(nextPN, uint32(1)).Return(id, nil) rec := record.Material{ JetID: gen.JetID(), ID: id, } recordsAccessor := object.NewRecordAccessorMock(t) recordsAccessor.ForIDMock.Expect(ctx, id).Return(rec, nil) pulseCalculator := network.NewPulseCalculatorMock(t) pulseCalculator.ForwardsMock.Expect(ctx, firstPN, 1).Return(insolar.Pulse{PulseNumber: nextPN}, nil) iter := newRecordIterator(firstPN, 10, 0, positionAccessor, recordsAccessor, jetKeeper, pulseCalculator) next, err := iter.Next(ctx) require.NoError(t, err) require.Equal(t, nextPN, iter.currentPulse) require.Equal(t, uint32(1), iter.read) require.Equal(t, nextPN, next.Record.ID.Pulse()) require.Equal(t, uint32(1), next.RecordNumber) require.Equal(t, id, next.Record.ID) require.Equal(t, rec, next.Record) }) }) }
explode_data.jsonl/17009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1599 }
[ 2830, 3393, 6471, 11951, 1604, 427, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1640, 9786, 8787, 1972, 1155, 692, 3244, 16708, 445, 4216, 1848, 11, 421, 2411, 3812, 4675, 1848, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3223, 77, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatchScheme(t *testing.T) { cases := []struct { value string scheme string matches bool }{ {"http", "http", true}, {"https", "https", true}, {"http", "https", false}, {"", "https", true}, {"https", "", true}, } for _, test := range cases { req := &http.Request{URL: &url.URL{Scheme: test.scheme}} ereq := &Request{URLStruct: &url.URL{Scheme: test.value}} matches, err := MatchScheme(req, ereq) st.Expect(t, err, nil) st.Expect(t, matches, test.matches) } }
explode_data.jsonl/9198
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 8331, 28906, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 16309, 256, 914, 198, 197, 1903, 8058, 220, 914, 198, 197, 2109, 9118, 1807, 198, 197, 59403, 197, 197, 4913, 1254, 497, 330, 1254, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateStringWithVariablesFail(t *testing.T) { testStrings := []string{ "$scheme}", "${sch${eme}${host}", "host$", "${host", "${invalid}", } validVars := map[string]bool{"scheme": true, "host": true} for _, test := range testStrings { allErrs := validateStringWithVariables(test, field.NewPath("string"), nil, validVars) if len(allErrs) == 0 { t.Errorf("validateStringWithVariables(%v) returned no errors for invalid input", test) } } specialVars := []string{"arg", "http", "cookie"} testStringsSpecial := []string{ "${arg_username%}", "${http_header-name}", "${cookie_cookie?name}", } for _, test := range testStringsSpecial { allErrs := validateStringWithVariables(test, field.NewPath("string"), specialVars, validVars) if len(allErrs) == 0 { t.Errorf("validateStringWithVariables(%v) returned no errors for invalid input", test) } } }
explode_data.jsonl/65909
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 342 }
[ 2830, 3393, 17926, 703, 2354, 22678, 19524, 1155, 353, 8840, 836, 8, 341, 18185, 20859, 1669, 3056, 917, 515, 197, 197, 33732, 46141, 24375, 197, 197, 74013, 21062, 2365, 3894, 31192, 3790, 24375, 197, 197, 1, 3790, 3, 756, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDoltTransactionCommitLateFkResolution(t *testing.T) { if types.IsFormat_DOLT_1(types.Format_Default) { t.Skip() } harness := newDoltHarness(t) enginetest.TestTransactionScript(t, harness, enginetest.TransactionTest{ Name: "delayed foreign key resolution with transaction commits", SetUpScript: []string{ "SET foreign_key_checks=0;", "CREATE TABLE child (pk BIGINT PRIMARY KEY, v1 BIGINT, CONSTRAINT fk_late FOREIGN KEY (v1) REFERENCES parent (pk));", "SET foreign_key_checks=1;", "CREATE TABLE parent (pk BIGINT PRIMARY KEY);", "INSERT INTO parent VALUES (1), (2);", }, Assertions: []enginetest.ScriptTestAssertion{ { Query: "/* client a */ SET @@autocommit=0;", Expected: []sql.Row{{}}, }, { Query: "/* client b */ SET @@autocommit=0;", Expected: []sql.Row{{}}, }, { Query: "/* client a */ START TRANSACTION;", Expected: []sql.Row{}, }, { Query: "/* client b */ START TRANSACTION;", Expected: []sql.Row{}, }, { Query: "/* client a */ INSERT INTO child VALUES (1, 1);", Expected: []sql.Row{{sql.NewOkResult(1)}}, }, { Query: "/* client b */ INSERT INTO child VALUES (2, 2);", Expected: []sql.Row{{sql.NewOkResult(1)}}, }, { Query: "/* client a */ COMMIT;", Expected: []sql.Row{}, }, { Query: "/* client b */ COMMIT;", Expected: []sql.Row{}, }, { Query: "/* client a */ SELECT * FROM child ORDER BY pk;", Expected: []sql.Row{{1, 1}, {2, 2}}, }, { Query: "/* client b */ SELECT * FROM child ORDER BY pk;", Expected: []sql.Row{{1, 1}, {2, 2}}, }, { // This uses the index, which is automatically created by the late fk resolution, so it's also tested here Query: "/* client a */ SELECT * FROM child WHERE v1 > 0 ORDER BY pk;", Expected: []sql.Row{{1, 1}, {2, 2}}, }, { // This uses the index, which is automatically created by the late fk resolution, so it's also tested here Query: "/* client b */ SELECT * FROM child WHERE v1 > 0 ORDER BY pk;", Expected: []sql.Row{{1, 1}, {2, 2}}, }, { Query: "/* client a */ INSERT INTO child VALUES (3, 3);", ExpectedErr: sql.ErrForeignKeyChildViolation, 
}, { Query: "/* client b */ INSERT INTO child VALUES (3, 3);", ExpectedErr: sql.ErrForeignKeyChildViolation, }, }, }) }
explode_data.jsonl/5290
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1060 }
[ 2830, 3393, 35, 6181, 8070, 33441, 61457, 37, 74, 38106, 1155, 353, 8840, 836, 8, 341, 743, 4494, 4506, 4061, 1557, 35320, 62, 16, 52613, 9978, 60336, 8, 341, 197, 3244, 57776, 741, 197, 630, 9598, 23518, 1669, 501, 35, 6181, 74248, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2