text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestPBStandardDeviationOverflow(t *testing.T) { valAddr := sdk.ValAddress(secp256k1.GenPrivKey().PubKey().Address()) exchangeRate, err := sdk.NewDecFromStr("100000000000000000000000000000000000000000000000000000000.0") require.NoError(t, err) pb := ExchangeRateBallot{NewVoteForTally( sdk.ZeroDec(), core.MicroSDRDenom, valAddr, 2, ), NewVoteForTally( exchangeRate, core.MicroSDRDenom, valAddr, 1, )} require.Equal(t, sdk.ZeroDec(), pb.StandardDeviation(pb.WeightedMedianWithAssertion())) }
explode_data.jsonl/38396
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 40637, 19781, 14592, 7101, 42124, 1155, 353, 8840, 836, 8, 341, 19302, 13986, 1669, 45402, 77819, 4286, 10478, 4672, 17, 20, 21, 74, 16, 65384, 32124, 1592, 1005, 29162, 1592, 1005, 4286, 2398, 8122, 3373, 11564, 11, 1848, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_DeployHandler_Execution_NoUUIDorSHA_Override(t *testing.T) { release := MockRelease() release.UUID = to.Strp("badString") release.ReleaseSHA256 = "badString" awsc := MockAwsClients(release) state_machine := createTestStateMachine(t, awsc) exec, err := state_machine.Execute(release) output := exec.Output assert.NoError(t, err) assert.Equal(t, output["success"], true) assert.NotEqual(t, output["uuid"], "badString") assert.NotEqual(t, output["release_sha256"], "badString") }
explode_data.jsonl/62289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 90680, 1989, 3050, 62, 20294, 36989, 24754, 269, 33145, 62, 2177, 1155, 353, 8840, 836, 8, 341, 17200, 1623, 1669, 14563, 16077, 741, 17200, 1623, 39636, 284, 311, 27318, 79, 445, 13855, 703, 1138, 17200, 1623, 58693, 33145, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDelNick(t *testing.T) { ch := NewChannel("#test1") nk := NewNick("test1") cp := new(ChanPrivs) ch.addNick(nk, cp) ch.delNick(nk) if len(ch.nicks) != 0 || len(ch.lookup) != 0 { t.Errorf("Nick lists not updated correctly for del.") } if c, ok := ch.nicks[nk]; ok || c != nil { t.Errorf("Nick test1 not properly removed from nicks map.") } if n, ok := ch.lookup["#test1"]; ok || n != nil { t.Errorf("Nick test1 not properly removed from lookup map.") } }
explode_data.jsonl/2722
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 16532, 30356, 1155, 353, 8840, 836, 8, 341, 23049, 1669, 1532, 9629, 3584, 1944, 16, 1138, 9038, 74, 1669, 1532, 30356, 445, 1944, 16, 1138, 52018, 1669, 501, 7, 46019, 32124, 82, 692, 23049, 1364, 30356, 1445, 74, 11, 124...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestBasic(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() ds, rt := setupRoot(ctx, t) rootdir := rt.GetValue().(*Directory) // test making a basic dir _, err := rootdir.Mkdir("a") if err != nil { t.Fatal(err) } path := "a/b/c/d/e/f/g" d := mkdirP(t, rootdir, path) fi := getRandFile(t, ds, 1000) // test inserting that file err = d.AddChild("afile", fi) if err != nil { t.Fatal(err) } err = assertFileAtPath(ds, rootdir, fi, "a/b/c/d/e/f/g/afile") if err != nil { t.Fatal(err) } }
explode_data.jsonl/35816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 15944, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 741, 83336, 11, 16677, 1669, 6505, 8439, 7502, 11, 259, 692, 33698, 3741, 1669, 16677, 25816, 1005, 4071, 9310, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUDP(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) wg := &sync.WaitGroup{} wg.Add(1) go func(ctx context.Context) { l := alistener.NewRealListener(server.NewBaseTcp("127.0.0.1", 4888).Listen()) l.RegisterAcceptor(alistener.NewSocksAcceptMid(ctx, "tag", map[string]interface{}{"host": "127.0.0.1", "port": 4888.0})) go func() { <-ctx.Done() l.Close() }() main: for { select { case <-ctx.Done(): break main default: } _, _, err := l.Accept(ctx) if err != nil && err != alistener.ErrUDP { t.Log(err) continue } t.Log(1) } wg.Done() }(ctx) time.Sleep(time.Second) c, err := socks5.NewClient("127.0.0.1:4888", "", "", 0, 60) if err != nil { panic(err) } conn, err := c.Dial("udp", "8.8.8.8:53") if err != nil { panic(err) } b, err := hex.DecodeString("0001010000010000000000000a74787468696e6b696e6703636f6d0000010001") if err != nil { panic(err) } t.Log(b) if _, err := conn.Write(b); err != nil { panic(err) } b2 := make([]byte, 2048) n, err := conn.Read(b2) if err != nil { panic(err) } b2 = b2[:n] b2 = b2[len(b2)-4:] log.Println("result", net.IPv4(b2[0], b2[1], b2[2], b2[3])) if _, err := conn.Write(b); err != nil { panic(err) } b2 = make([]byte, 2048) n, err = conn.Read(b2) if err != nil { panic(err) } b2 = b2[:n] b2 = b2[len(b2)-4:] log.Println("result", net.IPv4(b2[0], b2[1], b2[2], b2[3])) cancel() wg.Wait() }
explode_data.jsonl/57459
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 742 }
[ 2830, 3393, 41648, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 12367, 72079, 1669, 609, 12996, 28384, 2808, 16094, 72079, 1904, 7, 16, 340, 30680, 2915, 7502, 2266, 9328, 8, 341, 197, 8810, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSearchTokens(t *testing.T) { node1 := resource.TrieNode{ ChildrenNodes: nil, SvgIds: map[string]struct{}{"group_1": {}, "group_31": {}}, SvIds: map[string]struct{}{"sv_1_2": {}}, } node3 := resource.TrieNode{ ChildrenNodes: nil, SvgIds: nil, SvIds: map[string]struct{}{"sv_1_1": {}, "sv_1_2": {}}, } nodeX := resource.TrieNode{ ChildrenNodes: nil, SvgIds: map[string]struct{}{"group_3": {}}, SvIds: map[string]struct{}{"sv_1_2": {}, "sv_3": {}}, } nodeDX := resource.TrieNode{ ChildrenNodes: map[rune]*resource.TrieNode{ 'x': &nodeX, }, SvgIds: nil, SvIds: nil, } nodeC := resource.TrieNode{ ChildrenNodes: map[rune]*resource.TrieNode{ '3': &node3, }, SvgIds: nil, SvIds: nil, } nodeZ := resource.TrieNode{ ChildrenNodes: map[rune]*resource.TrieNode{ 'd': &nodeDX, }, SvgIds: nil, SvIds: nil, } nodeB := resource.TrieNode{ ChildrenNodes: map[rune]*resource.TrieNode{ '1': &node1, }, SvgIds: nil, SvIds: nil, } nodeA := resource.TrieNode{ ChildrenNodes: map[rune]*resource.TrieNode{ 'b': &nodeB, 'c': &nodeC, }, SvgIds: nil, SvIds: nil, } for _, c := range []struct { tokens []string index *resource.SearchIndex wantSv []*pb.EntityInfo wantSvg []*pb.EntityInfo }{ { tokens: []string{"ab1"}, index: &resource.SearchIndex{ RootTrieNode: &resource.TrieNode{ ChildrenNodes: map[rune]*resource.TrieNode{ 'a': &nodeA, 'z': &nodeZ, }, SvgIds: nil, SvIds: nil, }, Ranking: map[string]*resource.RankingInfo{ "group_1": { ApproxNumPv: 2, RankingName: "token1 token2", }, "sv_1_2": { ApproxNumPv: 3, RankingName: "token1 token3 token4", }, "group_31": { ApproxNumPv: 2, RankingName: "token1 token5 token6", }, }, }, wantSv: []*pb.EntityInfo{ { Dcid: "sv_1_2", Name: "token1 token3 token4", }, }, wantSvg: []*pb.EntityInfo{ { Dcid: "group_1", Name: "token1 token2", }, { Dcid: "group_31", Name: "token1 token5 token6", }, }, }, { tokens: []string{"ab", "zd", "ac3"}, index: &resource.SearchIndex{ RootTrieNode: &resource.TrieNode{ ChildrenNodes: map[rune]*resource.TrieNode{ 'a': &nodeA, 'z': &nodeZ, }, 
SvgIds: nil, SvIds: nil, }, Ranking: map[string]*resource.RankingInfo{ "sv_1_1": { ApproxNumPv: 3, RankingName: "token2 token3", }, "sv_1_2": { ApproxNumPv: 3, RankingName: "token2 token3 token4", }, "sv_3": { ApproxNumPv: 20, RankingName: "token4", }, "group_3": { ApproxNumPv: 2, RankingName: "token2 token4 token6", }, }, }, wantSv: []*pb.EntityInfo{ { Dcid: "sv_1_2", Name: "token2 token3 token4", }, }, wantSvg: []*pb.EntityInfo{}, }, } { sv, svg := searchTokens(c.tokens, c.index) if diff := cmp.Diff(sv, c.wantSv, protocmp.Transform()); diff != "" { t.Errorf("Stat var list got diff %v", diff) } if diff := cmp.Diff(svg, c.wantSvg, protocmp.Transform()); diff != "" { t.Errorf("Stat var group list got diff %v", diff) } } }
explode_data.jsonl/68104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1793 }
[ 2830, 3393, 5890, 29300, 1155, 353, 8840, 836, 8, 341, 20831, 16, 1669, 5101, 836, 7231, 1955, 515, 197, 197, 11539, 12288, 25, 2092, 345, 197, 7568, 7239, 12701, 25, 286, 2415, 14032, 60, 1235, 6257, 4913, 4074, 62, 16, 788, 16452, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGitCommandUnstageFile(t *testing.T) { type scenario struct { testName string command func(string, ...string) *exec.Cmd test func(error) reset bool } scenarios := []scenario{ { "Remove an untracked file from staging", func(cmd string, args ...string) *exec.Cmd { assert.EqualValues(t, "git", cmd) assert.EqualValues(t, []string{"rm", "--cached", "--force", "--", "test.txt"}, args) return secureexec.Command("echo") }, func(err error) { assert.NoError(t, err) }, false, }, { "Remove a tracked file from staging", func(cmd string, args ...string) *exec.Cmd { assert.EqualValues(t, "git", cmd) assert.EqualValues(t, []string{"reset", "HEAD", "--", "test.txt"}, args) return secureexec.Command("echo") }, func(err error) { assert.NoError(t, err) }, true, }, } for _, s := range scenarios { t.Run(s.testName, func(t *testing.T) { gitCmd := NewDummyGitCommand() gitCmd.OSCommand.Command = s.command s.test(gitCmd.UnStageFile([]string{"test.txt"}, s.reset)) }) } }
explode_data.jsonl/6317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 463 }
[ 2830, 3393, 46562, 4062, 1806, 20743, 1703, 1155, 353, 8840, 836, 8, 341, 13158, 15048, 2036, 341, 197, 18185, 675, 914, 198, 197, 45566, 220, 2915, 3609, 11, 2503, 917, 8, 353, 11748, 64512, 198, 197, 18185, 257, 2915, 6390, 340, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRollupMemoryConstraint(t *testing.T) { defer leaktest.AfterTest(t)() tm := newTestModelRunner(t) tm.Start() defer tm.Stop() series1 := tsd("test.metric", "a") series2 := tsd("test.othermetric", "a") for i := 0; i < 500; i++ { series1.Datapoints = append(series1.Datapoints, tsdp(time.Duration(i), float64(i))) series2.Datapoints = append(series2.Datapoints, tsdp(time.Duration(i), float64(i))) } tm.storeTimeSeriesData(resolution1ns, []tspb.TimeSeriesData{series1, series2}) tm.assertKeyCount(100) tm.assertModelCorrect() // Construct a memory monitor that will be used to measure the high-water // mark of memory usage for the rollup process. adjustedMon := mon.NewMonitor( "timeseries-test-worker-adjusted", mon.MemoryResource, nil, nil, 1, math.MaxInt64, cluster.MakeTestingClusterSettings(), ) adjustedMon.Start(context.Background(), tm.workerMemMonitor, mon.BoundAccount{}) defer adjustedMon.Stop(context.Background()) // Roll up time series with the new monitor to measure high-water mark // of qmc := MakeQueryMemoryContext(adjustedMon, adjustedMon, QueryMemoryOptions{ // Large budget, but not maximum to avoid overflows. BudgetBytes: math.MaxInt64, EstimatedSources: 1, // Not needed for rollups Columnar: tm.DB.WriteColumnar(), }) tm.rollupWithMemoryContext(qmc, 500+resolution1nsDefaultRollupThreshold.Nanoseconds(), timeSeriesResolutionInfo{ Name: "test.othermetric", Resolution: resolution1ns, }) tm.prune(500+resolution1nsDefaultRollupThreshold.Nanoseconds(), timeSeriesResolutionInfo{ Name: "test.othermetric", Resolution: resolution1ns, }) tm.assertKeyCount(51) tm.assertModelCorrect() // Ensure that we used at least 50 slabs worth of memory at one time. if a, e := adjustedMon.MaximumBytes(), 50*qmc.computeSizeOfSlab(resolution1ns); a < e { t.Fatalf("memory usage for query was %d, wanted at least %d", a, e) } // Limit testing: set multiple constraints on memory and ensure that they // are being respected through chunking. 
for i, limit := range []int64{ 25 * qmc.computeSizeOfSlab(resolution1ns), 10 * qmc.computeSizeOfSlab(resolution1ns), } { // Generate a new series. seriesName := fmt.Sprintf("metric.series%d", i) seriesData := tsd(seriesName, "a") for j := 0; j < 500; j++ { seriesData.Datapoints = append(seriesData.Datapoints, tsdp(time.Duration(j), float64(j))) } tm.storeTimeSeriesData(resolution1ns, []tspb.TimeSeriesData{seriesData}) tm.assertModelCorrect() tm.assertKeyCount(51 + i /* rollups from previous iterations */ + 50) // Restart monitor to clear query memory options. adjustedMon.Stop(context.Background()) adjustedMon.Start(context.Background(), tm.workerMemMonitor, mon.BoundAccount{}) qmc := MakeQueryMemoryContext(adjustedMon, adjustedMon, QueryMemoryOptions{ // Large budget, but not maximum to avoid overflows. BudgetBytes: limit, EstimatedSources: 1, // Not needed for rollups Columnar: tm.DB.WriteColumnar(), }) tm.rollupWithMemoryContext(qmc, 500+resolution1nsDefaultRollupThreshold.Nanoseconds(), timeSeriesResolutionInfo{ Name: seriesName, Resolution: resolution1ns, }) tm.prune(500+resolution1nsDefaultRollupThreshold.Nanoseconds(), timeSeriesResolutionInfo{ Name: seriesName, Resolution: resolution1ns, }) tm.assertKeyCount(51 + i + 1) tm.assertModelCorrect() // Check budget was not exceeded. Computation of budget usage is not exact // in the case of rollups, due to the fact that results are tracked with // the same monitor but may vary in size based on the specific input // rows. Because of this, allow up to 20% over limit. if a, e := float64(adjustedMon.MaximumBytes()), float64(limit)*1.2; a > e { t.Fatalf("memory usage for query was %f, wanted a limit of %f", a, e) } // Check that budget was used. if a, e := float64(adjustedMon.MaximumBytes()), float64(limit)*0.95; a < e { t.Fatalf("memory usage for query was %f, wanted at least %f", a, e) } } }
explode_data.jsonl/71877
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1449 }
[ 2830, 3393, 32355, 454, 10642, 17890, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 3244, 76, 1669, 501, 2271, 1712, 19486, 1155, 340, 3244, 76, 12101, 741, 16867, 17333, 30213, 2822, 197, 19880, 16, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestMetricRules(t *testing.T) { var tcs []CrossAgentRulesTestcase err := crossagent.ReadJSON("rules.json", &tcs) if err != nil { t.Fatal(err) } for _, tc := range tcs { tc.run(t) } }
explode_data.jsonl/63828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 54310, 26008, 1155, 353, 8840, 836, 8, 341, 2405, 259, 4837, 3056, 28501, 16810, 26008, 2271, 5638, 271, 9859, 1669, 5312, 8092, 6503, 5370, 445, 21977, 4323, 497, 609, 83, 4837, 340, 743, 1848, 961, 2092, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestDecodedLen(t *testing.T) { for _, tt := range []struct { enc *Encoding n int want int }{ {RawStdEncoding, 0, 0}, {RawStdEncoding, 2, 1}, {RawStdEncoding, 3, 2}, {RawStdEncoding, 4, 3}, {RawStdEncoding, 10, 7}, {StdEncoding, 0, 0}, {StdEncoding, 4, 3}, {StdEncoding, 8, 6}, } { if got := tt.enc.DecodedLen(tt.n); got != tt.want { t.Errorf("DecodedLen(%d): got %d, want %d", tt.n, got, tt.want) } } }
explode_data.jsonl/35057
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 4900, 6737, 11271, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 197, 954, 220, 353, 14690, 198, 197, 9038, 262, 526, 198, 197, 50780, 526, 198, 197, 59403, 197, 197, 90, 20015, 22748, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestClusterPoolFromClaimWithLabels(t *testing.T) { testCases := []struct { description string pools []ctrlruntimeclient.Object labels map[string]string expected *hivev1.ClusterPool expectErr error }{ { description: "select the clusters to satisfy labels", labels: map[string]string{"a": "b"}, pools: []ctrlruntimeclient.Object{ &hivev1.ClusterPool{ObjectMeta: v1.ObjectMeta{Name: "pool", Labels: map[string]string{ "architecture": "amd64", "cloud": "aws", "owner": "o", "product": "ocp", "version": "v", }, }, Spec: hivev1.ClusterPoolSpec{Size: 3, MaxSize: pointer.Int32(4)}, Status: hivev1.ClusterPoolStatus{Ready: 0}}, &hivev1.ClusterPool{ObjectMeta: v1.ObjectMeta{Name: "pool with label", Labels: map[string]string{"a": "b", "architecture": "amd64", "cloud": "aws", "owner": "o", "product": "ocp", "version": "v", }}, Spec: hivev1.ClusterPoolSpec{Size: 3, MaxSize: pointer.Int32(3)}, Status: hivev1.ClusterPoolStatus{Ready: 0}}, }, expected: &hivev1.ClusterPool{ObjectMeta: v1.ObjectMeta{Name: "pool with label", Labels: map[string]string{"a": "b", "architecture": "amd64", "cloud": "aws", "owner": "o", "product": "ocp", "version": "v", }}, Spec: hivev1.ClusterPoolSpec{Size: 3, MaxSize: pointer.Int32(3)}, Status: hivev1.ClusterPoolStatus{Ready: 0}}, }, { description: "select the clusters without labels", pools: []ctrlruntimeclient.Object{ &hivev1.ClusterPool{ObjectMeta: v1.ObjectMeta{Name: "pool", Labels: map[string]string{ "architecture": "amd64", "cloud": "aws", "owner": "o", "product": "ocp", "version": "v", }, }, Spec: hivev1.ClusterPoolSpec{Size: 3, MaxSize: pointer.Int32(4)}, Status: hivev1.ClusterPoolStatus{Ready: 0}}, &hivev1.ClusterPool{ObjectMeta: v1.ObjectMeta{Name: "pool with label", Labels: map[string]string{"a": "b", "architecture": "amd64", "cloud": "aws", "owner": "o", "product": "ocp", "version": "v", }}, Spec: hivev1.ClusterPoolSpec{Size: 3, MaxSize: pointer.Int32(3)}, Status: hivev1.ClusterPoolStatus{Ready: 0}}, }, expected: 
&hivev1.ClusterPool{ObjectMeta: v1.ObjectMeta{Name: "pool", Labels: map[string]string{ "architecture": "amd64", "cloud": "aws", "owner": "o", "product": "ocp", "version": "v", }}, Spec: hivev1.ClusterPoolSpec{Size: 3, MaxSize: pointer.Int32(4)}, Status: hivev1.ClusterPoolStatus{Ready: 0}}, }, } for _, tc := range testCases { t.Run(tc.description, func(t *testing.T) { got, err := ClusterPoolFromClaim(context.TODO(), &api.ClusterClaim{Labels: tc.labels, Architecture: api.ReleaseArchitectureAMD64, Cloud: api.CloudAWS, Owner: "o", Product: api.ReleaseProductOCP, Version: "v", }, fakectrlruntimeclient.NewClientBuilder().WithObjects(tc.pools...).Build()) if diff := cmp.Diff(tc.expectErr, err, testhelper.EquateErrorMessage); diff != "" { t.Errorf("error differs from expected:\n%s", diff) return } if diff := cmp.Diff(tc.expected, got, testhelper.RuntimeObjectIgnoreRvTypeMeta); err == nil && diff != "" { t.Errorf("Selected pool differs from expected:\n%s", diff) } }) } }
explode_data.jsonl/29486
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1623 }
[ 2830, 3393, 28678, 10551, 3830, 45544, 2354, 23674, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 3223, 6178, 981, 3056, 11933, 22255, 2972, 8348, 198, 197, 95143, 414, 2415, 14032, 30953, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRedis_Hkeys(t *testing.T) { runOnRedis(t, func(client *Redis) { assert.Nil(t, client.Hset("a", "aa", "aaa")) assert.Nil(t, client.Hset("a", "bb", "bbb")) _, err := NewRedis(client.Addr, "").Hkeys("a") assert.NotNil(t, err) vals, err := client.Hkeys("a") assert.Nil(t, err) assert.ElementsMatch(t, []string{"aa", "bb"}, vals) }) }
explode_data.jsonl/39161
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 48137, 2039, 10563, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 6948, 59678, 1155, 11, 2943, 3839, 746, 445, 64, 497, 330, 5305, 497, 330, 32646, 5455, 197, 6948, 59678...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJoinerWithReference(t *testing.T) { store := mock.NewStorer() ctx, cancel := context.WithCancel(context.Background()) defer cancel() // create root chunk and two data chunks referenced in the root chunk rootChunk := filetest.GenerateTestRandomFileChunk(swarm.ZeroAddress, swarm.ChunkSize*2, swarm.SectionSize*2) _, err := store.Put(ctx, storage.ModePutUpload, rootChunk) if err != nil { t.Fatal(err) } firstAddress := swarm.NewAddress(rootChunk.Data()[8 : swarm.SectionSize+8]) firstChunk := filetest.GenerateTestRandomFileChunk(firstAddress, swarm.ChunkSize, swarm.ChunkSize) _, err = store.Put(ctx, storage.ModePutUpload, firstChunk) if err != nil { t.Fatal(err) } secondAddress := swarm.NewAddress(rootChunk.Data()[swarm.SectionSize+8:]) secondChunk := filetest.GenerateTestRandomFileChunk(secondAddress, swarm.ChunkSize, swarm.ChunkSize) _, err = store.Put(ctx, storage.ModePutUpload, secondChunk) if err != nil { t.Fatal(err) } // read back data and compare joinReader, l, err := joiner.New(ctx, store, rootChunk.Address()) if err != nil { t.Fatal(err) } if l != int64(swarm.ChunkSize*2) { t.Fatalf("expected join data length %d, got %d", swarm.ChunkSize*2, l) } resultBuffer := make([]byte, swarm.ChunkSize) n, err := joinReader.Read(resultBuffer) if err != nil { t.Fatal(err) } if n != len(resultBuffer) { t.Fatalf("expected read count %d, got %d", len(resultBuffer), n) } if !bytes.Equal(resultBuffer, firstChunk.Data()[8:]) { t.Fatalf("expected resultbuffer %v, got %v", resultBuffer, firstChunk.Data()[:len(resultBuffer)]) } }
explode_data.jsonl/51985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 595 }
[ 2830, 3393, 12292, 261, 2354, 8856, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 7860, 7121, 623, 14827, 2822, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 197, 322, 1855, 3704, 11879, 323, 1378, 821, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestNameToIndex(t *testing.T) { // invalid id, err := fullNameToIndex("m") assert.NotNil(t, err) assert.Equal(t, -1, id) id, err = fullNameToIndex("mon") assert.NotNil(t, err) assert.Equal(t, -1, id) id, err = fullNameToIndex("rook-ceph-monitor0") assert.NotNil(t, err) assert.Equal(t, -1, id) // valid id, err = fullNameToIndex("rook-ceph-mon-a") assert.Nil(t, err) assert.Equal(t, 0, id) id, err = fullNameToIndex("rook-ceph-mon123") assert.Nil(t, err) assert.Equal(t, 123, id) }
explode_data.jsonl/39530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 675, 1249, 1552, 1155, 353, 8840, 836, 8, 341, 197, 322, 8318, 198, 15710, 11, 1848, 1669, 48008, 1249, 1552, 445, 76, 1138, 6948, 93882, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, 481, 16, 11, 877, 340, 15710, 11, 1848...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMessageSetFilter(t *testing.T) { var ( value = make(MessageSet, 3) req = require.New(t) ) // filter nothing { set, err := value.Filter(func(*Message) (bool, error) { return true, nil }) req.NoError(err) req.Equal(len(set), len(value)) } // filter one item { found := false set, err := value.Filter(func(*Message) (bool, error) { if !found { found = true return found, nil } return false, nil }) req.NoError(err) req.Len(set, 1) } // filter error { _, err := value.Filter(func(*Message) (bool, error) { return false, errors.New("filter error") }) req.Error(err) } }
explode_data.jsonl/5843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 278 }
[ 2830, 3393, 2052, 1649, 5632, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 16309, 284, 1281, 29359, 1649, 11, 220, 18, 340, 197, 24395, 256, 284, 1373, 7121, 1155, 340, 197, 692, 197, 322, 4051, 4302, 198, 197, 515, 197, 8196, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_addBlobsFromBlobsJSONToState(t *testing.T) { tests := []struct { name string blobMap map[string]*Blob icuDataMap map[string]*Node distributedShlibsMap map[string]*Node blobs []BlobFromJSON expectedBlobMap map[string]*Blob expectedSize int64 }{ { "Adding Asset Blob", map[string]*Blob{"hash": {size: 1}}, map[string]*Node{"test.asset": {fullPath: "test.asset", size: 0, copies: 1, children: map[string]*Node{}}}, map[string]*Node{"lib/ld.so.1": {fullPath: "lib/ld.so.1", size: 0, copies: 1, children: map[string]*Node{}}}, []BlobFromJSON{{Path: "test.asset", Merkle: "hash"}}, map[string]*Blob{}, 1, }, { "Adding Non-asset Blob", map[string]*Blob{"hash": {size: 1, dep: []string{"not used"}}}, map[string]*Node{"test.asset": {fullPath: "test.asset", size: 0, copies: 1, children: map[string]*Node{}}}, map[string]*Node{"lib/ld.so.1": {fullPath: "lib/ld.so.1", size: 0, copies: 1, children: map[string]*Node{}}}, []BlobFromJSON{{Path: "test.notasset", Merkle: "hash"}}, map[string]*Blob{"hash": {size: 1, dep: []string{"not used"}}}, 0, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { st := processingState{ test.blobMap, test.icuDataMap, test.distributedShlibsMap, newDummyNode(), } addBlobsFromBlobsJSONToState(&st, test.blobs, "") if !reflect.DeepEqual(st.blobMap, test.expectedBlobMap) { t.Fatalf("blob map: %v; expect %v", test.blobMap, test.expectedBlobMap) } var totalIcuDataSize int64 for _, node := range test.icuDataMap { totalIcuDataSize += node.size } if totalIcuDataSize != test.expectedSize { t.Fatalf("ICU Data size: %d; expect %d", totalIcuDataSize, test.expectedSize) } }) } }
explode_data.jsonl/12617
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 854 }
[ 2830, 3393, 2891, 33, 68164, 3830, 33, 68164, 5370, 1249, 1397, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 338, 914, 198, 197, 2233, 1684, 2227, 1060, 2415, 14032, 8465, 37985, 198, 197, 197, 292, 84, 1043...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStorageKey_String(t *testing.T) { assertString(t, []stringAssert{ {NewStorageKey([]byte{0, 0, 0}), "[0 0 0]"}, {NewStorageKey([]byte{171, 18, 52}), "[171 18 52]"}, {NewStorageKey([]byte{0, 1}), "[0 1]"}, {NewStorageKey([]byte{18, 52, 86}), "[18 52 86]"}, }) }
explode_data.jsonl/3065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 5793, 1592, 31777, 1155, 353, 8840, 836, 8, 341, 6948, 703, 1155, 11, 3056, 917, 8534, 515, 197, 197, 90, 3564, 5793, 1592, 10556, 3782, 90, 15, 11, 220, 15, 11, 220, 15, 38842, 10545, 15, 220, 15, 220, 15, 60, 7115, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIdDoug(t *testing.T) { tc := SetupEngineTest(t, "id") defer tc.Cleanup() idUI, result, err := runIdentify(&tc, "t_doug") if err != nil { t.Fatal(err) } checkDougProofs(t, idUI, &result.Upk) checkDisplayKeys(t, idUI, 1, 1) }
explode_data.jsonl/52985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 764, 91576, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 18626, 4571, 2271, 1155, 11, 330, 307, 1138, 16867, 17130, 727, 60639, 741, 15710, 2275, 11, 1102, 11, 1848, 1669, 1598, 28301, 1437, 2099, 10413, 11, 330, 83, 814, 48...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAdjacencyList(t *testing.T) { root := &Node{ Children: []*Node{ {Meta: Meta{ID: "N1"}, Ports: []*Port{{Meta: Meta{ID: "P1"}}}}, {Meta: Meta{ID: "N2"}, Ports: []*Port{{Meta: Meta{ID: "P2"}}}}, {Meta: Meta{ID: "N3"}, Ports: []*Port{{Meta: Meta{ID: "P3"}}}}, {Meta: Meta{ID: "N4"}, Ports: []*Port{{Meta: Meta{ID: "P4"}}}}, {Meta: Meta{ID: "N5"}, Ports: []*Port{{Meta: Meta{ID: "P5"}}}}, }, Edges: []*Edge{ {Sources: []string{"P1"}, Targets: []string{"P2"}}, {Sources: []string{"P1"}, Targets: []string{"P4"}}, {Sources: []string{"P2"}, Targets: []string{"P3"}}, {Sources: []string{"P3"}, Targets: []string{"P5"}}, {Sources: []string{"P1"}, Targets: []string{"P5"}}, {Sources: []string{"P4"}, Targets: []string{"P3"}}, {Sources: []string{"P2"}, Targets: []string{"P5"}}, }, } adj := root.AdjacencyList() assert.ElementsMatch(t, adj["N1"], []string{"N2", "N4", "N5"}) assert.ElementsMatch(t, adj["N2"], []string{"N3", "N5"}) assert.ElementsMatch(t, adj["N3"], []string{"N5"}) assert.ElementsMatch(t, adj["N4"], []string{"N3"}) assert.Nil(t, adj["N5"]) }
explode_data.jsonl/78244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 505 }
[ 2830, 3393, 54866, 40624, 852, 1155, 353, 8840, 836, 8, 341, 33698, 1669, 609, 1955, 515, 197, 197, 11539, 25, 29838, 1955, 515, 298, 197, 90, 12175, 25, 15819, 90, 915, 25, 330, 45, 16, 14345, 69866, 25, 29838, 7084, 2979, 12175, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSinglePartitionSubscriberStopDuringAdminSeek(t *testing.T) { subscription := subscriptionPartition{"projects/123456/locations/us-central1-b/subscriptions/my-sub", 0} receiver := newTestMessageReceiver(t) msg1 := seqMsgWithOffsetAndSize(1, 100) msg2 := seqMsgWithOffsetAndSize(2, 100) verifiers := test.NewVerifiers(t) subStream := test.NewRPCVerifier(t) subStream.Push(initSubReqCommit(subscription), initSubResp(), nil) subStream.Push(initFlowControlReq(), msgSubResp(msg1, msg2), nil) // Server disconnects the stream with the RESET signal. subBarrier := subStream.PushWithBarrier(nil, nil, makeStreamResetSignal()) verifiers.AddSubscribeStream(subscription.Path, subscription.Partition, subStream) cmtStream := test.NewRPCVerifier(t) cmtStream.Push(initCommitReq(subscription), initCommitResp(), nil) cmtBarrier := cmtStream.PushWithBarrier(commitReq(3), commitResp(1), nil) verifiers.AddCommitStream(subscription.Path, subscription.Partition, cmtStream) mockServer.OnTestStart(verifiers) defer mockServer.OnTestEnd() sub := newTestSinglePartitionSubscriber(t, receiver.onMessage, subscription) if gotErr := sub.WaitStarted(); gotErr != nil { t.Errorf("Start() got err: (%v)", gotErr) } receiver.ValidateMsg(msg1).Ack() receiver.ValidateMsg(msg2).Ack() subBarrier.Release() // Ensure that the user is able to call Stop while a reset is in progress. // Verifies that the subscribeStream is not holding mutexes while waiting and // that the subscribe stream is not reconnected. cmtBarrier.ReleaseAfter(func() { sub.Stop() }) if gotErr := sub.WaitStopped(); gotErr != nil { t.Errorf("Stop() got err: (%v)", gotErr) } }
explode_data.jsonl/31652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 572 }
[ 2830, 3393, 10888, 49978, 40236, 10674, 16014, 7210, 39350, 1155, 353, 8840, 836, 8, 341, 28624, 12124, 1669, 15142, 49978, 4913, 17161, 14, 16, 17, 18, 19, 20, 21, 14, 31309, 62431, 84081, 16, 1455, 37885, 29966, 34198, 17967, 497, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateResponseBody_StableMarshal(t *testing.T) { responseFrom := generateCreateSessionResponseBody("ID", "Session Public Key") transport := new(grpc.CreateResponse_Body) t.Run("non empty", func(t *testing.T) { wire, err := responseFrom.StableMarshal(nil) require.NoError(t, err) err = goproto.Unmarshal(wire, transport) require.NoError(t, err) responseTo := session.CreateResponseBodyFromGRPCMessage(transport) require.Equal(t, responseFrom, responseTo) }) }
explode_data.jsonl/79968
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 4021, 29637, 70645, 480, 55438, 1155, 353, 8840, 836, 8, 341, 21735, 3830, 1669, 6923, 4021, 5283, 29637, 445, 915, 497, 330, 5283, 3066, 5309, 1138, 197, 26445, 1669, 501, 35963, 3992, 7251, 2582, 1668, 1076, 692, 3244, 167...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestActivate(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) email := &EmailAddress{ ID: int64(1), UID: int64(1), Email: "user11@example.com", } assert.NoError(t, ActivateEmail(email)) emails, _ := GetEmailAddresses(int64(1)) assert.Len(t, emails, 3) assert.True(t, emails[0].IsActivated) assert.True(t, emails[0].IsPrimary) assert.False(t, emails[1].IsPrimary) assert.True(t, emails[2].IsActivated) assert.False(t, emails[2].IsPrimary) }
explode_data.jsonl/67891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 201 }
[ 2830, 3393, 31242, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 19905, 28770, 3380, 2271, 5988, 12367, 57549, 1669, 609, 79986, 515, 197, 29580, 25, 262, 526, 21, 19, 7, 16, 1326, 197, 197, 6463, 25, 256, 526, 21, 19, 7, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNodeGetVolumeStats_NotFound(t *testing.T) { // Create server and client connection s := newTestServer(t) defer s.Stop() c := csi.NewNodeClient(s.Conn()) // Get Capabilities - no volumes found id := "myvol123" gomock.InOrder( s.MockDriver(). EXPECT(). Inspect([]string{id}). Return([]*api.Volume{}, nil). Times(1), ) _, err := c.NodeGetVolumeStats( context.Background(), &csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"}) assert.Error(t, err) statusErr, ok := status.FromError(err) assert.Equal(t, true, ok) assert.Equal(t, codes.NotFound.String(), statusErr.Code().String()) // Get Capabilities - err not found gomock.InOrder( s.MockDriver(). EXPECT(). Inspect([]string{id}). Return([]*api.Volume{}, kvdb.ErrNotFound). Times(1), ) _, err = c.NodeGetVolumeStats( context.Background(), &csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"}) assert.Error(t, err) statusErr, ok = status.FromError(err) assert.Equal(t, true, ok) assert.Equal(t, codes.NotFound.String(), statusErr.Code().String()) // Get Capabilities - attach path does not match VolumePath gomock.InOrder( s.MockDriver(). EXPECT(). Inspect([]string{id}). Return([]*api.Volume{{ Id: id, AttachPath: []string{"bad-test", "test-2"}, }}, nil). Times(1), ) _, err = c.NodeGetVolumeStats( context.Background(), &csi.NodeGetVolumeStatsRequest{VolumeId: id, VolumePath: "/test"}) assert.Error(t, err) statusErr, ok = status.FromError(err) assert.Equal(t, true, ok) assert.Equal(t, codes.NotFound.String(), statusErr.Code().String()) assert.Contains(t, statusErr.Err().Error(), "not mounted on path") }
explode_data.jsonl/51456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 687 }
[ 2830, 3393, 1955, 1949, 18902, 16635, 60816, 6650, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 3538, 323, 2943, 3633, 198, 1903, 1669, 501, 2271, 5475, 1155, 340, 16867, 274, 30213, 2822, 1444, 1669, 272, 6321, 7121, 1955, 2959, 1141,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGreenPath(t *testing.T) { testGreenPath(t, blockDelivererConsumerWithRecv) testGreenPath(t, blockDelivererConsumerWithSend) assert.Equal(t, 0, connNumber) }
explode_data.jsonl/38320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 61 }
[ 2830, 3393, 19576, 1820, 1155, 353, 8840, 836, 8, 341, 18185, 19576, 1820, 1155, 11, 2504, 16532, 1524, 261, 29968, 2354, 63483, 340, 18185, 19576, 1820, 1155, 11, 2504, 16532, 1524, 261, 29968, 2354, 11505, 340, 6948, 12808, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestAES256KeyGenSKI(t *testing.T) { t.Parallel() provider, _, cleanup := currentTestConfig.Provider(t) defer cleanup() k, err := provider.KeyGen(&bccsp.AESKeyGenOpts{Temporary: false}) if err != nil { t.Fatalf("Failed generating AES_256 key [%s]", err) } k2, err := provider.GetKey(k.SKI()) if err != nil { t.Fatalf("Failed getting AES_256 key [%s]", err) } if k2 == nil { t.Fatal("Failed getting AES_256 key. Key must be different from nil") } if !k2.Private() { t.Fatal("Failed getting AES_256 key. Key should be private") } if !k2.Symmetric() { t.Fatal("Failed getting AES_256 key. Key should be symmetric") } // Check that the SKIs are the same if !bytes.Equal(k.SKI(), k2.SKI()) { t.Fatalf("SKIs are different [%x]!=[%x]", k.SKI(), k2.SKI()) } }
explode_data.jsonl/29278
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 69168, 17, 20, 21, 1592, 9967, 81545, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 19979, 11, 8358, 21290, 1669, 1482, 2271, 2648, 36208, 1155, 340, 16867, 21290, 2822, 16463, 11, 1848, 1669, 9109, 9610, 9967, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestCodeVarint(t *testing.T) { var buf = make([]byte, 10) var v uint64 = 128 var n uint64 n = EncodeVarint(buf, v) var d uint64 n = DecodeVarint(buf[:n], &d) if v != d { t.Errorf("error %d != %d", v, d) } if n != SizeofVarint(v) { t.Errorf("error %d != %d", n, len(buf[:n])) } }
explode_data.jsonl/58549
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 2078, 3962, 396, 1155, 353, 8840, 836, 8, 341, 2405, 6607, 284, 1281, 10556, 3782, 11, 220, 16, 15, 340, 2405, 348, 2622, 21, 19, 284, 220, 16, 17, 23, 198, 2405, 308, 2622, 21, 19, 198, 9038, 284, 56562, 3962, 396, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInformationSchemaCreateTime(t *testing.T) { store, clean := realtikvtest.CreateMockStoreAndSetup(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("create table t (c int)") tk.MustExec(`set @@time_zone = 'Asia/Shanghai'`) ret := tk.MustQuery("select create_time from information_schema.tables where table_name='t';") // Make sure t1 is greater than t. time.Sleep(time.Second) tk.MustExec("alter table t modify c int default 11") ret1 := tk.MustQuery("select create_time from information_schema.tables where table_name='t';") ret2 := tk.MustQuery("show table status like 't'") require.Equal(t, ret2.Rows()[0][11].(string), ret1.Rows()[0][0].(string)) typ1, err := types.ParseDatetime(nil, ret.Rows()[0][0].(string)) require.NoError(t, err) typ2, err := types.ParseDatetime(nil, ret1.Rows()[0][0].(string)) require.NoError(t, err) r := typ2.Compare(typ1) require.Equal(t, 1, r) // Check that time_zone changes makes the create_time different tk.MustExec(`set @@time_zone = 'Europe/Amsterdam'`) ret = tk.MustQuery(`select create_time from information_schema.tables where table_name='t'`) ret2 = tk.MustQuery(`show table status like 't'`) require.Equal(t, ret2.Rows()[0][11].(string), ret.Rows()[0][0].(string)) typ3, err := types.ParseDatetime(nil, ret.Rows()[0][0].(string)) require.NoError(t, err) // Asia/Shanghai 2022-02-17 17:40:05 > Europe/Amsterdam 2022-02-17 10:40:05 r = typ2.Compare(typ3) require.Equal(t, 1, r) }
explode_data.jsonl/5773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 592 }
[ 2830, 3393, 14873, 8632, 53251, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1931, 83, 1579, 85, 1944, 7251, 11571, 6093, 3036, 21821, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLightModulesAsModuleSource(t *testing.T) { logp.TestingSetup() type testMetricSet struct { name string module string isDefault bool hostParser HostParser } cases := map[string]struct { registered []testMetricSet expectedMetricSets map[string][]string expectedDefaultMetricSets map[string][]string }{ "no registered modules": { expectedMetricSets: map[string][]string{ "service": []string{"metricset", "nondefault"}, "broken": []string{}, "empty": []string{}, }, expectedDefaultMetricSets: map[string][]string{ "service": []string{"metricset"}, "broken": []string{}, "empty": []string{}, }, }, "same module registered (mixed modules case)": { registered: []testMetricSet{ {name: "other", module: "service"}, }, expectedMetricSets: map[string][]string{ "service": []string{"metricset", "nondefault", "other"}, }, expectedDefaultMetricSets: map[string][]string{ "service": []string{"metricset"}, }, }, "some metricsets registered": { registered: []testMetricSet{ {name: "other", module: "service"}, {name: "metricset", module: "something", isDefault: true}, {name: "metricset", module: "someotherthing"}, }, expectedMetricSets: map[string][]string{ "service": []string{"metricset", "nondefault", "other"}, "something": []string{"metricset"}, "someotherthing": []string{"metricset"}, }, expectedDefaultMetricSets: map[string][]string{ "service": []string{"metricset"}, "something": []string{"metricset"}, "someotherthing": []string{}, }, }, } fakeMetricSetFactory := func(base BaseMetricSet) (MetricSet, error) { return &base, nil } newRegistry := func(metricSets []testMetricSet) *Register { r := NewRegister() for _, m := range metricSets { opts := []MetricSetOption{} if m.isDefault { opts = append(opts, DefaultMetricSet()) } if m.hostParser != nil { opts = append(opts, WithHostParser(m.hostParser)) } r.MustAddMetricSet(m.module, m.name, fakeMetricSetFactory, opts...) 
} r.SetSecondarySource(NewLightModulesSource("testdata/lightmodules")) return r } for title, c := range cases { t.Run(title, func(t *testing.T) { r := newRegistry(c.registered) // Check metricsets for module, metricSets := range c.expectedMetricSets { t.Run("metricsets for "+module, func(t *testing.T) { assert.ElementsMatch(t, metricSets, r.MetricSets(module)) }) } // Check default metricsets for module, expected := range c.expectedDefaultMetricSets { t.Run("default metricsets for "+module, func(t *testing.T) { found, err := r.DefaultMetricSets(module) if len(expected) > 0 { assert.NoError(t, err) assert.ElementsMatch(t, expected, found) } else { assert.Error(t, err, "error expected when there are no default metricsets") } }) } }) } }
explode_data.jsonl/9712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1232 }
[ 2830, 3393, 13911, 28201, 2121, 3332, 3608, 1155, 353, 8840, 836, 8, 341, 6725, 79, 8787, 287, 21821, 2822, 13158, 1273, 54310, 1649, 2036, 341, 197, 11609, 981, 914, 198, 197, 54020, 257, 914, 198, 197, 19907, 3675, 220, 1807, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSuObjectPackValue(t *testing.T) { test := func(v1 Value) { t.Helper() enc := pack.NewEncoder(50) packValue(v1, 0, enc) s := enc.String() dec := pack.NewDecoder(s) v2 := unpackValue(dec) assert.T(t).This(v2).Is(v1) } test(SuInt(123)) test(SuStr("hello")) }
explode_data.jsonl/7120
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 36459, 1190, 30684, 1130, 1155, 353, 8840, 836, 8, 341, 18185, 1669, 2915, 3747, 16, 5162, 8, 341, 197, 3244, 69282, 741, 197, 197, 954, 1669, 3769, 7121, 19921, 7, 20, 15, 340, 197, 3223, 473, 1130, 3747, 16, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStruct2Map(t *testing.T) { m, err := Struct2Map(Person{ Name: "Korbin", Age: 22, Gender: "男", Weight: 50, }) if err != nil { logger.Println("struct to map err:", err) return } fmt.Println(m) fmt.Println(Json2Map(`{"name": "korbin", "age": 22}`)) fmt.Println(Map2Json(map[string]string{"name": "korbin", "age": "22"})) stu := Stu{} err = Map2Struct(map[string]interface{}{"name": "korbin", "age": 22}, &stu) if err != nil { logger.Println("map to struct err:", err) } fmt.Println(stu) }
explode_data.jsonl/37130
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 9422, 17, 2227, 1155, 353, 8840, 836, 8, 341, 2109, 11, 1848, 1669, 16139, 17, 2227, 58174, 515, 197, 21297, 25, 256, 330, 42, 269, 6863, 756, 197, 197, 16749, 25, 262, 220, 17, 17, 345, 197, 9600, 1659, 25, 330, 70108...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTest(t *testing.T) { tests := testSimpleCases() for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got, _ := Test(tt.args.pattern, tt.args.subject); got != tt.want { t.Errorf("Test() = %v, want %v", got, tt.want) } }) } testsErrors := testErrorCases() for _, tt := range testsErrors { t.Run(tt.name, func(t *testing.T) { if got, err := Test(tt.args.pattern, tt.args.subject); got != tt.want.result || err.Error() != tt.want.message { t.Errorf( "Test(%s, %s) = {%v, %v}, want %v", tt.args.pattern, tt.args.subject, got, err.Error(), tt.want, ) } }) } }
explode_data.jsonl/48664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 314 }
[ 2830, 3393, 2271, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 1273, 16374, 37302, 2822, 2023, 8358, 17853, 1669, 2088, 7032, 341, 197, 3244, 16708, 47152, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 743, 2684, 11, 716, 1669, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEnsureKubeHostportChains(t *testing.T) { interfaceName := "cbr0" builtinChains := []string{"PREROUTING", "OUTPUT"} jumpRule := "-m comment --comment \"kube hostport portals\" -m addrtype --dst-type LOCAL -j KUBE-HOSTPORTS" masqRule := "-m comment --comment \"SNAT for localhost access to hostports\" -o cbr0 -s 127.0.0.0/8 -j MASQUERADE" fakeIPTables := NewFakeIPTables() assert.NoError(t, ensureKubeHostportChains(fakeIPTables, interfaceName)) _, _, err := fakeIPTables.getChain(utiliptables.TableNAT, utiliptables.Chain("KUBE-HOSTPORTS")) assert.NoError(t, err) _, chain, err := fakeIPTables.getChain(utiliptables.TableNAT, utiliptables.ChainPostrouting) assert.NoError(t, err) assert.EqualValues(t, len(chain.rules), 1) assert.Contains(t, chain.rules[0], masqRule) for _, chainName := range builtinChains { _, chain, err := fakeIPTables.getChain(utiliptables.TableNAT, utiliptables.Chain(chainName)) assert.NoError(t, err) assert.EqualValues(t, len(chain.rules), 1) assert.Contains(t, chain.rules[0], jumpRule) } }
explode_data.jsonl/19427
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 64439, 42, 3760, 9296, 403, 1143, 1735, 1155, 353, 8840, 836, 8, 341, 58915, 1564, 675, 1669, 330, 66, 1323, 15, 698, 2233, 25628, 1143, 1735, 1669, 3056, 917, 4913, 6480, 640, 3656, 1718, 497, 330, 30301, 16707, 12428, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetLatestReleaseTagName_Errors(t *testing.T) { _, err := GetLatestReleaseTagName("error", "error") assert.EqualError(t, err, `failed to get latest release tag name on GitHub ("https://github.com/error/error/releases/latest"), status: 404 Not Found`) }
explode_data.jsonl/33574
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 1949, 31992, 16077, 22616, 93623, 1087, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 2126, 31992, 16077, 22616, 445, 841, 497, 330, 841, 5130, 6948, 12808, 1454, 1155, 11, 1848, 11, 1565, 16091, 311, 633, 5535, 4879,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRecoverPendingConfig(t *testing.T) { tests := []struct { entType pb.EntryType wpending bool }{ {pb.EntryNormal, false}, {pb.EntryConfChange, true}, } for i, tt := range tests { r := newRaft(1, []uint64{1, 2}, 10, 1, NewMemoryStorage(), 0) r.appendEntry(pb.Entry{Type: tt.entType}) r.becomeCandidate() r.becomeLeader() if r.pendingConf != tt.wpending { t.Errorf("#%d: pendingConf = %v, want %v", i, r.pendingConf, tt.wpending) } } }
explode_data.jsonl/67367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 693, 3688, 32027, 2648, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 77655, 929, 220, 17310, 22330, 929, 198, 197, 6692, 28210, 1807, 198, 197, 59403, 197, 197, 90, 16650, 22330, 12206, 11, 895, 1583, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGroupObjxMap(t *testing.T) { v := &Value{data: [](Map){(Map)(New(1)), (Map)(New(1)), (Map)(New(1)), (Map)(New(1)), (Map)(New(1)), (Map)(New(1))}} grouped := v.GroupObjxMap(func(i int, val Map) string { return fmt.Sprintf("%v", i%2 == 0) }).data.(map[string][](Map)) assert.Equal(t, 2, len(grouped)) assert.Equal(t, 3, len(grouped["true"])) assert.Equal(t, 3, len(grouped["false"])) }
explode_data.jsonl/23397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 2808, 5261, 87, 2227, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 39444, 2227, 6098, 7, 2227, 2376, 3564, 7, 16, 5731, 320, 2227, 2376, 3564, 7, 16, 5731, 320, 2227, 2376, 3564, 7, 16, 5731, 320,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDiscoverViaProxy(t *testing.T) { // This (complicated) test tests that when the driver is given an initial host // that is infact a proxy it discovers the rest of the ring behind the proxy // and does not store the proxies address as a host in its connection pool. proxy, err := net.Listen("tcp", "localhost:0") if err != nil { t.Fatalf("unable to create proxy listener: %v", err) } ctx, cancel := context.WithCancel(context.Background()) defer cancel() var ( mu sync.Mutex proxyConns []net.Conn closed bool ) go func() { cassandraAddr := JoinHostPort(clusterHosts[0], 9042) cassandra := func() (net.Conn, error) { return net.Dial("tcp", cassandraAddr) } proxyFn := func(errs chan error, from, to net.Conn) { _, err := io.Copy(to, from) if err != nil { errs <- err } } // handle dials cassandra and then proxies requests and reponsess. It waits // for both the read and write side of the TCP connection to close before // returning. handle := func(conn net.Conn) error { cass, err := cassandra() if err != nil { return err } defer cass.Close() errs := make(chan error, 2) go proxyFn(errs, conn, cass) go proxyFn(errs, cass, conn) select { case <-ctx.Done(): return ctx.Err() case err := <-errs: return err } } for { // proxy just accepts connections and then proxies them to cassandra, // it runs until it is closed. 
conn, err := proxy.Accept() if err != nil { mu.Lock() if !closed { t.Error(err) } mu.Unlock() return } mu.Lock() proxyConns = append(proxyConns, conn) mu.Unlock() go func(conn net.Conn) { defer conn.Close() if err := handle(conn); err != nil { mu.Lock() if !closed { t.Error(err) } mu.Unlock() } }(conn) } }() proxyAddr := proxy.Addr().String() cluster := createCluster() cluster.NumConns = 1 // initial host is the proxy address cluster.Hosts = []string{proxyAddr} session := createSessionFromCluster(cluster, t) defer session.Close() // we shouldnt need this but to be safe time.Sleep(1 * time.Second) session.pool.mu.RLock() for _, host := range clusterHosts { if _, ok := session.pool.hostConnPools[host]; !ok { t.Errorf("missing host in pool after discovery: %q", host) } } session.pool.mu.RUnlock() mu.Lock() closed = true if err := proxy.Close(); err != nil { t.Log(err) } for _, conn := range proxyConns { if err := conn.Close(); err != nil { t.Log(err) } } mu.Unlock() }
explode_data.jsonl/11187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1051 }
[ 2830, 3393, 50002, 54428, 16219, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 320, 5689, 13724, 8, 1273, 7032, 429, 979, 279, 5579, 374, 2661, 458, 2856, 3468, 198, 197, 322, 429, 374, 4132, 531, 264, 13291, 432, 51014, 279, 2732, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTemplateWithoutJsonCamelCase(t *testing.T) { var tests = []struct { input string expected string }{ {"/test/{test_id}", "/test/{test_id}"}, {"/test1/{test1_id}/test2/{test2_id}", "/test1/{test1_id}/test2/{test2_id}"}, {"/test1/{test1_id}/{test2_id}", "/test1/{test1_id}/{test2_id}"}, {"/test1/test2/{test1_id}/{test2_id}", "/test1/test2/{test1_id}/{test2_id}"}, {"/test1/{test1_id1_id2}", "/test1/{test1_id1_id2}"}, {"/test1/{test1_id1_id2}/test2/{test2_id3_id4}", "/test1/{test1_id1_id2}/test2/{test2_id3_id4}"}, {"/test1/test2/{test1_id1_id2}/{test2_id3_id4}", "/test1/test2/{test1_id1_id2}/{test2_id3_id4}"}, {"test/{a}", "test/{a}"}, {"test/{ab}", "test/{ab}"}, {"test/{a_a}", "test/{a_a}"}, {"test/{json_name}", "test/{json_name}"}, {"test/{field_abc.field_newName}", "test/{field_abc.field_newName}"}, } reg := descriptor.NewRegistry() reg.SetUseJSONNamesForFields(false) for _, data := range tests { actual := templateToOpenAPIPath(data.input, reg, generateFieldsForJSONReservedName(), generateMsgsForJSONReservedName()) if data.expected != actual { t.Errorf("Expected templateToOpenAPIPath(%v) = %v, actual: %v", data.input, data.expected, actual) } } }
explode_data.jsonl/32799
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 592 }
[ 2830, 3393, 7275, 26040, 5014, 25406, 301, 4207, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 22427, 262, 914, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 90, 3115, 1944, 9388, 1944, 842, 9545, 3521, 1944,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetMetadataReturnsError(t *testing.T) { t.Parallel() Convey("When the api cannot connect to datastore return an internal server error", t, func() { r := httptest.NewRequest("GET", "http://localhost:22000/datasets/123/editions/2017/versions/1/metadata", nil) w := httptest.NewRecorder() mockedDataStore := &storetest.StorerMock{ GetVersionFunc: func(datasetID, edition, version, state string) (*models.Version, error) { return nil, errs.ErrInternalServer }, } datasetPermissions := getAuthorisationHandlerMock() permissions := getAuthorisationHandlerMock() api := GetAPIWithMocks(mockedDataStore, &mocks.DownloadsGeneratorMock{}, datasetPermissions, permissions) api.Router.ServeHTTP(w, r) So(w.Code, ShouldEqual, http.StatusInternalServerError) So(datasetPermissions.Required.Calls, ShouldEqual, 1) So(permissions.Required.Calls, ShouldEqual, 0) So(w.Body.String(), ShouldContainSubstring, errs.ErrInternalServer.Error()) So(len(mockedDataStore.GetVersionCalls()), ShouldEqual, 1) So(len(mockedDataStore.GetDatasetCalls()), ShouldEqual, 0) }) Convey("When the dataset document cannot be found return status not found", t, func() { r := httptest.NewRequest("GET", "http://localhost:22000/datasets/123/editions/2017/versions/1/metadata", nil) w := httptest.NewRecorder() mockedDataStore := &storetest.StorerMock{ GetDatasetFunc: func(datasetID string) (*models.DatasetUpdate, error) { return nil, errs.ErrDatasetNotFound }, GetVersionFunc: func(datasetID, edition, version, state string) (*models.Version, error) { return nil, nil }, } datasetPermissions := getAuthorisationHandlerMock() permissions := getAuthorisationHandlerMock() api := GetAPIWithMocks(mockedDataStore, &mocks.DownloadsGeneratorMock{}, datasetPermissions, permissions) api.Router.ServeHTTP(w, r) So(w.Code, ShouldEqual, http.StatusNotFound) So(w.Body.String(), ShouldContainSubstring, errs.ErrDatasetNotFound.Error()) So(datasetPermissions.Required.Calls, ShouldEqual, 1) So(permissions.Required.Calls, ShouldEqual, 0) 
So(len(mockedDataStore.GetDatasetCalls()), ShouldEqual, 1) So(len(mockedDataStore.CheckEditionExistsCalls()), ShouldEqual, 0) }) Convey("When the dataset document has no current sub document return status not found", t, func() { datasetDoc := createDatasetDoc() versionDoc := createPublishedVersionDoc() datasetDoc.Current = nil r := httptest.NewRequest("GET", "http://localhost:22000/datasets/123/editions/2017/versions/1/metadata", nil) w := httptest.NewRecorder() mockedDataStore := &storetest.StorerMock{ GetDatasetFunc: func(datasetID string) (*models.DatasetUpdate, error) { return datasetDoc, nil }, CheckEditionExistsFunc: func(datasetId, edition, state string) error { return nil }, GetVersionFunc: func(datasetID, edition, version, state string) (*models.Version, error) { return versionDoc, nil }, } datasetPermissions := getAuthorisationHandlerMock() permissions := getAuthorisationHandlerMock() api := GetAPIWithMocks(mockedDataStore, &mocks.DownloadsGeneratorMock{}, datasetPermissions, permissions) api.Router.ServeHTTP(w, r) So(w.Code, ShouldEqual, http.StatusNotFound) So(w.Body.String(), ShouldContainSubstring, errs.ErrDatasetNotFound.Error()) So(datasetPermissions.Required.Calls, ShouldEqual, 1) So(permissions.Required.Calls, ShouldEqual, 0) So(len(mockedDataStore.GetDatasetCalls()), ShouldEqual, 1) So(len(mockedDataStore.CheckEditionExistsCalls()), ShouldEqual, 0) }) Convey("When the edition document cannot be found for version return status not found", t, func() { datasetDoc := createDatasetDoc() versionDoc := createPublishedVersionDoc() r := httptest.NewRequest("GET", "http://localhost:22000/datasets/123/editions/2017/versions/1/metadata", nil) w := httptest.NewRecorder() mockedDataStore := &storetest.StorerMock{ GetDatasetFunc: func(datasetID string) (*models.DatasetUpdate, error) { return datasetDoc, nil }, CheckEditionExistsFunc: func(datasetId, edition, state string) error { return errs.ErrEditionNotFound }, GetVersionFunc: func(datasetID, edition, 
version, state string) (*models.Version, error) { return versionDoc, nil }, } datasetPermissions := getAuthorisationHandlerMock() permissions := getAuthorisationHandlerMock() api := GetAPIWithMocks(mockedDataStore, &mocks.DownloadsGeneratorMock{}, datasetPermissions, permissions) api.Router.ServeHTTP(w, r) So(w.Code, ShouldEqual, http.StatusNotFound) So(w.Body.String(), ShouldContainSubstring, errs.ErrEditionNotFound.Error()) So(datasetPermissions.Required.Calls, ShouldEqual, 1) So(permissions.Required.Calls, ShouldEqual, 0) So(len(mockedDataStore.GetDatasetCalls()), ShouldEqual, 1) So(len(mockedDataStore.CheckEditionExistsCalls()), ShouldEqual, 1) So(len(mockedDataStore.GetVersionCalls()), ShouldEqual, 1) }) Convey("When the version document cannot be found return status not found", t, func() { datasetDoc := createDatasetDoc() r := httptest.NewRequest("GET", "http://localhost:22000/datasets/123/editions/2017/versions/1/metadata", nil) w := httptest.NewRecorder() mockedDataStore := &storetest.StorerMock{ GetDatasetFunc: func(datasetID string) (*models.DatasetUpdate, error) { return datasetDoc, nil }, CheckEditionExistsFunc: func(datasetId, edition, state string) error { return nil }, GetVersionFunc: func(datasetID, edition, version, state string) (*models.Version, error) { return nil, errs.ErrVersionNotFound }, } datasetPermissions := getAuthorisationHandlerMock() permissions := getAuthorisationHandlerMock() api := GetAPIWithMocks(mockedDataStore, &mocks.DownloadsGeneratorMock{}, datasetPermissions, permissions) api.Router.ServeHTTP(w, r) So(w.Code, ShouldEqual, http.StatusNotFound) So(w.Body.String(), ShouldContainSubstring, errs.ErrVersionNotFound.Error()) So(datasetPermissions.Required.Calls, ShouldEqual, 1) So(permissions.Required.Calls, ShouldEqual, 0) So(len(mockedDataStore.GetVersionCalls()), ShouldEqual, 1) So(len(mockedDataStore.GetDatasetCalls()), ShouldEqual, 0) So(len(mockedDataStore.CheckEditionExistsCalls()), ShouldEqual, 0) }) Convey("When the 
version document state is invalid return an internal server error", t, func() { datasetDoc := createDatasetDoc() r, err := createRequestWithAuth("GET", "http://localhost:22000/datasets/123/editions/2017/versions/1/metadata", nil) So(err, ShouldBeNil) w := httptest.NewRecorder() mockedDataStore := &storetest.StorerMock{ GetDatasetFunc: func(datasetID string) (*models.DatasetUpdate, error) { return datasetDoc, nil }, CheckEditionExistsFunc: func(datasetId, edition, state string) error { return nil }, GetVersionFunc: func(datasetID, edition, version, state string) (*models.Version, error) { return &models.Version{State: "gobbly-gook"}, nil }, } datasetPermissions := getAuthorisationHandlerMock() permissions := getAuthorisationHandlerMock() api := GetAPIWithMocks(mockedDataStore, &mocks.DownloadsGeneratorMock{}, datasetPermissions, permissions) api.Router.ServeHTTP(w, r) assertInternalServerErr(w) So(datasetPermissions.Required.Calls, ShouldEqual, 1) So(permissions.Required.Calls, ShouldEqual, 0) So(len(mockedDataStore.GetDatasetCalls()), ShouldEqual, 1) So(len(mockedDataStore.CheckEditionExistsCalls()), ShouldEqual, 1) So(len(mockedDataStore.GetVersionCalls()), ShouldEqual, 1) }) }
explode_data.jsonl/39399
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2782 }
[ 2830, 3393, 1949, 14610, 16446, 1454, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 93070, 5617, 445, 4498, 279, 6330, 4157, 4564, 311, 64986, 470, 458, 5306, 3538, 1465, 497, 259, 11, 2915, 368, 1476, 197, 7000, 1669, 54320, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecodeNortelDiscovery(t *testing.T) { // http://www.thetechfirm.com/packets/nortel_btdp/btdp_nai.enc data := []byte{ 0x01, 0x00, 0x81, 0x00, 0x01, 0x00, 0x00, 0x04, 0x38, 0xe0, 0xcc, 0xde, 0x00, 0x13, 0xaa, 0xaa, 0x03, 0x00, 0x00, 0x81, 0x01, 0xa2, 0xac, 0x13, 0x58, 0x03, 0x00, 0x04, 0x15, 0x30, 0x0c, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x04, 0x38, 0xe0, 0xcc, 0xde, 0x80, 0x6a, 0x00, 0x01, 0x14, 0x00, 0x02, 0x00, 0x0f, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, } p := gopacket.NewPacket(data, LinkTypeEthernet, testDecodeOptions) wantLayers := []gopacket.LayerType{LayerTypeEthernet, LayerTypeLLC, LayerTypeSNAP, LayerTypeNortelDiscovery} checkLayers(p, wantLayers, t) want := &NortelDiscovery{ IPAddress: []byte{172, 19, 88, 3}, SegmentID: []byte{0x00, 0x04, 0x15}, Chassis: NDPChassisBayStack450101001000Switches, Backplane: NDPBackplaneEthernetFastEthernetGigabitEthernet, State: NDPStateHeartbeat, NumLinks: 0, } ndpL := p.Layer(LayerTypeNortelDiscovery) info, _ := ndpL.(*NortelDiscovery) if !reflect.DeepEqual(info, want) { t.Errorf("Values mismatch, \ngot %#v\nwant %#v\n", info, want) } }
explode_data.jsonl/42256
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 600 }
[ 2830, 3393, 32564, 45, 371, 301, 67400, 1155, 353, 8840, 836, 8, 341, 197, 322, 1758, 1110, 2136, 13677, 295, 4737, 8802, 905, 4322, 18382, 9612, 371, 301, 880, 1296, 79, 3470, 1296, 79, 1089, 2143, 54444, 198, 8924, 1669, 3056, 3782,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTopLevelReturnForbiddenTLA(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` return await foo `, }, entryPaths: []string{"/entry.js"}, options: config.Options{ Mode: config.ModePassThrough, AbsOutputFile: "/out.js", }, expectedScanLog: `entry.js: error: Top-level return cannot be used inside an ECMAScript module entry.js: note: This file is considered an ECMAScript module because of the "await" keyword here `, }) }
explode_data.jsonl/38511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 5366, 4449, 5598, 69115, 13470, 32, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 853, 2535, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHistoryLog(t *testing.T) { db := newSQL(t) defer db.Close() bailIfErr(t, db.LogEvent("namespace", "service", "event 1")) bailIfErr(t, db.LogEvent("namespace", "service", "event 2")) bailIfErr(t, db.LogEvent("namespace", "other", "event 3")) es, err := db.EventsForService("namespace", "service") if err != nil { t.Fatal(err) } if len(es) != 2 { t.Fatalf("Expected 2 events, got %d\n", len(es)) } es, err = db.AllEvents("namespace") if err != nil { t.Fatal(err) } if len(es) != 3 { t.Fatalf("Expected 3 events, got %#v\n", es) } }
explode_data.jsonl/27172
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 13424, 2201, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 501, 6688, 1155, 340, 16867, 2927, 10421, 2822, 2233, 604, 2679, 7747, 1155, 11, 2927, 5247, 1556, 445, 2231, 497, 330, 7936, 497, 330, 3087, 220, 16, 5455, 2233, 604...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRecursiveSliceType(t *testing.T) { type recursiveSlice []recursiveSlice r1 := recursiveSlice{0: recursiveSlice{0: nil}, 1: nil} r2 := make(recursiveSlice, 0) if err := encAndDec(r1, &r2); err != nil { t.Error(err) } }
explode_data.jsonl/43390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 78542, 33236, 929, 1155, 353, 8840, 836, 8, 341, 13158, 30819, 33236, 3056, 49512, 33236, 198, 7000, 16, 1669, 30819, 33236, 90, 15, 25, 30819, 33236, 90, 15, 25, 2092, 2137, 220, 16, 25, 2092, 532, 7000, 17, 1669, 1281, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_MongoDBCollectionGetProperties_Status_ResourceARM_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip of MongoDBCollectionGetProperties_Status_ResourceARM via JSON returns original", prop.ForAll(RunJSONSerializationTestForMongoDBCollectionGetPropertiesStatusResourceARM, MongoDBCollectionGetPropertiesStatusResourceARMGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout)) }
explode_data.jsonl/9504
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 1245, 6363, 3506, 6482, 1949, 7903, 36449, 86346, 17911, 62, 4498, 77521, 78967, 98054, 2848, 4756, 2121, 2993, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 1453...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsStickyBuffer(t *testing.T) { for _, tt := range []struct { buf string want bool }{ { buf: "pkt_data", want: true, }, { buf: "foobarbaz", want: false, }, { buf: "http_request_line", want: true, }, } { got := isStickyBuffer(tt.buf) if got != tt.want { t.Fatalf("got=%v; want=%v", got, tt.want) } } }
explode_data.jsonl/59696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 3872, 623, 18964, 4095, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 26398, 220, 914, 198, 197, 50780, 1807, 198, 197, 59403, 197, 197, 515, 298, 26398, 25, 220, 330, 46775, 1769, 756,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFindMultiple(t *testing.T) { table := Load(data) result := Find(table, "UNITED") if len(result) < 5 { t.Log("Found ", len(result)) t.Fatal("Find failed to find several matches") } }
explode_data.jsonl/5875
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 9885, 32089, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 8893, 2592, 340, 9559, 1669, 7379, 15761, 11, 330, 23040, 1479, 1138, 743, 2422, 4456, 8, 366, 220, 20, 341, 197, 3244, 5247, 445, 6650, 3670, 2422, 4456, 1171, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestAnonymousFallback(t *testing.T) { client := fakeclient.NewSimpleClientset(&corev1.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: "default", Namespace: "default", }, }) kc, err := New(client, Options{}) if err != nil { t.Errorf("New() = %v", err) } reg, err := name.NewRegistry("fake.registry.io", name.WeakValidation) if err != nil { t.Errorf("NewRegistry() = %v", err) } auth, err := kc.Resolve(reg) if err != nil { t.Errorf("Resolve(%v) = %v", reg, err) } if got, want := auth, authn.Anonymous; got != want { t.Errorf("Resolve() = %v, want %v", got, want) } }
explode_data.jsonl/76048
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 262 }
[ 2830, 3393, 32684, 87206, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 12418, 2972, 7121, 16374, 2959, 746, 2099, 98645, 16, 13860, 7365, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 414, 330, 2258, 756, 298, 90823, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetSchedulable(t *testing.T) { g := testGraph(t) tcs := []struct { name string finished []string expectedTasks sets.String }{{ name: "nothing-done", finished: []string{}, expectedTasks: sets.NewString("a", "b"), }, { name: "a-done", finished: []string{"a"}, expectedTasks: sets.NewString("b", "x"), }, { name: "b-done", finished: []string{"b"}, expectedTasks: sets.NewString("a"), }, { name: "a-and-b-done", finished: []string{"a", "b"}, expectedTasks: sets.NewString("x"), }, { name: "a-x-done", finished: []string{"a", "x"}, expectedTasks: sets.NewString("b", "y", "z"), }, { name: "a-x-b-done", finished: []string{"a", "x", "b"}, expectedTasks: sets.NewString("y", "z"), }, { name: "a-x-y-done", finished: []string{"a", "x", "y"}, expectedTasks: sets.NewString("b", "z"), }, { name: "a-x-y-done", finished: []string{"a", "x", "y"}, expectedTasks: sets.NewString("b", "z"), }, { name: "a-x-y-b-done", finished: []string{"a", "x", "y", "b"}, expectedTasks: sets.NewString("w", "z"), }} for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { tasks, err := dag.GetSchedulable(g, tc.finished...) if err != nil { t.Fatalf("Didn't expect error when getting next tasks for %v but got %v", tc.finished, err) } if d := cmp.Diff(tasks, tc.expectedTasks, cmpopts.IgnoreFields(v1alpha1.PipelineTask{}, "RunAfter")); d != "" { t.Errorf("expected that with %v done, %v would be ready to schedule but was different: %s", tc.finished, tc.expectedTasks, diff.PrintWantGot(d)) } }) } }
explode_data.jsonl/69245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 832 }
[ 2830, 3393, 1949, 50, 2397, 360, 480, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1273, 11212, 1155, 340, 3244, 4837, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 1166, 24657, 414, 3056, 917, 198, 197, 42400, 25449, 7289, 6431...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestScannerMinMaxIdleTime(t *testing.T) { defer leaktest.AfterTest(t)() const targetInterval = 100 * time.Millisecond const minIdleTime = 10 * time.Millisecond const maxIdleTime = 15 * time.Millisecond for count := range []int{1, 10, 20, 100} { startTime := timeutil.Now() ranges := newTestRangeSet(count, t) s := newReplicaScanner(makeAmbCtx(), nil, targetInterval, minIdleTime, maxIdleTime, ranges) if interval := s.paceInterval(startTime, startTime); interval < minIdleTime || interval > maxIdleTime { t.Errorf("expected interval %s <= %s <= %s", minIdleTime, interval, maxIdleTime) } } }
explode_data.jsonl/78119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 31002, 92304, 41370, 1462, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 4777, 2169, 10256, 284, 220, 16, 15, 15, 353, 882, 71482, 198, 4777, 1308, 41370, 1462, 284, 220, 16, 15, 353, 882, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetUnusedMarginFunds(t *testing.T) { t.Parallel() _, err := b.GetUnusedMarginFunds() if err == nil { t.Error("Test Failed - GetUnusedMarginFunds() error", err) } }
explode_data.jsonl/79964
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 74 }
[ 2830, 3393, 1949, 94033, 21681, 37, 42950, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 6878, 1848, 1669, 293, 2234, 94033, 21681, 37, 42950, 741, 743, 1848, 621, 2092, 341, 197, 3244, 6141, 445, 2271, 21379, 481, 2126, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSetLenCap(t *testing.T) { xs := []int{1, 2, 3, 4, 5, 6, 7, 8} xa := [8]int{10, 20, 30, 40, 50, 60, 70, 80} vs := ValueOf(&xs).Elem() shouldPanic(func() { vs.SetLen(10) }) shouldPanic(func() { vs.SetCap(10) }) shouldPanic(func() { vs.SetLen(-1) }) shouldPanic(func() { vs.SetCap(-1) }) shouldPanic(func() { vs.SetCap(6) }) // smaller than len vs.SetLen(5) if len(xs) != 5 || cap(xs) != 8 { t.Errorf("after SetLen(5), len, cap = %d, %d, want 5, 8", len(xs), cap(xs)) } vs.SetCap(6) if len(xs) != 5 || cap(xs) != 6 { t.Errorf("after SetCap(6), len, cap = %d, %d, want 5, 6", len(xs), cap(xs)) } vs.SetCap(5) if len(xs) != 5 || cap(xs) != 5 { t.Errorf("after SetCap(5), len, cap = %d, %d, want 5, 5", len(xs), cap(xs)) } shouldPanic(func() { vs.SetCap(4) }) // smaller than len shouldPanic(func() { vs.SetLen(6) }) // bigger than cap va := ValueOf(&xa).Elem() shouldPanic(func() { va.SetLen(8) }) shouldPanic(func() { va.SetCap(8) }) }
explode_data.jsonl/29579
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 450 }
[ 2830, 3393, 1649, 11271, 12903, 1155, 353, 8840, 836, 8, 341, 10225, 82, 1669, 3056, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, 220, 21, 11, 220, 22, 11, 220, 23, 532, 197, 9591, 1669, 508, 23, 63025,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGrantSucccessAppLevelMixed(t *testing.T) { assert := assert.New(t) pn := pubnub.NewPubNub(pamConfigCopy()) pn.Config.UUID = "asd,|//&aqwe" res, _, err := pn.Grant(). Read(false).Write(true).Manage(false).Delete(true). Execute() assert.Nil(err) log.Println(res) assert.NotNil(res) assert.True(res.WriteEnabled) assert.True(!res.ReadEnabled) assert.True(!res.ManageEnabled) assert.True(res.DeleteEnabled) }
explode_data.jsonl/43187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 67971, 50, 14570, 1120, 2164, 4449, 86433, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 3223, 77, 1669, 6675, 77, 392, 7121, 29162, 45, 392, 1295, 309, 2648, 12106, 12367, 3223, 77, 10753, 39636, 284, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDownloading(t *testing.T) { t.Run("downloadOkFileTest", downloadOkFileTest) t.Run("downloadNotFoundTest", downloadNotFoundTest) t.Run("downloadOkFileContentTest", downloadOkFileContentTest) t.Run("downloadTimeoutContextTest", downloadTimeoutContextTest) t.Run("downloadHeadNotSupported", downloadHeadNotSupported) t.Run("downloadPartialContentNotSupportedTest", downloadPartialContentNotSupportedTest) t.Run("getFilenameTest", getFilenameTest) t.Run("coverTests", coverTests) }
explode_data.jsonl/55969
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 76017, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 12885, 11578, 1703, 2271, 497, 4139, 11578, 1703, 2271, 340, 3244, 16708, 445, 12885, 10372, 2271, 497, 4139, 10372, 2271, 340, 3244, 16708, 445, 12885, 11578, 1703, 2762...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAllGoFileInfo(t *testing.T) { searchDir := "testdata/pet" p := New() err := p.getAllGoFileInfo("testdata", searchDir) assert.NoError(t, err) assert.Equal(t, 2, len(p.packages.files)) }
explode_data.jsonl/63550
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 1949, 2403, 10850, 45430, 1155, 353, 8840, 836, 8, 341, 45573, 6184, 1669, 330, 92425, 4322, 295, 1837, 3223, 1669, 1532, 741, 9859, 1669, 281, 28103, 10850, 45430, 445, 92425, 497, 2711, 6184, 692, 6948, 35699, 1155, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSchemaDiff(t *testing.T) { old := mkSchema( mkTable("removed"), mkTable( "shared", mkCol("foo", TextColumn, false, false, nil), ), ) new := mkSchema( mkTable( "shared", mkCol("foo", TextColumn, false, false, nil), mkCol("bar", TextColumn, false, false, nil), ), mkTable("new"), ) expected := ChangeSet{ &DropTable{"removed"}, &AddColumn{mkCol("bar", TextColumn, false, false, nil), "shared"}, &CreateTable{mkTable("new")}, } require.Equal(t, expected, SchemaDiff(old, new)) }
explode_data.jsonl/19113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 8632, 21751, 1155, 353, 8840, 836, 8, 341, 61828, 1669, 23789, 8632, 1006, 197, 2109, 74, 2556, 445, 45756, 4461, 197, 2109, 74, 2556, 1006, 298, 197, 1, 6100, 756, 298, 2109, 74, 6127, 445, 7975, 497, 2918, 2933, 11, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValuesFlat(t *testing.T) { var list ParamsList qs := dORM.QueryTable("user") num, err := qs.OrderBy("id").ValuesFlat(&list, "UserName") throwFail(t, err) throwFail(t, AssertIs(num, 3)) if num == 3 { throwFail(t, AssertIs(list[0], "slene")) throwFail(t, AssertIs(list[1], "astaxie")) throwFail(t, AssertIs(list[2], "nobody")) } }
explode_data.jsonl/18138
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 6227, 31019, 1155, 353, 8840, 836, 8, 341, 2405, 1140, 34352, 852, 198, 18534, 82, 1669, 294, 4365, 15685, 2556, 445, 872, 5130, 22431, 11, 1848, 1669, 32421, 43040, 445, 307, 1827, 6227, 31019, 2099, 1607, 11, 330, 18856, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLoggingServiceV2ListLogEntries(t *testing.T) { var nextPageToken string = "" var entriesElement *loggingpb.LogEntry = &loggingpb.LogEntry{} var entries = []*loggingpb.LogEntry{entriesElement} var expectedResponse = &loggingpb.ListLogEntriesResponse{ NextPageToken: nextPageToken, Entries: entries, } mockLogging.err = nil mockLogging.reqs = nil mockLogging.resps = append(mockLogging.resps[:0], expectedResponse) var resourceNames []string = nil var request = &loggingpb.ListLogEntriesRequest{ ResourceNames: resourceNames, } c, err := NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } resp, err := c.ListLogEntries(context.Background(), request).Next() if err != nil { t.Fatal(err) } if want, got := request, mockLogging.reqs[0]; !proto.Equal(want, got) { t.Errorf("wrong request %q, want %q", got, want) } want := (interface{})(expectedResponse.Entries[0]) got := (interface{})(resp) var ok bool switch want := (want).(type) { case proto.Message: ok = proto.Equal(want, got.(proto.Message)) default: ok = want == got } if !ok { t.Errorf("wrong response %q, want %q)", got, want) } }
explode_data.jsonl/77761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 34575, 1860, 53, 17, 852, 2201, 24533, 1155, 353, 8840, 836, 8, 341, 2405, 83595, 3323, 914, 284, 8389, 2405, 10695, 1691, 353, 25263, 16650, 5247, 5874, 284, 609, 25263, 16650, 5247, 5874, 16094, 2405, 10695, 284, 29838, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_AssertExpectationsForObjects_Helper(t *testing.T) { var mockedService1 = new(TestExampleImplementation) var mockedService2 = new(TestExampleImplementation) var mockedService3 = new(TestExampleImplementation) mockedService1.On("Test_AssertExpectationsForObjects_Helper", 1).Return() mockedService2.On("Test_AssertExpectationsForObjects_Helper", 2).Return() mockedService3.On("Test_AssertExpectationsForObjects_Helper", 3).Return() mockedService1.Called(1) mockedService2.Called(2) mockedService3.Called(3) assert.True(t, AssertExpectationsForObjects(t, &mockedService1.Mock, &mockedService2.Mock, &mockedService3.Mock)) assert.True(t, AssertExpectationsForObjects(t, mockedService1, mockedService2, mockedService3)) }
explode_data.jsonl/8597
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 278 }
[ 2830, 3393, 62222, 529, 17536, 804, 2461, 11543, 67828, 1155, 353, 8840, 836, 8, 8022, 2405, 46149, 1860, 16, 284, 501, 31159, 13314, 36850, 1218, 2405, 46149, 1860, 17, 284, 501, 31159, 13314, 36850, 1218, 2405, 46149, 1860, 18, 284, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetRepoIDsForIssuesOptions(t *testing.T) { assert.NoError(t, unittest.PrepareTestDatabase()) user := unittest.AssertExistsAndLoadBean(t, &User{ID: 2}).(*User) for _, test := range []struct { Opts IssuesOptions ExpectedRepoIDs []int64 }{ { IssuesOptions{ AssigneeID: 2, }, []int64{3}, }, { IssuesOptions{ RepoIDs: []int64{1, 2}, }, []int64{1, 2}, }, } { repoIDs, err := GetRepoIDsForIssuesOptions(&test.Opts, user) assert.NoError(t, err) if assert.Len(t, repoIDs, len(test.ExpectedRepoIDs)) { for i, repoID := range repoIDs { assert.EqualValues(t, test.ExpectedRepoIDs[i], repoID) } } } }
explode_data.jsonl/46846
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 321 }
[ 2830, 3393, 1949, 25243, 30466, 2461, 85828, 3798, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 19905, 28770, 3380, 2271, 5988, 2398, 19060, 1669, 19905, 11711, 15575, 3036, 5879, 10437, 1155, 11, 609, 1474, 90, 915, 25, 220, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestFieldValuerAndSetter exercises the per-field accessors that
// schema.Parse generates for tests.User: reading the current field values,
// assigning plain Go values, and assigning compatible wrapper types
// (sql.Null*, pointers, locally defined types).
func TestFieldValuerAndSetter(t *testing.T) {
	var (
		userSchema, _ = schema.Parse(&tests.User{}, &sync.Map{}, schema.NamingStrategy{})
		user          = tests.User{
			Model: gorm.Model{
				ID:        10,
				CreatedAt: time.Now(),
				DeletedAt: gorm.DeletedAt{Time: time.Now(), Valid: true},
			},
			Name:     "valuer_and_setter",
			Age:      18,
			Birthday: tests.Now(),
			Active:   true,
		}
		// Setter/valuer calls operate on a reflect.Value of the struct pointer.
		reflectValue = reflect.ValueOf(&user)
	)

	// test valuer: each DB-named field must report the value set above.
	values := map[string]interface{}{
		"name":       user.Name,
		"id":         user.ID,
		"created_at": user.CreatedAt,
		"deleted_at": user.DeletedAt,
		"age":        user.Age,
		"birthday":   user.Birthday,
		"active":     true,
	}
	checkField(t, userSchema, reflectValue, values)

	// test setter: assign plain values of the fields' own types.
	newValues := map[string]interface{}{
		"name":       "valuer_and_setter_2",
		"id":         2,
		"created_at": time.Now(),
		"deleted_at": time.Now(),
		"age":        20,
		"birthday":   time.Now(),
		"active":     false,
	}

	for k, v := range newValues {
		if err := userSchema.FieldsByDBName[k].Set(reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	checkField(t, userSchema, reflectValue, newValues)

	// test valuer and other type: setters must also accept wrapper types
	// (sql.NullString, *sql.NullInt64), pointers, and defined types
	// (myint, mytime, mybool) that convert to the field types.
	age := myint(10)
	newValues2 := map[string]interface{}{
		"name":       sql.NullString{String: "valuer_and_setter_3", Valid: true},
		"id":         &sql.NullInt64{Int64: 3, Valid: true},
		"created_at": tests.Now(),
		"deleted_at": time.Now(),
		"age":        &age,
		"birthday":   mytime(time.Now()),
		"active":     mybool(true),
	}

	for k, v := range newValues2 {
		if err := userSchema.FieldsByDBName[k].Set(reflectValue, v); err != nil {
			t.Errorf("no error should happen when assign value to field %v, but got %v", k, err)
		}
	}
	checkField(t, userSchema, reflectValue, newValues2)
}
explode_data.jsonl/52679
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 843 }
[ 2830, 3393, 1877, 2208, 8801, 3036, 44294, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 19060, 8632, 11, 716, 284, 10802, 8937, 2099, 23841, 7344, 22655, 609, 12996, 10104, 22655, 10802, 2067, 6469, 19816, 37790, 197, 19060, 688, 284, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDb_ReadWrite(t *testing.T) { trun(t, func(h *dbHarness) { h.put("foo", "v1") h.getVal("foo", "v1") h.put("bar", "v2") h.put("foo", "v3") h.getVal("foo", "v3") h.getVal("bar", "v2") h.reopenDB() h.getVal("foo", "v3") h.getVal("bar", "v2") }) }
explode_data.jsonl/6002
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 7994, 38381, 7985, 1155, 353, 8840, 836, 8, 341, 25583, 359, 1155, 11, 2915, 3203, 353, 1999, 74248, 8, 341, 197, 9598, 3597, 445, 7975, 497, 330, 85, 16, 1138, 197, 9598, 670, 2208, 445, 7975, 497, 330, 85, 16, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHTTPTomcat6(t *testing.T) { ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) fmt.Fprintln(w, tomcatStatus6) })) defer ts.Close() tc := Tomcat{ URL: ts.URL, Username: "tomcat", Password: "s3cret", } var acc testutil.Accumulator err := tc.Gather(&acc) require.NoError(t, err) // tomcat_jvm_memory jvmMemoryFields := map[string]interface{}{ "free": int64(1942681600), "total": int64(2040070144), "max": int64(2040070144), } acc.AssertContainsFields(t, "tomcat_jvm_memory", jvmMemoryFields) // tomcat_connector connectorFields := map[string]interface{}{ "bytes_received": int64(0), "bytes_sent": int64(550196), "current_thread_count": int64(2), "current_threads_busy": int64(2), "error_count": int(16), "max_threads": int64(150), "max_time": int(1005), "processing_time": int(2465), "request_count": int(436), } connectorTags := map[string]string{ "name": "http-8080", } acc.AssertContainsTaggedFields(t, "tomcat_connector", connectorFields, connectorTags) }
explode_data.jsonl/68784
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 529 }
[ 2830, 3393, 2545, 51, 2828, 316, 4616, 21, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 6692, 69794, 19886, 52989, 340, 197, 11009, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseComposition(t *testing.T) { searchDir := "testdata/composition" p := New() err := p.ParseAPI(searchDir, mainAPIFile, defaultParseDepth) assert.NoError(t, err) expected, err := ioutil.ReadFile(filepath.Join(searchDir, "expected.json")) assert.NoError(t, err) b, _ := json.MarshalIndent(p.swagger, "", " ") //windows will fail: \r\n \n assert.Equal(t, string(expected), string(b)) }
explode_data.jsonl/63560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 14463, 75683, 1155, 353, 8840, 836, 8, 341, 45573, 6184, 1669, 330, 92425, 25093, 3487, 698, 3223, 1669, 1532, 741, 9859, 1669, 281, 8937, 7082, 20447, 6184, 11, 1887, 7082, 1703, 11, 1638, 14463, 19776, 340, 6948, 35699, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestParsePrivateCloudID table-tests ParsePrivateCloudID: every URI prefix
// shorter than a full private-cloud resource ID must fail, a well-formed ID
// must parse into its subscription / resource-group / name components, and a
// trailing extra segment must be rejected.
func TestParsePrivateCloudID(t *testing.T) {
	testData := []struct {
		Input    string
		Error    bool
		Expected *PrivateCloudId
	}{
		{
			// Incomplete URI
			Input: "",
			Error: true,
		},
		{
			// Incomplete URI
			Input: "/subscriptions",
			Error: true,
		},
		{
			// Incomplete URI
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012",
			Error: true,
		},
		{
			// Incomplete URI
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups",
			Error: true,
		},
		{
			// Incomplete URI
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/example-resource-group",
			Error: true,
		},
		{
			// Incomplete URI
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/example-resource-group/providers",
			Error: true,
		},
		{
			// Incomplete URI
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/example-resource-group/providers/Microsoft.AVS",
			Error: true,
		},
		{
			// Incomplete URI
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/example-resource-group/providers/Microsoft.AVS/privateClouds",
			Error: true,
		},
		{
			// Valid URI
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/example-resource-group/providers/Microsoft.AVS/privateClouds/privateCloudValue",
			Expected: &PrivateCloudId{
				SubscriptionId:    "12345678-1234-9876-4563-123456789012",
				ResourceGroupName: "example-resource-group",
				PrivateCloudName:  "privateCloudValue",
			},
		},
		{
			// Invalid (Valid Uri with Extra segment)
			Input: "/subscriptions/12345678-1234-9876-4563-123456789012/resourceGroups/example-resource-group/providers/Microsoft.AVS/privateClouds/privateCloudValue/extra",
			Error: true,
		},
	}

	for _, v := range testData {
		t.Logf("[DEBUG] Testing %q", v.Input)

		actual, err := ParsePrivateCloudID(v.Input)
		if err != nil {
			// An error is acceptable only for cases marked Error.
			if v.Error {
				continue
			}
			t.Fatalf("Expect a value but got an error: %+v", err)
		}
		if v.Error {
			t.Fatal("Expect an error but didn't get one")
		}

		// Compare each parsed component against the expected ID.
		if actual.SubscriptionId != v.Expected.SubscriptionId {
			t.Fatalf("Expected %q but got %q for SubscriptionId", v.Expected.SubscriptionId, actual.SubscriptionId)
		}
		if actual.ResourceGroupName != v.Expected.ResourceGroupName {
			t.Fatalf("Expected %q but got %q for ResourceGroupName", v.Expected.ResourceGroupName, actual.ResourceGroupName)
		}
		if actual.PrivateCloudName != v.Expected.PrivateCloudName {
			t.Fatalf("Expected %q but got %q for PrivateCloudName", v.Expected.PrivateCloudName, actual.PrivateCloudName)
		}
	}
}
explode_data.jsonl/55234
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1069 }
[ 2830, 3393, 14463, 16787, 16055, 915, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 3056, 1235, 341, 197, 66588, 262, 914, 198, 197, 58421, 262, 1807, 198, 197, 197, 18896, 353, 16787, 16055, 764, 198, 197, 59403, 197, 197, 515, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRequest_Ver(t *testing.T) { tests := []struct { name string request *alice.Request want string }{ { name: "", request: getReq(0), want: "1.0", }, { name: "", request: getReq(1), want: "1.0", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { req := tt.request if got := req.Ver(); got != tt.want { t.Errorf("Request.Ver() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/18235
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 1900, 2334, 261, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 23555, 353, 63195, 9659, 198, 197, 50780, 262, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 262, 8324, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestAccounts_EditMultisig creates a 3-signer multisig account and then
// edits it, verifying that EditMultisig replaces the signer weights, the
// signer addresses, and the threshold on the stored account.
func TestAccounts_EditMultisig(t *testing.T) {
	mutableTree, _ := tree.NewMutableTree(0, db.NewMemDB(), 1024)
	b := bus.NewBus()
	b.SetChecker(checker.NewChecker(b))
	accounts, err := NewAccounts(b, mutableTree)
	if err != nil {
		t.Fatal(err)
	}

	// Multisig address derived from a seed address and nonce 12.
	msigAddress := CreateMultisigAddress([20]byte{4}, 12)

	// Initial multisig: weights 3/3/6, threshold 6. Creation/edit errors are
	// deliberately ignored here; the assertions below inspect the stored state.
	_ = accounts.CreateMultisig([]uint32{3, 3, 6}, []types.Address{[20]byte{1, 1}, [20]byte{2, 3}, [20]byte{3, 3}}, 6, msigAddress)
	// Edit to threshold 2, weights 1/1/2, and a new signer set.
	_ = accounts.EditMultisig(2, []uint32{1, 1, 2}, []types.Address{[20]byte{1}, [20]byte{2}, [20]byte{3}}, msigAddress)

	account := accounts.GetAccount(msigAddress)
	if account == nil {
		t.Fatal("account is nil")
	}
	if !account.IsMultisig() {
		t.Fatal("account is not multisig")
	}

	// The edited weights must be reported for the NEW addresses.
	multisig := account.Multisig()
	if multisig.GetWeight([20]byte{1}) != 1 {
		t.Fatal("address weight not equal 1")
	}
	if multisig.GetWeight([20]byte{2}) != 1 {
		t.Fatal("address weight not equal 1")
	}
	if multisig.GetWeight([20]byte{3}) != 2 {
		t.Fatal("address weight not equal 2")
	}
	if multisig.Threshold != 2 {
		t.Fatal("threshold not equal 2")
	}
}
explode_data.jsonl/23603
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 41369, 66158, 40404, 285, 343, 1155, 353, 8840, 836, 8, 341, 2109, 5922, 6533, 11, 716, 1669, 4916, 7121, 11217, 6533, 7, 15, 11, 2927, 7121, 18816, 3506, 1507, 220, 16, 15, 17, 19, 340, 2233, 1669, 5828, 7121, 15073, 74...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestSuspendTemplate drives a workflow containing a suspend template:
// operating it must park the workflow in a suspended state without scheduling
// any pods, and resuming it must allow the next step's pod to be created.
func TestSuspendTemplate(t *testing.T) {
	controller := newController()
	wfcset := controller.wfclientset.ArgoprojV1alpha1().Workflows("")

	// operate the workflow. it should become in a suspended state after
	wf := unmarshalWF(suspendTemplate)
	wf, err := wfcset.Create(wf)
	assert.NoError(t, err)
	woc := newWorkflowOperationCtx(wf, controller)
	woc.operate()
	wf, err = wfcset.Get(wf.ObjectMeta.Name, metav1.GetOptions{})
	assert.NoError(t, err)
	assert.True(t, util.IsWorkflowSuspended(wf))

	// operate again and verify no pods were scheduled
	woc = newWorkflowOperationCtx(wf, controller)
	woc.operate()
	pods, err := controller.kubeclientset.CoreV1().Pods("").List(metav1.ListOptions{})
	assert.NoError(t, err)
	assert.Equal(t, 0, len(pods.Items))

	// resume the workflow. verify resume workflow edits nodestatus correctly
	err = util.ResumeWorkflow(wfcset, wf.ObjectMeta.Name)
	assert.NoError(t, err)
	wf, err = wfcset.Get(wf.ObjectMeta.Name, metav1.GetOptions{})
	assert.NoError(t, err)
	assert.False(t, util.IsWorkflowSuspended(wf))

	// operate the workflow. it should reach the second step
	woc = newWorkflowOperationCtx(wf, controller)
	woc.operate()
	pods, err = controller.kubeclientset.CoreV1().Pods("").List(metav1.ListOptions{})
	assert.NoError(t, err)
	assert.Equal(t, 1, len(pods.Items))
}
explode_data.jsonl/54370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 498 }
[ 2830, 3393, 50, 12758, 7275, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 501, 2051, 741, 6692, 8316, 746, 1669, 6461, 1418, 69, 2972, 746, 18979, 45926, 73, 53, 16, 7141, 16, 1005, 6776, 38140, 445, 5130, 197, 322, 14476, 279, 28288, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetGPUDriversInstallScript(t *testing.T) { // VMSize with GPU and NVIDIA agreement for drivers distribution validSkus := []string{ "Standard_NC6", "Standard_NC12", "Standard_NC24", "Standard_NC24r", "Standard_NV6", "Standard_NV12", "Standard_NV24", "Standard_NV24r", "Standard_ND6s", "Standard_ND12s", "Standard_ND24s", "Standard_ND24rs", "Standard_NC6s_v2", "Standard_NC12s_v2", "Standard_NC24s_v2", "Standard_NC24rs_v2", "Standard_NC6s_v3", "Standard_NC12s_v3", "Standard_NC24s_v3", "Standard_NC24rs_v3", } for _, sku := range validSkus { s := getGPUDriversInstallScript(&api.AgentPoolProfile{VMSize: sku}) if s == "" { t.Fatalf("Expected NVIDIA driver install script for sku %v", sku) } } // VMSize without GPU s := getGPUDriversInstallScript(&api.AgentPoolProfile{VMSize: "Standard_D2_v2"}) if s != "" { t.Fatalf("VMSize without GPU should not receive a script, expected empty string, received %v", s) } }
explode_data.jsonl/21312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 1949, 24430, 4656, 81, 1945, 24690, 5910, 1155, 353, 8840, 836, 8, 1476, 197, 322, 17792, 1695, 448, 22670, 323, 33561, 9128, 369, 11788, 7982, 198, 56322, 19290, 355, 1669, 3056, 917, 515, 197, 197, 1, 19781, 95942, 21, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestLevelEncoders(t *testing.T) { tests := []struct { name string expected interface{} // output of encoding InfoLevel }{ {"capital", "INFO"}, {"lower", "info"}, {"", "info"}, {"something-random", "info"}, } for _, tt := range tests { var le LevelEncoder require.NoError(t, le.UnmarshalText([]byte(tt.name)), "Unexpected error unmarshaling %q.", tt.name) assertAppended( t, tt.expected, func(arr ArrayEncoder) { le(InfoLevel, arr) }, "Unexpected output serializing InfoLevel with %q.", tt.name, ) } }
explode_data.jsonl/51369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 4449, 7408, 52498, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 42400, 3749, 6257, 442, 2550, 315, 11170, 13074, 4449, 198, 197, 59403, 197, 197, 4913, 65063, 497, 330, 6637, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSimpleCommonJS bundles a trivial two-file CommonJS graph: the entry
// point requires ./foo and logs the result of calling its exported function.
func TestSimpleCommonJS(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			"/entry.js": `
				const fn = require('./foo')
				console.log(fn())
			`,
			"/foo.js": `
				module.exports = function() {
					return 123
				}
			`,
		},
		// Bundle starting from the entry point into a single output file.
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
explode_data.jsonl/38437
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 16374, 10839, 12545, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 4777, 5168, 284, 1373, 8283, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestQueries deploys the example "map" chaincode, stores three shipment
// records under key1..key3, and exercises the chaincode's "keys" read path
// plus — when CouchDB is the state database — its rich "query" path.
// Any failure stops the chaincode support and returns early.
func TestQueries(t *testing.T) {
	chainID := util.GetTestChainID()
	lis, err := initPeer(chainID)
	if err != nil {
		t.Fail()
		t.Logf("Error creating peer: %s", err)
	}
	defer finitPeer(lis, chainID)

	var ctxt = context.Background()

	url := "github.com/hyperledger/fabric/examples/chaincode/go/map"
	cID := &pb.ChaincodeID{Name: "tmap", Path: url, Version: "0"}

	f := "init"
	args := util.ToChaincodeArgs(f)

	spec := &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: args}}

	cccid := ccprovider.NewCCContext(chainID, "tmap", "0", "", false, nil, nil)

	// Block numbers must advance across the deploy and each invoke.
	var nextBlockNumber uint64

	_, err = deploy(ctxt, cccid, spec, nextBlockNumber)
	nextBlockNumber++
	chaincodeID := spec.ChaincodeId.Name
	if err != nil {
		t.Fail()
		t.Logf("Error initializing chaincode %s(%s)", chaincodeID, err)
		theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
		return
	}

	// Invoke second chaincode, which will inturn invoke the first chaincode
	f = "put"
	args = util.ToChaincodeArgs(f, "key1", "{\"shipmentID\":\"161003PKC7300\",\"customsInvoice\":{\"methodOfTransport\":\"GROUND\",\"invoiceNumber\":\"00091622\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}")

	spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: args}}
	_, _, _, err = invoke(ctxt, chainID, spec, nextBlockNumber)
	nextBlockNumber++

	if err != nil {
		t.Fail()
		t.Logf("Error invoking <%s>: %s", chaincodeID, err)
		theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
		return
	}

	// Store the same shipment record under key2.
	f = "put"
	args = util.ToChaincodeArgs(f, "key2", "{\"shipmentID\":\"161003PKC7300\",\"customsInvoice\":{\"methodOfTransport\":\"GROUND\",\"invoiceNumber\":\"00091622\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}")

	spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: args}}
	_, _, _, err = invoke(ctxt, chainID, spec, nextBlockNumber)
	nextBlockNumber++

	if err != nil {
		t.Fail()
		t.Logf("Error invoking <%s>: %s", chaincodeID, err)
		theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
		return
	}

	// Store the same shipment record under key3.
	f = "put"
	args = util.ToChaincodeArgs(f, "key3", "{\"shipmentID\":\"161003PKC7300\",\"customsInvoice\":{\"methodOfTransport\":\"GROUND\",\"invoiceNumber\":\"00091622\"},\"weightUnitOfMeasure\":\"KGM\",\"volumeUnitOfMeasure\": \"CO\",\"dimensionUnitOfMeasure\":\"CM\",\"currency\":\"USD\"}")

	spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: args}}
	_, _, _, err = invoke(ctxt, chainID, spec, nextBlockNumber)
	nextBlockNumber++

	if err != nil {
		t.Fail()
		t.Logf("Error invoking <%s>: %s", chaincodeID, err)
		theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
		return
	}

	// Read back via the chaincode's "keys" function over key0..key3
	// (exact range semantics are defined by the map chaincode).
	f = "keys"
	args = util.ToChaincodeArgs(f, "key0", "key3")

	spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: args}}
	_, _, _, err = invoke(ctxt, chainID, spec, nextBlockNumber)
	nextBlockNumber++

	if err != nil {
		t.Fail()
		t.Logf("Error invoking <%s>: %s", chaincodeID, err)
		theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
		return
	}

	// Rich (selector-based) queries are only supported on CouchDB.
	if ledgerconfig.IsCouchDBEnabled() == true {
		f = "query"
		args = util.ToChaincodeArgs(f, "{\"selector\":{\"currency\":\"USD\"}}")

		spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: cID, Input: &pb.ChaincodeInput{Args: args}}
		_, _, _, err = invoke(ctxt, chainID, spec, nextBlockNumber)

		if err != nil {
			t.Fail()
			t.Logf("Error invoking <%s>: %s", chaincodeID, err)
			theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
			return
		}
	}
	theChaincodeSupport.Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: spec})
}
explode_data.jsonl/52838
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1507 }
[ 2830, 3393, 55261, 1155, 353, 8840, 836, 8, 1476, 197, 8819, 915, 1669, 4094, 2234, 2271, 18837, 915, 2822, 8810, 285, 11, 1848, 1669, 2930, 30888, 62591, 915, 340, 743, 1848, 961, 2092, 341, 197, 3244, 57243, 741, 197, 3244, 98954, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
// TestResolveTerraformModulesTwoModulesWithDependenciesIncludedDirsWithNoDependency
// checks that when only module-a is listed in IncludeDirs, module-c (which
// depends on module-a) is still resolved but comes back flagged as excluded.
func TestResolveTerraformModulesTwoModulesWithDependenciesIncludedDirsWithNoDependency(t *testing.T) {
	t.Parallel()

	opts, _ := options.NewTerragruntOptionsForTest("running_module_test")
	// Only module-a is explicitly included.
	opts.IncludeDirs = []string{canonical(t, "../test/fixture-modules/module-a")}

	moduleA := &TerraformModule{
		Path:         canonical(t, "../test/fixture-modules/module-a"),
		Dependencies: []*TerraformModule{},
		Config: config.TerragruntConfig{
			Terraform: &config.TerraformConfig{Source: ptr("test")},
			IsPartial: true,
		},
		TerragruntOptions: opts.Clone(canonical(t, "../test/fixture-modules/module-a/"+config.DefaultTerragruntConfigPath)),
	}

	// module-c declares a dependency on module-a but is not in IncludeDirs.
	moduleC := &TerraformModule{
		Path:         canonical(t, "../test/fixture-modules/module-c"),
		Dependencies: []*TerraformModule{moduleA},
		Config: config.TerragruntConfig{
			Dependencies: &config.ModuleDependencies{Paths: []string{"../module-a"}},
			Terraform:    &config.TerraformConfig{Source: ptr("temp")},
			IsPartial:    true,
		},
		TerragruntOptions: opts.Clone(canonical(t, "../test/fixture-modules/module-c/"+config.DefaultTerragruntConfigPath)),
	}

	configPaths := []string{"../test/fixture-modules/module-a/" + config.DefaultTerragruntConfigPath, "../test/fixture-modules/module-c/" + config.DefaultTerragruntConfigPath}
	actualModules, actualErr := ResolveTerraformModules(configPaths, opts, mockHowThesePathsWereFound)

	// construct the expected list
	moduleC.FlagExcluded = true
	expected := []*TerraformModule{moduleA, moduleC}

	assert.Nil(t, actualErr, "Unexpected error: %v", actualErr)
	assertModuleListsEqual(t, expected, actualModules)
}
explode_data.jsonl/26640
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 613 }
[ 2830, 3393, 56808, 51, 13886, 627, 28201, 11613, 28201, 2354, 48303, 84610, 6184, 16056, 2753, 36387, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 64734, 11, 716, 1669, 2606, 7121, 51402, 68305, 3850, 3798, 2461, 2271, 445, 2717...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQueryToFilter(t *testing.T) { if queryToFilter(common.Query{}, "my-project", "my-log") != `logName = "projects/my-project/logs/my-log"` { t.Error("Empty search") } if queryToFilter(common.Query{QueryString: "My query"}, "my-project", "my-log") != `logName = "projects/my-project/logs/my-log" AND "My query"` { t.Error("Basic search filter") } if queryToFilter(common.Query{ EqualityFilters: []common.EqualityFilter{ { FieldName: "name", Operator: "=", Value: "pete", }, }}, "my-project", "my-log") != `logName = "projects/my-project/logs/my-log" AND jsonPayload.name = "pete"` { t.Error("Where filter fail") } }
explode_data.jsonl/2474
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 274 }
[ 2830, 3393, 2859, 1249, 5632, 1155, 353, 8840, 836, 8, 341, 743, 3239, 1249, 5632, 57802, 15685, 22655, 330, 2408, 33696, 497, 330, 2408, 46332, 899, 961, 1565, 839, 675, 284, 330, 17161, 34198, 33696, 77554, 34198, 46332, 39917, 341, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNoLogRestore(t *testing.T) { ds1 := datastore.NewMapDatastore() putVals(t, ds1, 0, 10) bds, err := Wrap(ds1, NoLogdir) require.NoError(t, err) var bup bytes.Buffer require.NoError(t, bds.Backup(&bup)) putVals(t, ds1, 10, 20) ds2 := datastore.NewMapDatastore() require.NoError(t, RestoreInto(&bup, ds2)) checkVals(t, ds2, 0, 10, true) checkVals(t, ds2, 10, 20, false) }
explode_data.jsonl/26961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 2753, 2201, 56284, 1155, 353, 8840, 836, 8, 341, 83336, 16, 1669, 64986, 7121, 2227, 1043, 4314, 2822, 45062, 52452, 1155, 11, 11472, 16, 11, 220, 15, 11, 220, 16, 15, 692, 2233, 5356, 11, 1848, 1669, 42187, 33783, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMeCommand runs the "/me" slash command in a basic channel and verifies
// that it produces a POST_ME post whose message is the original text wrapped
// in asterisks.
func TestMeCommand(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()

	Client := th.Client
	channel := th.BasicChannel

	testString := "/me hello"

	r1 := Client.Must(Client.ExecuteCommand(channel.Id, testString)).(*model.CommandResponse)
	require.NotNil(t, r1, "Command failed to execute")

	// NOTE(review): fixed sleep to let the post land before it is read back;
	// presumably the command completes asynchronously — a poll-until loop
	// would be less flaky than a 100ms wait. TODO confirm.
	time.Sleep(100 * time.Millisecond)

	p1 := Client.Must(Client.GetPostsForChannel(channel.Id, 0, 2, "")).(*model.PostList)
	require.Len(t, p1.Order, 2, "Command failed to send")

	// The newest post (Order[0]) must be the /me post.
	pt := p1.Posts[p1.Order[0]].Type
	require.Equal(t, model.POST_ME, pt, "invalid post type")

	msg := p1.Posts[p1.Order[0]].Message
	want := "*hello*"
	require.Equal(t, want, msg, "invalid me response")
}
explode_data.jsonl/26344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 7823, 4062, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 71724, 1669, 270, 11716, 198, 71550, 1669, 270, 48868, 9629, 271, 18185, 703, 1669, 3521, 2660, 238...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFinalizeBlockByzantineValidators applies a block carrying both a
// duplicate-vote and a light-client-attack evidence item and asserts that
// the block executor forwards the equivalent ABCI Misbehavior records to the
// application.
func TestFinalizeBlockByzantineValidators(t *testing.T) {
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	app := &testApp{}
	logger := log.NewNopLogger()
	cc := abciclient.NewLocalClient(logger, app)
	proxyApp := proxy.New(cc, logger, proxy.NopMetrics())
	err := proxyApp.Start(ctx)
	require.NoError(t, err)

	state, stateDB, privVals := makeState(t, 1, 1)
	stateStore := sm.NewStore(stateDB)
	defaultEvidenceTime := time.Date(2019, 1, 1, 0, 0, 0, 0, time.UTC)

	privVal := privVals[state.Validators.Validators[0].Address.String()]
	blockID := makeBlockID([]byte("headerhash"), 1000, []byte("partshash"))
	// Header used as the "conflicting block" header inside the
	// light-client-attack evidence constructed below.
	header := &types.Header{
		Version:            version.Consensus{Block: version.BlockProtocol, App: 1},
		ChainID:            state.ChainID,
		Height:             10,
		Time:               defaultEvidenceTime,
		LastBlockID:        blockID,
		LastCommitHash:     crypto.CRandBytes(crypto.HashSize),
		DataHash:           crypto.CRandBytes(crypto.HashSize),
		ValidatorsHash:     state.Validators.Hash(),
		NextValidatorsHash: state.Validators.Hash(),
		ConsensusHash:      crypto.CRandBytes(crypto.HashSize),
		AppHash:            crypto.CRandBytes(crypto.HashSize),
		LastResultsHash:    crypto.CRandBytes(crypto.HashSize),
		EvidenceHash:       crypto.CRandBytes(crypto.HashSize),
		ProposerAddress:    crypto.CRandBytes(crypto.AddressSize),
	}

	// we don't need to worry about validating the evidence as long as they pass validate basic
	dve, err := types.NewMockDuplicateVoteEvidenceWithValidator(ctx, 3, defaultEvidenceTime, privVal, state.ChainID)
	require.NoError(t, err)
	dve.ValidatorPower = 1000
	lcae := &types.LightClientAttackEvidence{
		ConflictingBlock: &types.LightBlock{
			SignedHeader: &types.SignedHeader{
				Header: header,
				Commit: &types.Commit{
					Height:  10,
					BlockID: makeBlockID(header.Hash(), 100, []byte("partshash")),
					Signatures: []types.CommitSig{{
						BlockIDFlag:      types.BlockIDFlagNil,
						ValidatorAddress: crypto.AddressHash([]byte("validator_address")),
						Timestamp:        defaultEvidenceTime,
						Signature:        crypto.CRandBytes(types.MaxSignatureSize)}},
				},
			},
			ValidatorSet: state.Validators,
		},
		CommonHeight:        8,
		ByzantineValidators: []*types.Validator{state.Validators.Validators[0]},
		TotalVotingPower:    12,
		Timestamp:           defaultEvidenceTime,
	}

	ev := []types.Evidence{dve, lcae}

	// ABCI-side records the application is expected to receive, in the same
	// order as the evidence list above.
	abciMb := []abci.Misbehavior{
		{
			Type:             abci.MisbehaviorType_DUPLICATE_VOTE,
			Height:           3,
			Time:             defaultEvidenceTime,
			Validator:        types.TM2PB.Validator(state.Validators.Validators[0]),
			TotalVotingPower: 10,
		},
		{
			Type:             abci.MisbehaviorType_LIGHT_CLIENT_ATTACK,
			Height:           8,
			Time:             defaultEvidenceTime,
			Validator:        types.TM2PB.Validator(state.Validators.Validators[0]),
			TotalVotingPower: 12,
		},
	}

	// Mocked evidence pool hands back the two evidence items and accepts the
	// executor's bookkeeping calls.
	evpool := &mocks.EvidencePool{}
	evpool.On("PendingEvidence", mock.AnythingOfType("int64")).Return(ev, int64(100))
	evpool.On("Update", ctx, mock.AnythingOfType("state.State"), mock.AnythingOfType("types.EvidenceList")).Return()
	evpool.On("CheckEvidence", ctx, mock.AnythingOfType("types.EvidenceList")).Return(nil)

	// Mocked mempool: every interaction is a no-op.
	mp := &mpmocks.Mempool{}
	mp.On("Lock").Return()
	mp.On("Unlock").Return()
	mp.On("FlushAppConn", mock.Anything).Return(nil)
	mp.On("Update",
		mock.Anything,
		mock.Anything,
		mock.Anything,
		mock.Anything,
		mock.Anything,
		mock.Anything,
		mock.Anything).Return(nil)

	eventBus := eventbus.NewDefault(logger)
	require.NoError(t, eventBus.Start(ctx))

	blockStore := store.NewBlockStore(dbm.NewMemDB())

	blockExec := sm.NewBlockExecutor(stateStore, log.NewNopLogger(), proxyApp, mp, evpool, blockStore, eventBus, sm.NopMetrics())

	// Build a block that carries the evidence and apply it.
	block := sf.MakeBlock(state, 1, new(types.Commit))
	block.Evidence = ev
	block.Header.EvidenceHash = block.Evidence.Hash()
	bps, err := block.MakePartSet(testPartSize)
	require.NoError(t, err)

	blockID = types.BlockID{Hash: block.Hash(), PartSetHeader: bps.Header()}

	_, err = blockExec.ApplyBlock(ctx, state, blockID, block)
	require.NoError(t, err)

	// TODO check state and mempool
	assert.Equal(t, abciMb, app.ByzantineValidators)
}
explode_data.jsonl/49113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1737 }
[ 2830, 3393, 19357, 551, 4713, 1359, 89, 38357, 31748, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 28236, 1669, 609, 1944, 2164, 16094, 17060, 1669, 1487, 7121, 45, 453, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServices(t *testing.T) { testClient := newTestKubeClient() service := &corev1.Service{ ObjectMeta: metav1.ObjectMeta{Name: "test-deployment"}, } namespace := "testing" t.Run("create service", func(t *testing.T) { result, err := testClient.createOrUpdateService(namespace, service) require.NoError(t, err) require.Equal(t, service.GetName(), result.GetName()) }) t.Run("create duplicate service", func(t *testing.T) { result, err := testClient.createOrUpdateService(namespace, service) require.NoError(t, err) require.Equal(t, service.GetName(), result.GetName()) }) }
explode_data.jsonl/22947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 11025, 1155, 353, 8840, 836, 8, 341, 18185, 2959, 1669, 501, 2271, 42, 3760, 2959, 741, 52934, 1669, 609, 98645, 16, 13860, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 63121, 25, 330, 1944, 6810, 52799, 7115, 197, 532, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthHashed(t *testing.T) { for _, conf := range []string{ "nofail", "wronguser", "wrongpass", } { t.Run(conf, func(t *testing.T) { se := NewValidator("sha256:rl3rgi4NcZkpAEcacZnQ2VuOfJ0FxAqCRaKB/SwdZoQ=", "sha256:E9JJ8stBJ7QM+nV4ZoUCeHk/gU3tPFh/5YieiJp6n2w=", []headers.AuthMethod{headers.AuthBasic, headers.AuthDigest}) va, err := NewSender(se.Header(), func() string { if conf == "wronguser" { return "test1user" } return "testuser" }(), func() string { if conf == "wrongpass" { return "test1pass" } return "testpass" }()) require.NoError(t, err) req := &base.Request{ Method: base.Announce, URL: mustParseURL("rtsp://myhost/mypath"), } va.AddAuthorization(req) err = se.ValidateRequest(req, nil) if conf != "nofail" { require.Error(t, err) } else { require.NoError(t, err) } }) } }
explode_data.jsonl/50256
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 482 }
[ 2830, 3393, 5087, 6370, 291, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2335, 1669, 2088, 3056, 917, 515, 197, 197, 1, 26654, 604, 756, 197, 197, 1, 34870, 872, 756, 197, 197, 1, 34870, 6385, 756, 197, 92, 341, 197, 3244, 16708, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConfigLoadConfigurationFailTOMLConfigFileDuplicatedData(t *testing.T) { if os.Geteuid() == 0 { t.Skip(testDisabledNeedNonRoot) } tmpdir, err := ioutil.TempDir(testDir, "runtime-config-") assert.NoError(t, err) defer os.RemoveAll(tmpdir) testLoadConfiguration(t, tmpdir, func(config testRuntimeConfig, configFile string, ignoreLogging bool) (bool, error) { expectFail := true text, err := getFileContents(config.ConfigPath) if err != nil { return expectFail, err } // create a config file containing two sets of // data. err = createFile(config.ConfigPath, fmt.Sprintf("%s\n%s\n", text, text)) if err != nil { return expectFail, err } return expectFail, nil }) }
explode_data.jsonl/5122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 2648, 5879, 7688, 19524, 51, 1898, 43, 2648, 1703, 35, 98984, 1043, 1155, 353, 8840, 836, 8, 341, 743, 2643, 2234, 68, 2423, 368, 621, 220, 15, 341, 197, 3244, 57776, 8623, 25907, 23657, 8121, 8439, 340, 197, 630, 20082, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestNotify exercises the DPoS notify round-trip: a private validator is
// loaded from a fixture file, signs a Notify message, and a copy of that
// message is verified against the validator's public key; finally the
// notify hash must be non-empty.
func TestNotify(t *testing.T) {
	// Materialize the private-validator fixture on disk, load it, and
	// remove the file again at the end of the test (see remove() below).
	filename := "./tmp_priv_validator.json"
	save(filename, privValidatorFile)
	privValidator := LoadOrGenPrivValidatorFS(filename)
	now := time.Now().Unix()
	//task := dpos.DecideTaskByTime(now)
	// vote, vote
	voteItem := &VoteItem{
		VotedNodeAddress: privValidator.Address,
		VotedNodeIndex:   int32(0),
		Cycle:            100,
		CycleStart:       18888,
		CycleStop:        28888,
		PeriodStart:      20000,
		PeriodStop:       21000,
		Height:           100,
	}
	// VoteID is the RIPEMD-160 digest of the JSON-encoded vote.
	encode, err := json.Marshal(voteItem)
	if err != nil {
		panic("Marshal vote failed.")
	}
	voteItem.VoteID = crypto.Ripemd160(encode)
	chainID := "test-chain-Ep9EcD"
	notify := &Notify{
		DPosNotify: &DPosNotify{
			Vote:              voteItem,
			HeightStop:        200,
			HashStop:          []byte("abcdef121212"),
			NotifyTimestamp:   now,
			NotifyNodeAddress: privValidator.GetAddress(),
			NotifyNodeIndex:   int32(0),
		},
	}
	// Sign the original, then verify a Copy() so the signature must
	// survive copying.
	err = privValidator.SignNotify(chainID, notify)
	require.Nil(t, err)
	notify2 := notify.Copy()
	err = notify2.Verify(chainID, privValidator.PubKey)
	require.Nil(t, err)
	assert.True(t, 0 < len(notify.Hash()))
	remove(filename)
}
explode_data.jsonl/15825
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 590 }
[ 2830, 3393, 28962, 1155, 353, 8840, 836, 8, 972, 66434, 1669, 5924, 5173, 24726, 64959, 4323, 5031, 49230, 10961, 11, 6095, 14256, 1703, 1218, 71170, 14256, 1669, 8893, 2195, 9967, 32124, 14256, 8485, 10961, 7229, 80922, 1669, 882, 13244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_crnBuilder(t *testing.T) { type args struct { useraccount string regionZone string host string } tests := []struct { name string args args want string }{ { name: "Generate for Prod", args: args{"12345", "dal12", "dal.power-iaas.cloud.ibm.com"}, want: "crn:v1:bluemix:public:power-iaas:dal12:a/12345:%s::", }, { name: "Generate for Staging", args: args{"12345", "dal12", "dal.power-iaas.test.cloud.ibm.com"}, want: "crn:v1:staging:public:power-iaas:dal12:a/12345:%s::", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := crnBuilder(tt.args.useraccount, tt.args.regionZone, tt.args.host); got != tt.want { t.Errorf("crnBuilder() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/37959
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 32331, 77, 3297, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 19060, 4608, 914, 198, 197, 197, 3943, 15363, 220, 914, 198, 197, 63104, 286, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRenderSimpleElseIfstatement(t *testing.T) { template, _ := ParseString("A-{% if 0 == 2 %}in if{% elseif 2 == 2 %}in elseif{% endif %}-Z", nil) assertRender(t, template, nil, `A-in elseif-Z`) }
explode_data.jsonl/42414
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 6750, 16374, 22971, 2679, 24184, 1155, 353, 8840, 836, 8, 341, 22832, 11, 716, 1669, 14775, 703, 445, 32, 63347, 4, 421, 220, 15, 621, 220, 17, 1018, 92, 258, 421, 66365, 11755, 220, 17, 621, 220, 17, 1018, 92, 258, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadDeltaByteArray(t *testing.T) { testData := [][]interface{}{ []interface{}{"Hello", "world"}, } for _, data := range testData { res, _ := ReadDeltaByteArray(bytes.NewReader(WriteDeltaByteArray(data))) if fmt.Sprintf("%v", data) != fmt.Sprintf("%v", res) { t.Errorf("ReadDeltaByteArray err, expect %v, get %v", data, res) } } }
explode_data.jsonl/23902
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 4418, 20277, 18394, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 52931, 4970, 67066, 197, 197, 1294, 4970, 6257, 4913, 9707, 497, 330, 14615, 7115, 197, 532, 2023, 8358, 821, 1669, 2088, 67348, 341, 197, 10202, 11, 716, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetUintKey(t *testing.T) { m := &HashMap{} _, ok := m.GetUintKey(0) if ok { t.Error("empty map should not return an item.") } c := uintptr(16) ok = m.Insert(uintptr(0), c) if !ok { t.Error("insert did not succeed.") } ok = m.Insert(uintptr(128), c) if !ok { t.Error("insert did not succeed.") } ok = m.Insert(uintptr(128), c) if ok { t.Error("insert on existing item did succeed.") } _, ok = m.GetUintKey(128) if !ok { t.Error("ok should be true for item stored within the map.") } _, ok = m.GetUintKey(127) if ok { t.Error("item for key should not exist.") } if m.Len() != 2 { t.Errorf("map should contain exactly 2 elements but has %v items.", m.Len()) } }
explode_data.jsonl/24426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 1949, 21570, 1592, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 609, 18497, 16094, 197, 6878, 5394, 1669, 296, 2234, 21570, 1592, 7, 15, 340, 743, 5394, 341, 197, 3244, 6141, 445, 3194, 2415, 1265, 537, 470, 458, 1509, 13053,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestIsRoot(t *testing.T) { loc := Tag{} if !loc.IsRoot() { t.Errorf("unspecified should be root.") } for i, tt := range parseTests() { loc, _ := Parse(tt.in) undef := tt.lang == "und" && tt.script == "" && tt.region == "" && tt.ext == "" if loc.IsRoot() != undef { t.Errorf("%d: was %v; want %v", i, loc.IsRoot(), undef) } } }
explode_data.jsonl/15828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 3872, 8439, 1155, 353, 8840, 836, 8, 341, 71128, 1669, 12353, 16094, 743, 753, 1074, 4506, 8439, 368, 341, 197, 3244, 13080, 445, 359, 53434, 1265, 387, 3704, 13053, 197, 532, 2023, 600, 11, 17853, 1669, 2088, 4715, 18200, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestUpdatePost covers the post-update endpoint end to end: message
// edits, preservation of FileIds across updates, sanitization of the
// add_channel_member prop, rejection of edits to join/leave system posts,
// slack-attachment updates, and permission checks for logged-out users,
// other users, team admins and system admins.
func TestUpdatePost(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	Client := th.Client
	channel := th.BasicChannel

	th.App.SetLicense(model.NewTestLicense())

	// Upload three copies of the same test image; their ids are attached
	// to the post created below.
	fileIds := make([]string, 3)
	data, err := testutils.ReadTestFile("test.png")
	require.Nil(t, err)
	for i := 0; i < len(fileIds); i++ {
		fileResp, resp := Client.UploadFile(data, channel.Id, "test.png")
		CheckNoError(t, resp)
		fileIds[i] = fileResp.FileInfos[0].Id
	}

	rpost, err := th.App.CreatePost(&model.Post{
		UserId:    th.BasicUser.Id,
		ChannelId: channel.Id,
		Message:   "zz" + model.NewId() + "a",
		FileIds:   fileIds,
	}, channel, false)
	require.Nil(t, err)

	assert.Equal(t, rpost.Message, rpost.Message, "full name didn't match")
	assert.EqualValues(t, 0, rpost.EditAt, "Newly created post shouldn't have EditAt set")
	assert.Equal(t, model.StringArray(fileIds), rpost.FileIds, "FileIds should have been set")

	t.Run("same message, fewer files", func(t *testing.T) {
		msg := "zz" + model.NewId() + " update post"
		rpost.Message = msg
		rpost.UserId = ""

		rupost, resp := Client.UpdatePost(rpost.Id, &model.Post{
			Id:      rpost.Id,
			Message: rpost.Message,
			FileIds: fileIds[0:2], // one fewer file id
		})
		CheckNoError(t, resp)

		// FileIds must not change on update even though fewer were sent.
		assert.Equal(t, rupost.Message, msg, "failed to updates")
		assert.NotEqual(t, 0, rupost.EditAt, "EditAt not updated for post")
		assert.Equal(t, model.StringArray(fileIds), rupost.FileIds, "FileIds should have not have been updated")

		actual, resp := Client.GetPost(rpost.Id, "")
		CheckNoError(t, resp)

		assert.Equal(t, actual.Message, msg, "failed to updates")
		assert.NotEqual(t, 0, actual.EditAt, "EditAt not updated for post")
		assert.Equal(t, model.StringArray(fileIds), actual.FileIds, "FileIds should have not have been updated")
	})

	t.Run("new message, invalid props", func(t *testing.T) {
		msg1 := "#hashtag a" + model.NewId() + " update post again"
		rpost.Message = msg1
		rpost.AddProp(model.PROPS_ADD_CHANNEL_MEMBER, "no good")
		rrupost, resp := Client.UpdatePost(rpost.Id, rpost)
		CheckNoError(t, resp)

		assert.Equal(t, msg1, rrupost.Message, "failed to update message")
		assert.Equal(t, "#hashtag", rrupost.Hashtags, "failed to update hashtags")
		assert.Nil(t, rrupost.GetProp(model.PROPS_ADD_CHANNEL_MEMBER), "failed to sanitize Props['add_channel_member'], should be nil")

		actual, resp := Client.GetPost(rpost.Id, "")
		CheckNoError(t, resp)

		assert.Equal(t, msg1, actual.Message, "failed to update message")
		assert.Equal(t, "#hashtag", actual.Hashtags, "failed to update hashtags")
		assert.Nil(t, actual.GetProp(model.PROPS_ADD_CHANNEL_MEMBER), "failed to sanitize Props['add_channel_member'], should be nil")
	})

	t.Run("join/leave post", func(t *testing.T) {
		// System join/leave posts must reject updates with a 400.
		rpost2, err := th.App.CreatePost(&model.Post{
			ChannelId: channel.Id,
			Message:   "zz" + model.NewId() + "a",
			Type:      model.POST_JOIN_LEAVE,
			UserId:    th.BasicUser.Id,
		}, channel, false)
		require.Nil(t, err)

		up2 := &model.Post{
			Id:        rpost2.Id,
			ChannelId: channel.Id,
			Message:   "zz" + model.NewId() + " update post 2",
		}
		_, resp := Client.UpdatePost(rpost2.Id, up2)
		CheckBadRequestStatus(t, resp)
	})

	rpost3, err := th.App.CreatePost(&model.Post{
		ChannelId: channel.Id,
		Message:   "zz" + model.NewId() + "a",
		UserId:    th.BasicUser.Id,
	}, channel, false)
	require.Nil(t, err)

	t.Run("new message, add files", func(t *testing.T) {
		// Attaching files via update is ignored for a post created
		// without files.
		up3 := &model.Post{
			Id:        rpost3.Id,
			ChannelId: channel.Id,
			Message:   "zz" + model.NewId() + " update post 3",
			FileIds:   fileIds[0:2],
		}
		rrupost3, resp := Client.UpdatePost(rpost3.Id, up3)
		CheckNoError(t, resp)

		assert.Empty(t, rrupost3.FileIds)

		actual, resp := Client.GetPost(rpost.Id, "")
		CheckNoError(t, resp)
		assert.Equal(t, model.StringArray(fileIds), actual.FileIds)
	})

	t.Run("add slack attachments", func(t *testing.T) {
		up4 := &model.Post{
			Id:        rpost3.Id,
			ChannelId: channel.Id,
			Message:   "zz" + model.NewId() + " update post 3",
		}
		up4.AddProp("attachments", []model.SlackAttachment{
			{
				Text: "Hello World",
			},
		})
		rrupost3, resp := Client.UpdatePost(rpost3.Id, up4)
		CheckNoError(t, resp)

		assert.NotEqual(t, rpost3.EditAt, rrupost3.EditAt)
		assert.NotEqual(t, rpost3.Attachments(), rrupost3.Attachments())
	})

	t.Run("logged out", func(t *testing.T) {
		Client.Logout()
		_, resp := Client.UpdatePost(rpost.Id, rpost)
		CheckUnauthorizedStatus(t, resp)
	})

	t.Run("different user", func(t *testing.T) {
		th.LoginBasic2()
		_, resp := Client.UpdatePost(rpost.Id, rpost)
		CheckForbiddenStatus(t, resp)

		Client.Logout()
	})

	t.Run("different user, but team admin", func(t *testing.T) {
		th.LoginTeamAdmin()
		_, resp := Client.UpdatePost(rpost.Id, rpost)
		CheckForbiddenStatus(t, resp)

		Client.Logout()
	})

	t.Run("different user, but system admin", func(t *testing.T) {
		_, resp := th.SystemAdminClient.UpdatePost(rpost.Id, rpost)
		CheckNoError(t, resp)
	})
}
explode_data.jsonl/5244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2092 }
[ 2830, 3393, 4289, 4133, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 71550, 1669, 270, 48868, 9629, 271, 70479, 5105, 4202, 9827, 7635, 712...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_FileStore_Get stores one QoS-1 PUBLISH message (msg id 120, topic
// "/a/b/c") in a FileStore, checks that the backing file appears under the
// expected "o.<id>.msg" name, and verifies that Get returns the exact
// wire encoding written below in `exp`.
func Test_FileStore_Get(t *testing.T) {
	storedir := "/tmp/TestStore/_get"
	f := NewFileStore(storedir)
	f.Open()
	pm := newPublishMsg(QOS_ONE, "/a/b/c", []byte{0xBE, 0xEF, 0xED})
	pm.setMsgId(120)
	key := obound_mid2key(pm.MsgId())
	f.Put(key, pm)
	// "o." prefix marks outbound messages; 120 is the msg id set above.
	if !exists(storedir + "/o.120.msg") {
		t.Fatalf("message not in store")
	}
	// Expected on-the-wire encoding of the stored PUBLISH packet.
	exp := []byte{
		/* msg type */
		0x32, // qos 1
		/* remlen */
		0x0d,
		/* topic, msg id in varheader */
		0x00, // length of topic
		0x06,
		0x2F, // /
		0x61, // a
		0x2F, // /
		0x62, // b
		0x2F, // /
		0x63, // c
		/* msg id (is always 2 bytes) */
		0x00,
		0x78,
		/*payload */
		0xBE,
		0xEF,
		0xED,
	}
	m := f.Get(key)
	if m == nil {
		t.Fatalf("message not retreived from store")
	}
	if !bytes.Equal(exp, m.Bytes()) {
		t.Fatalf("message from store not same as what went in")
	}
}
explode_data.jsonl/37287
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 414 }
[ 2830, 3393, 34061, 6093, 13614, 1155, 353, 8840, 836, 8, 341, 18388, 3018, 404, 1669, 3521, 5173, 80527, 6093, 19632, 455, 698, 1166, 1669, 1532, 1703, 6093, 5895, 3018, 404, 340, 1166, 12953, 741, 86511, 1669, 501, 50145, 6611, 6253, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestParseLoadMap(t *testing.T) { { loadList := "" m, err := ParseLoadMap(loadList) test.S(t).ExpectNil(err) test.S(t).ExpectEquals(len(m), 0) } { loadList := "threads_running=20,threads_connected=10" m, err := ParseLoadMap(loadList) test.S(t).ExpectNil(err) test.S(t).ExpectEquals(len(m), 2) test.S(t).ExpectEquals(m["threads_running"], int64(20)) test.S(t).ExpectEquals(m["threads_connected"], int64(10)) } { loadList := "threads_running=20=30,threads_connected=10" _, err := ParseLoadMap(loadList) test.S(t).ExpectNotNil(err) } { loadList := "threads_running=20,threads_connected" _, err := ParseLoadMap(loadList) test.S(t).ExpectNotNil(err) } }
explode_data.jsonl/1133
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 328 }
[ 2830, 3393, 14463, 5879, 2227, 1155, 353, 8840, 836, 8, 341, 197, 515, 197, 49386, 852, 1669, 8389, 197, 2109, 11, 1848, 1669, 14775, 5879, 2227, 49092, 852, 340, 197, 18185, 808, 1155, 568, 17536, 19064, 3964, 340, 197, 18185, 808, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIssue332_Part1 is a regression test for issue 332: after hitting a
// breakpoint on line 8 and issuing Next, the debugger must step over (not
// into) a function call, leaving the top stack frame on main.main at
// line 9.
func TestIssue332_Part1(t *testing.T) {
	// Next shouldn't step inside a function call
	protest.AllowRecording(t)
	withTestProcess("issue332", t, func(p *proc.Target, fixture protest.Fixture) {
		setFileBreakpoint(p, t, fixture.Source, 8)
		assertNoError(p.Continue(), t, "Continue()")
		assertNoError(p.Next(), t, "first Next()")
		// Only the top two frames are needed to check the current frame.
		locations, err := proc.ThreadStacktrace(p.CurrentThread(), 2)
		assertNoError(err, t, "Stacktrace()")
		if locations[0].Call.Fn == nil {
			t.Fatalf("Not on a function")
		}
		if locations[0].Call.Fn.Name != "main.main" {
			t.Fatalf("Not on main.main after Next: %s (%s:%d)", locations[0].Call.Fn.Name, locations[0].Call.File, locations[0].Call.Line)
		}
		if locations[0].Call.Line != 9 {
			t.Fatalf("Not on line 9 after Next: %s (%s:%d)", locations[0].Call.Fn.Name, locations[0].Call.File, locations[0].Call.Line)
		}
	})
}
explode_data.jsonl/56245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 42006, 18, 18, 17, 97938, 16, 1155, 353, 8840, 836, 8, 341, 197, 322, 9295, 13133, 944, 3019, 4766, 264, 729, 1618, 198, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 11159, 18, 18, 17, 497, 259, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestErrorReport checks the parser's error reporting for two malformed
// GraphQL documents: a field alias with no following identifier, and an
// '@' placed before a type condition. The expected strings pin the token
// kind, source location and path formatting of the report.
// NOTE(review): the line/column values in the expected errors depend on
// the exact whitespace inside the raw-string literals below — confirm the
// literals' indentation against the original file.
func TestErrorReport(t *testing.T) {
	t.Run("missing ident", func(t *testing.T) {
		_, report := ParseGraphqlDocumentString(`
			{
				me {
					... on Person @foo {
						personID:
					}
				}
			}
	`)
		if !report.HasErrors() {
			t.Fatalf("want err, got nil")
		}
		want := "external: unexpected token - got: RBRACE want one of: [IDENT], locations: [{Line:6 Column:6}], path: []"
		if report.Error() != want {
			t.Fatalf("want:\n%s\ngot:\n%s\n", want, report.Error())
		}
	})
	t.Run("at instead of on", func(t *testing.T) {
		_, report := ParseGraphqlDocumentString(`
			{
				me {
					... on @Person @foo {
						personID
					}
				}
			}
	`)
		if !report.HasErrors() {
			t.Fatalf("want err, got nil")
		}
		want := "external: unexpected token - got: AT want one of: [IDENT], locations: [{Line:4 Column:13}], path: []"
		if report.Error() != want {
			t.Fatalf("want:\n%s\ngot:\n%s\n", want, report.Error())
		}
	})
}
explode_data.jsonl/50805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 434 }
[ 2830, 3393, 1454, 10361, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 30616, 3524, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 6878, 1895, 1669, 14775, 11212, 1470, 7524, 703, 61528, 298, 197, 515, 13544, 197, 49294, 341, 46...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestMultiReturnWithDeeplyNestedArray unpacks an ABI return of a
// uint64[3][2][4] array followed by a plain uint64, verifying that the
// scalar after the nested static array is read at the correct offset.
func TestMultiReturnWithDeeplyNestedArray(t *testing.T) {
	// Similar to TestMultiReturnWithArray, but with a special case in mind:
	//  values of nested static arrays count towards the size as well, and any element following
	//  after such nested array argument should be read with the correct offset,
	//  so that it does not read content from the previous array argument.
	const definition = `[{"name" : "multi", "outputs": [{"type": "uint64[3][2][4]"}, {"type": "uint64"}]}]`
	abi, err := JSON(strings.NewReader(definition))
	if err != nil {
		t.Fatal(err)
	}
	buff := new(bytes.Buffer)
	// construct the test array, each 3 char element is joined with 61 '0' chars,
	// to from the ((3 + 61) * 0.5) = 32 byte elements in the array.
	buff.Write(common.Hex2Bytes(strings.Join([]string{
		"", //empty, to apply the 61-char separator to the first element as well.
		"111", "112", "113", "121", "122", "123",
		"211", "212", "213", "221", "222", "223",
		"311", "312", "313", "321", "322", "323",
		"411", "412", "413", "421", "422", "423",
	}, "0000000000000000000000000000000000000000000000000000000000000")))
	// The trailing uint64 word (0x9876) that follows the array data.
	buff.Write(common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000009876"))
	ret1, ret1Exp := new([4][2][3]uint64), [4][2][3]uint64{
		{{0x111, 0x112, 0x113}, {0x121, 0x122, 0x123}},
		{{0x211, 0x212, 0x213}, {0x221, 0x222, 0x223}},
		{{0x311, 0x312, 0x313}, {0x321, 0x322, 0x323}},
		{{0x411, 0x412, 0x413}, {0x421, 0x422, 0x423}},
	}
	ret2, ret2Exp := new(uint64), uint64(0x9876)
	if err := abi.Unpack(&[]interface{}{ret1, ret2}, "multi", buff.Bytes()); err != nil {
		t.Fatal(err)
	}
	if !reflect.DeepEqual(*ret1, ret1Exp) {
		t.Error("array result", *ret1, "!= Expected", ret1Exp)
	}
	if *ret2 != ret2Exp {
		t.Error("int result", *ret2, "!= Expected", ret2Exp)
	}
}
explode_data.jsonl/22758
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 688 }
[ 2830, 3393, 20358, 5598, 2354, 33464, 398, 71986, 1857, 1155, 353, 8840, 836, 8, 341, 197, 322, 21476, 311, 3393, 20358, 5598, 2354, 1857, 11, 714, 448, 264, 3281, 1142, 304, 3971, 510, 197, 322, 220, 2750, 315, 24034, 1099, 18386, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetAndSetUri(t *testing.T) { connection := connect.NewEmptyConnectionParams() assert.Equal(t, "", connection.Uri()) connection.SetUri("https://pipgoals:3000") assert.Equal(t, "https://pipgoals:3000", connection.Uri()) }
explode_data.jsonl/1894
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 1949, 3036, 1649, 13899, 1155, 353, 8840, 836, 8, 341, 54590, 1669, 4564, 7121, 3522, 4526, 4870, 741, 6948, 12808, 1155, 11, 7342, 3633, 35777, 12367, 54590, 4202, 13899, 445, 2428, 1110, 51501, 84157, 25, 18, 15, 15, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestMetablockLoad writes a series of malformed metadata files to disk
// and asserts that Metablock.Load rejects each one with an error message
// containing the matching substring from expectedErrors (the two slices
// are index-aligned).
func TestMetablockLoad(t *testing.T) {
	// Create a bunch of tmp json files with invalid format and test load errors:
	// - invalid json
	// - missing signatures and signed field
	// - invalid signatures field
	// - invalid signed field
	// - invalid signed type
	// - invalid signed field for type link
	// - invalid signed field for type layout
	invalidJSONBytes := [][]byte{
		[]byte("{"),
		[]byte("{}"),
		[]byte(`{"signatures": null, "signed": {}}`),
		[]byte(`{"signatures": "string", "signed": {}}`),
		[]byte(`{"signatures": [], "signed": []}`),
		[]byte(`{"signatures": [], "signed": {"_type": "something else"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"materials": "invalid", "name": "some name", "products": "invalid",
			"byproducts": "invalid", "command": "some command",
			"environment": "some list"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "layout",
			"steps": "invalid", "inspect": "invalid", "readme": "some readme",
			"keys": "some keys", "expires": "some date"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "layout",
			"inspect": "invalid", "readme": "some readme",
			"keys": "some keys", "expires": "some date"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "layout",
			"steps": "invalid", "readme": "some readme",
			"keys": "some keys", "expires": "some date"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "layout",
			"steps": "invalid", "inspect": "invalid", "readme": "some readme",
			"expires": "some date"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "layout",
			"steps": "invalid", "inspect": "invalid", "readme": "some readme",
			"keys": "some keys"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "layout",
			"steps": "invalid", "inspect": "invalid",
			"keys": "some keys", "expires": "some date"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "layout",
			"steps": [], "inspect": [], "readme": "some readme", "keys": {},
			"expires": "some date", "foo": "bar"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"materials": "invalid", "products": "invalid",
			"byproducts": "invalid", "command": "some command",
			"environment": "some list"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"name": "some name", "products": "invalid",
			"byproducts": "invalid", "command": "some command",
			"environment": "some list"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"materials": "invalid", "name": "some name",
			"byproducts": "invalid", "command": "some command",
			"environment": "some list"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"materials": "invalid", "name": "some name", "products": "invalid",
			"command": "some command", "environment": "some list"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"materials": "invalid", "name": "some name", "products": "invalid",
			"byproducts": "invalid", "environment": "some list"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"materials": "invalid", "name": "some name", "products": "invalid",
			"byproducts": "invalid", "command": "some command"}}`),
		[]byte(`{"signatures": [], "signed": {"_type": "link",
			"materials": {}, "name": "some name", "products": {},
			"byproducts": {}, "command": [], "environment": {},
			"foo": "bar"}}`),
	}
	// Index-aligned substrings that must appear in each load error.
	expectedErrors := []string{
		"unexpected end",
		"requires 'signed' and 'signatures' parts",
		"requires 'signed' and 'signatures' parts",
		"cannot unmarshal string into Go value of type []in_toto.Signature",
		"cannot unmarshal array into Go value of type map[string]interface {}",
		"metadata must be one of 'link' or 'layout'",
		"cannot unmarshal string into Go struct field Link.materials",
		"cannot unmarshal string into Go struct field Layout.steps",
		"required field steps missing",
		"required field inspect missing",
		"required field keys missing",
		"required field expires missing",
		"required field readme missing",
		"json: unknown field \"foo\"",
		"required field name missing",
		"required field materials missing",
		"required field products missing",
		"required field byproducts missing",
		"required field command missing",
		"required field environment missing",
		"json: unknown field \"foo\"",
	}
	for i := 0; i < len(invalidJSONBytes); i++ {
		fn := fmt.Sprintf("invalid-metadata-%v.tmp", i)
		if err := ioutil.WriteFile(fn, invalidJSONBytes[i], 0644); err != nil {
			fmt.Printf("Could not write file: %s", err)
		}
		var mb Metablock
		err := mb.Load(fn)
		if err == nil || !strings.Contains(err.Error(), expectedErrors[i]) {
			t.Errorf("Metablock.Load returned '%s', expected '%s' error", err,
				expectedErrors[i])
		}
		if err := os.Remove(fn); err != nil {
			t.Errorf("Unable to remove directory %s: %s", fn, err)
		}
	}
}
explode_data.jsonl/51756
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1782 }
[ 2830, 3393, 34673, 370, 1023, 5879, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 264, 15493, 315, 4174, 2951, 3542, 448, 8318, 3561, 323, 1273, 2795, 5975, 510, 197, 322, 481, 8318, 2951, 198, 197, 322, 481, 7402, 32628, 323, 8499, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestVolumesFromRO(t *testing.T) { taskEngine, done, _ := setupWithDefaultConfig(t) defer done() stateChangeEvents := taskEngine.StateChangeEvents() testTask := createTestTask("testVolumeROContainer") testTask.Containers[0].Image = testVolumeImage for i := 0; i < 3; i++ { cont := createTestContainer() cont.Name = "test" + strconv.Itoa(i) cont.Image = testVolumeImage cont.Essential = i > 0 testTask.Containers = append(testTask.Containers, cont) } testTask.Containers[1].VolumesFrom = []apicontainer.VolumeFrom{{SourceContainer: testTask.Containers[0].Name, ReadOnly: true}} testTask.Containers[1].Command = []string{"New-Item c:/volume/readonly-fs; if ($?) { Exit 0 } else { Exit 42 }"} testTask.Containers[1].Essential = false testTask.Containers[2].VolumesFrom = []apicontainer.VolumeFrom{{SourceContainer: testTask.Containers[0].Name}} testTask.Containers[2].Command = []string{"New-Item c:/volume/readonly-fs-2; if ($?) { Exit 0 } else { Exit 42 }"} testTask.Containers[2].Essential = false testTask.Containers[3].VolumesFrom = []apicontainer.VolumeFrom{{SourceContainer: testTask.Containers[0].Name, ReadOnly: false}} testTask.Containers[3].Command = []string{"New-Item c:/volume/readonly-fs-3; if ($?) 
{ Exit 0 } else { Exit 42 }"} testTask.Containers[3].Essential = false go taskEngine.AddTask(testTask) verifyTaskIsRunning(stateChangeEvents, testTask) // Make sure all the three test container stopped first verifyContainerStoppedStateChange(t, taskEngine) verifyContainerStoppedStateChange(t, taskEngine) verifyContainerStoppedStateChange(t, taskEngine) // Stop the task by stopping the essential container taskEngine.(*DockerTaskEngine).stopContainer(testTask, testTask.Containers[0]) verifyTaskIsStopped(stateChangeEvents, testTask) assert.NotEqual(t, *testTask.Containers[1].GetKnownExitCode(), 0, "didn't exit due to failure to touch ro fs as expected: ", *testTask.Containers[1].GetKnownExitCode()) assert.Equal(t, *testTask.Containers[2].GetKnownExitCode(), 0, "couldn't touch with default of rw") assert.Equal(t, *testTask.Containers[3].GetKnownExitCode(), 0, "couldn't touch with explicit rw") }
explode_data.jsonl/20162
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 711 }
[ 2830, 3393, 96325, 3830, 1285, 1155, 353, 8840, 836, 8, 341, 49115, 4571, 11, 2814, 11, 716, 1669, 6505, 2354, 3675, 2648, 1155, 340, 16867, 2814, 2822, 24291, 4072, 7900, 1669, 3383, 4571, 18942, 4072, 7900, 2822, 18185, 6262, 1669, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSHA1(t *testing.T) { uuid := NewSHA1(NameSpace_DNS, []byte("python.org")).String() want := "886313e1-3b8a-5372-9b90-0c9aee199e5d" if uuid != want { t.Errorf("SHA1: got %q expected %q", uuid, want) } }
explode_data.jsonl/31943
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 33145, 16, 1155, 353, 8840, 836, 8, 341, 10676, 2423, 1669, 1532, 33145, 16, 21182, 9914, 1557, 2448, 11, 3056, 3782, 445, 12669, 2659, 15197, 703, 741, 50780, 1669, 330, 23, 23, 21, 18, 16, 18, 68, 16, 12, 18, 65, 23,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetContainerInfoOnNonExistContainer(t *testing.T) { mockCadvisor := &mockCadvisorClient{} kubelet, _, fakeDocker := makeTestKubelet(t) kubelet.cadvisorClient = mockCadvisor fakeDocker.containerList = []docker.APIContainers{} stats, _ := kubelet.GetContainerInfo("qux", "foo", nil) if stats != nil { t.Errorf("non-nil stats on non exist container") } mockCadvisor.AssertExpectations(t) }
explode_data.jsonl/2846
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 1949, 4502, 1731, 1925, 8121, 25613, 4502, 1155, 353, 8840, 836, 8, 341, 77333, 34, 81794, 1669, 609, 16712, 34, 81794, 2959, 31483, 16463, 3760, 1149, 11, 8358, 12418, 35, 13659, 1669, 1281, 2271, 42, 3760, 1149, 1155, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPeerConnection_Renegotiation_AddTrack_Rename(t *testing.T) { api := NewAPI() lim := test.TimeOut(time.Second * 30) defer lim.Stop() report := test.CheckRoutines(t) defer report() api.mediaEngine.RegisterDefaultCodecs() pcOffer, pcAnswer, err := api.newPair(Configuration{}) if err != nil { t.Fatal(err) } haveRenegotiated := &atomicBool{} onTrackFired, onTrackFiredFunc := context.WithCancel(context.Background()) var atomicRemoteTrack atomic.Value pcOffer.OnTrack(func(track *Track, r *RTPReceiver) { if !haveRenegotiated.get() { t.Fatal("OnTrack was called before renegotation") } onTrackFiredFunc() atomicRemoteTrack.Store(track) }) _, err = pcOffer.AddTransceiverFromKind(RTPCodecTypeVideo, RtpTransceiverInit{Direction: RTPTransceiverDirectionRecvonly}) assert.NoError(t, err) vp8Track, err := pcAnswer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "foo1", "bar1") assert.NoError(t, err) _, err = pcAnswer.AddTrack(vp8Track) assert.NoError(t, err) assert.NoError(t, signalPair(pcOffer, pcAnswer)) vp8Track.id = "foo2" vp8Track.label = "bar2" haveRenegotiated.set(true) assert.NoError(t, signalPair(pcOffer, pcAnswer)) sendVideoUntilDone(onTrackFired.Done(), t, []*Track{vp8Track}) assert.NoError(t, pcOffer.Close()) assert.NoError(t, pcAnswer.Close()) remoteTrack, ok := atomicRemoteTrack.Load().(*Track) require.True(t, ok) require.NotNil(t, remoteTrack) assert.Equal(t, vp8Track.SSRC(), remoteTrack.SSRC()) assert.Equal(t, "foo2", remoteTrack.ID()) assert.Equal(t, "bar2", remoteTrack.Label()) }
explode_data.jsonl/3767
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 609 }
[ 2830, 3393, 30888, 4526, 2568, 268, 65978, 7101, 21346, 15667, 2568, 1840, 1155, 353, 8840, 836, 8, 341, 54299, 1669, 1532, 7082, 741, 197, 4659, 1669, 1273, 16299, 2662, 9730, 32435, 353, 220, 18, 15, 340, 16867, 4568, 30213, 2822, 699...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetFuturesSymbolPriceTicker(t *testing.T) { t.Parallel() _, err := b.GetFuturesSymbolPriceTicker(context.Background(), currency.NewPairWithDelimiter("BTCUSD", "PERP", "_"), "") if err != nil { t.Error(err) } }
explode_data.jsonl/76608
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 1949, 37, 74606, 15090, 6972, 87278, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 293, 2234, 37, 74606, 15090, 6972, 87278, 5378, 19047, 1507, 11413, 7121, 12443, 2354, 91098, 445, 59118, 26749,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestDecodeDf(t *testing.T) { acc := &testutil.Accumulator{} err := decodeDf(acc, cephDFDump) require.NoError(t, err) for _, r := range cephDfResults { acc.AssertContainsTaggedFields(t, r.metric, r.fields, r.tags) } }
explode_data.jsonl/40225
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 32564, 35, 69, 1155, 353, 8840, 836, 8, 341, 197, 4475, 1669, 609, 1944, 1314, 77538, 372, 10511, 16094, 9859, 1669, 16895, 35, 69, 48455, 11, 272, 23544, 5262, 51056, 340, 17957, 35699, 1155, 11, 1848, 692, 2023, 8358, 43...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFIFO_detectLineJumpers(t *testing.T) { f := New(testFifoObjectKeyFunc) f.Add(mkFifoObj("foo", 10)) // nolint: errcheck f.Add(mkFifoObj("bar", 1)) // nolint: errcheck f.Add(mkFifoObj("foo", 11)) // nolint: errcheck f.Add(mkFifoObj("foo", 13)) // nolint: errcheck f.Add(mkFifoObj("zab", 30)) // nolint: errcheck if e, a := 13, Pop(f).(testFifoObject).val; a != e { t.Fatalf("expected %d, got %d", e, a) } // ensure foo doesn't jump back in line f.Add(mkFifoObj("foo", 14)) // nolint: errcheck if e, a := 1, Pop(f).(testFifoObject).val; a != e { t.Fatalf("expected %d, got %d", e, a) } if e, a := 30, Pop(f).(testFifoObject).val; a != e { t.Fatalf("expected %d, got %d", e, a) } if e, a := 14, Pop(f).(testFifoObject).val; a != e { t.Fatalf("expected %d, got %d", e, a) } }
explode_data.jsonl/69923
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 372 }
[ 2830, 3393, 37, 25997, 56457, 2460, 33979, 388, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 8623, 37, 31497, 1190, 1592, 9626, 692, 1166, 1904, 1255, 74, 37, 31497, 5261, 445, 7975, 497, 220, 16, 15, 593, 442, 308, 337, 396, 25,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_StreamingWrite(t *testing.T) { assert := assert.New(t) input := "abcdef123456789" r1 := xxhash.NewXXHash64() r2 := xxhash.NewXXHash64() for i, v := range []byte(input) { r1.Reset() r1.Write([]byte(input[:i+1])) r2.Write([]byte(string(v))) assert.Equal(r1.String(), r2.String()) } }
explode_data.jsonl/3960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 62, 76509, 7985, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 22427, 1669, 330, 41202, 16, 17, 18, 19, 20, 21, 22, 23, 24, 1837, 7000, 16, 1669, 20908, 8296, 7121, 6148, 6370, 21, 19, 741, 7000, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2