text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
// TestServer_acceptLanguage checks that acceptLanguage resolves an
// Accept-Language header value against the locales registered on the server,
// falling back to the server's configured tag for an empty header.
func TestServer_acceptLanguage(t *testing.T) {
	a := assert.New(t, false)
	// Server default language is Afrikaans (Options.Tag).
	srv := newServer(a, &Options{Tag: language.Afrikaans})
	b := srv.Locale().Builder()
	a.NotError(b.SetString(language.Und, "lang", "und"))
	a.NotError(b.SetString(language.SimplifiedChinese, "lang", "hans"))
	a.NotError(b.SetString(language.TraditionalChinese, "lang", "hant"))
	a.NotError(b.SetString(language.AmericanEnglish, "lang", "en_US"))

	// Empty header: fall back to the Options.Tag default.
	tag := srv.acceptLanguage("")
	a.Equal(tag, language.Afrikaans, "v1:%s, v2:%s", tag.String(), language.Und.String())

	tag = srv.acceptLanguage("zh") // matches zh-hans
	a.Equal(tag, language.SimplifiedChinese, "v1:%s, v2:%s", tag.String(), language.SimplifiedChinese.String())

	tag = srv.acceptLanguage("zh-Hant")
	a.Equal(tag, language.TraditionalChinese, "v1:%s, v2:%s", tag.String(), language.TraditionalChinese.String())

	tag = srv.acceptLanguage("zh-Hans")
	a.Equal(tag, language.SimplifiedChinese, "v1:%s, v2:%s", tag.String(), language.SimplifiedChinese.String())

	// Quality weights: en (implicit q=1) outranks both low-q zh variants.
	tag = srv.acceptLanguage("zh-Hans;q=0.1,zh-Hant;q=0.3,en")
	a.Equal(tag, language.AmericanEnglish, "v1:%s, v2:%s", tag.String(), language.AmericanEnglish.String())
}
explode_data.jsonl/34202
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 5475, 35728, 13806, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 11, 895, 692, 1903, 10553, 1669, 501, 5475, 2877, 11, 609, 3798, 90, 5668, 25, 4128, 875, 69, 40454, 596, 3518, 2233, 1669, 43578, 59094, 100...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCapacityReconcile(t *testing.T) { ctx := context.Background() tests := []struct { name string existingCluster *clusterv1.ManagedCluster existinClusterInfo *clusterv1beta1.ManagedClusterInfo expectedCapacity clusterv1.ResourceList expectedNotFound bool }{ { name: "ManagedClusterNotFound", existingCluster: newCluster("bar", nil), existinClusterInfo: newClusterInfo("bar", nil, 1), expectedNotFound: true, }, { name: "ManagedClusterInfoNotFound", existingCluster: newCluster(ManagedClusterName, map[clusterv1.ResourceName]int64{"cpu": 1}), existinClusterInfo: newClusterInfo("bar", nil, 1), expectedCapacity: newCapacity(map[clusterv1.ResourceName]int64{"cpu": 1}), }, { name: "UpdateManagedClusterCapacity", existingCluster: newCluster(ManagedClusterName, map[clusterv1.ResourceName]int64{"cpu": 1}), existinClusterInfo: newClusterInfo(ManagedClusterName, map[string]bool{"node1": false}, 2), expectedCapacity: newCapacity(map[clusterv1.ResourceName]int64{"cpu": 1, "socket_worker": 0, "core_worker": 0}), }, { name: "UpdateManagedClusterCapacityWithWorker", existingCluster: newCluster(ManagedClusterName, map[clusterv1.ResourceName]int64{"cpu": 1}), existinClusterInfo: newClusterInfo(ManagedClusterName, map[string]bool{"node1": false, "node2": true}, 2), expectedCapacity: newCapacity(map[clusterv1.ResourceName]int64{"cpu": 1, "socket_worker": 2, "core_worker": 2}), }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { svrc, client := newTestCapacityReconciler(test.existinClusterInfo, test.existingCluster) svrc.Reconcile(ctx, reconcile.Request{NamespacedName: types.NamespacedName{Namespace: ManagedClusterName, Name: ManagedClusterName}}) actualCluster := &clusterv1.ManagedCluster{} err := client.Get(context.Background(), types.NamespacedName{Name: ManagedClusterName}, actualCluster) switch { case errors.IsNotFound(err): if !test.expectedNotFound { t.Errorf("unexpected err %v", err) } case err != nil: t.Errorf("unexpected err %v", err) } if 
!apiequality.Semantic.DeepEqual(actualCluster.Status.Capacity, test.expectedCapacity) { t.Errorf("unexpected capacity %v, %v", actualCluster.Status.Capacity, test.expectedCapacity) } }) } }
explode_data.jsonl/81956
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 987 }
[ 2830, 3393, 29392, 693, 40446, 457, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 78216, 1669, 3056, 1235, 341, 197, 11609, 2290, 914, 198, 197, 8122, 11083, 28678, 262, 353, 564, 590, 648, 16, 29902, 3279, 28678, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestMessageTransaction_ValidateExtended4 signs a message transaction
// (without a slave) and checks that extended validation fails whenever the
// signing OTS key index has been marked used, and passes again after the
// signer moves to a fresh OTS index.
func TestMessageTransaction_ValidateExtended4(t *testing.T) {
	/* Test for signing a message transaction without slave with a used ots key */
	xmss := helper.GetAliceXMSS(6) // Master XMSS
	message := "hello"
	fee := uint64(1)
	xmssPK := misc.UCharVectorToBytes(xmss.PK())
	messageTx := NewTestMessageTransaction(message, fee, xmssPK, nil)
	assert.NotNil(t, messageTx.tx)

	addrFromState := addressstate.GetDefaultAddressState(misc.UCharVectorToBytes(xmss.Address()))
	// Give the address a balance of 1 — presumably to cover the fee during
	// validation; confirm against ValidateExtended's balance check.
	addrFromState.AddBalance(1)

	messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
	assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))

	addrFromState.SetOTSKey(0) // Marked ots key 0 as used
	// Signed by a used ots key, validation must fail
	assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))

	// Re-sign with a fresh OTS index; validation succeeds again.
	xmss.SetOTSIndex(10)
	messageTx.tx.Sign(xmss, misc.BytesToUCharVector(messageTx.tx.GetHashableBytes()))
	assert.True(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))

	addrFromState.SetOTSKey(10) // Marked ots key 10 as used
	// Signed by a used ots key, validation must fail
	assert.False(t, messageTx.tx.ValidateExtended(addrFromState, addrFromState))
}
explode_data.jsonl/10392
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 426 }
[ 2830, 3393, 2052, 8070, 62, 17926, 53190, 19, 1155, 353, 8840, 836, 8, 341, 197, 3284, 197, 73866, 369, 15971, 264, 1943, 7745, 2041, 20362, 448, 458, 1483, 297, 2576, 1376, 198, 197, 3276, 10225, 76, 778, 1669, 13137, 2234, 61686, 52...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIterator(t *testing.T) { iter := newIterator(context.Background(), &TestFeed{max: 10}, func() interface{} { var i int64; return &i }()) expected := []int64{0, 1, 2, 3, 4, 5, 6, 7, 8, 9} result := []int64{} for iter.Next() { val, ok := iter.curVal.(*int64) if !ok { panic("Unexpected type") } result = append(result, *val) } if err := iter.Err(); err != nil { t.Errorf("Unexpected error: %s", err) } if d := diff.AsJSON(expected, result); d != nil { t.Errorf("Unexpected result:\n%s\n", d) } }
explode_data.jsonl/15117
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 11951, 1155, 353, 8840, 836, 8, 341, 79924, 1669, 501, 11951, 5378, 19047, 1507, 609, 2271, 28916, 90, 2810, 25, 220, 16, 15, 2137, 2915, 368, 3749, 6257, 314, 762, 600, 526, 21, 19, 26, 470, 609, 72, 335, 2398, 42400, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResolveMapParamUnknown(t *testing.T) { m := &Manifest{ Parameters: []ParameterDefinition{}, } s := &Step{ Description: "a test step", Data: map[string]interface{}{ "Parameters": map[string]interface{}{ "Thing": map[string]interface{}{ "source": "bundle.parameters.person", }, }, }, } err := m.ResolveStep(s) require.Error(t, err) assert.Equal(t, "unable to set value for Thing: no value found for source specification: bundle.parameters.person", err.Error()) }
explode_data.jsonl/10950
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 56808, 2227, 2001, 13790, 1155, 353, 8840, 836, 8, 1476, 2109, 1669, 609, 38495, 515, 197, 197, 9706, 25, 3056, 4971, 10398, 38837, 197, 630, 1903, 1669, 609, 8304, 515, 197, 47414, 25, 330, 64, 1273, 3019, 756, 197, 40927...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiplePositionals(t *testing.T) { var args struct { Input string `arg:"positional"` Multiple []string `arg:"positional,required"` } err := parse("foo a b c", &args) assert.NoError(t, err) assert.Equal(t, "foo", args.Input) assert.Equal(t, []string{"a", "b", "c"}, args.Multiple) }
explode_data.jsonl/13006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 32089, 3812, 1127, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 2036, 341, 197, 66588, 262, 914, 256, 1565, 858, 2974, 966, 3005, 8805, 197, 9209, 12229, 3056, 917, 1565, 858, 2974, 966, 3005, 11, 6279, 8805, 197, 532, 9859, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestHookMessageHasBeenPosted verifies that the MessageHasBeenPosted plugin
// hook fires when a post is created: the in-line plugin logs the post's
// message via p.API.LogDebug, and the mock API expects exactly that call.
func TestHookMessageHasBeenPosted(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()

	// Mock plugin API: the plugin is expected to log the posted message.
	var mockAPI plugintest.API
	mockAPI.On("LoadPluginConfiguration", mock.Anything).Return(nil)
	mockAPI.On("LogDebug", "message").Return(nil)

	// Compile and activate a one-off plugin whose hook logs post.Message.
	tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{
		`
		package main

		import (
			"github.com/blastbao/mattermost-server/plugin"
			"github.com/blastbao/mattermost-server/model"
		)

		type MyPlugin struct {
			plugin.MattermostPlugin
		}

		func (p *MyPlugin) MessageHasBeenPosted(c *plugin.Context, post *model.Post) {
			p.API.LogDebug(post.Message)
		}

		func main() {
			plugin.ClientMain(&MyPlugin{})
		}
	`}, th.App, func(*model.Manifest) plugin.API { return &mockAPI })
	defer tearDown()

	post := &model.Post{
		UserId:    th.BasicUser.Id,
		ChannelId: th.BasicChannel.Id,
		Message:   "message",
		// NOTE(review): the post is backdated by 10s — presumably to avoid
		// create-time ordering flakiness; confirm why this offset is needed.
		CreateAt: model.GetMillis() - 10000,
	}
	_, err := th.App.CreatePost(post, th.BasicChannel, false)
	if err != nil {
		t.Fatal(err)
	}
}
explode_data.jsonl/30304
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 31679, 2052, 87785, 17372, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 2405, 7860, 7082, 19633, 396, 477, 24922, 198, 77333, 7082, 8071, 445, 5879, 11546, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTarWithHardLinkAndRebase tars a directory containing a hard-linked file
// pair, rebases the archive entries onto a new destination name, untars, and
// verifies the two extracted files still share a single inode.
func TestTarWithHardLinkAndRebase(t *testing.T) {
	tmpDir, err := ioutil.TempDir("", "docker-test-tar-hardlink-rebase")
	assert.NilError(t, err)
	defer os.RemoveAll(tmpDir)

	origin := filepath.Join(tmpDir, "origin")
	err = os.Mkdir(origin, 0700)
	assert.NilError(t, err)
	err = ioutil.WriteFile(filepath.Join(origin, "1"), []byte("hello world"), 0700)
	assert.NilError(t, err)
	// "2" is a hard link to "1".
	err = os.Link(filepath.Join(origin, "1"), filepath.Join(origin, "2"))
	assert.NilError(t, err)

	var i1, i2 uint64
	i1, err = getNlink(filepath.Join(origin, "1"))
	assert.NilError(t, err)
	// sanity check that we can hardlink
	if i1 != 2 {
		t.Skipf("skipping since hardlinks don't work here; expected 2 links, got %d", i1)
	}

	dest := filepath.Join(tmpDir, "dest")
	bRdr, err := TarResourceRebase(origin, "origin")
	assert.NilError(t, err)

	// Rewrite entry names from the origin base name to the destination's.
	dstDir, srcBase := SplitPathDirEntry(origin)
	_, dstBase := SplitPathDirEntry(dest)
	content := RebaseArchiveEntries(bRdr, srcBase, dstBase)

	err = Untar(content, dstDir, &TarOptions{Compression: Uncompressed, NoLchown: true, NoOverwriteDirNonDir: true})
	assert.NilError(t, err)

	// After extraction both names must resolve to the same inode.
	i1, err = getInode(filepath.Join(dest, "1"))
	assert.NilError(t, err)
	i2, err = getInode(filepath.Join(dest, "2"))
	assert.NilError(t, err)
	assert.Check(t, is.Equal(i1, i2))
}
explode_data.jsonl/81929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 520 }
[ 2830, 3393, 62733, 2354, 26907, 3939, 3036, 693, 3152, 1155, 353, 8840, 836, 8, 341, 20082, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 28648, 16839, 2385, 277, 70745, 2080, 5504, 3152, 1138, 6948, 59678, 1454, 1155, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReaderWithInvalidBuffer(t *testing.T) { b := realByteSlice([]byte{0x81, 0x81, 0x81, 0x81, 0x81, 0x81}) r := &Reader{b: b} db := r.decbufUvarintAt(0) testutil.NotOk(t, db.err()) }
explode_data.jsonl/23981
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 5062, 2354, 7928, 4095, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 1931, 7153, 33236, 10556, 3782, 90, 15, 87, 23, 16, 11, 220, 15, 87, 23, 16, 11, 220, 15, 87, 23, 16, 11, 220, 15, 87, 23, 16, 11, 220, 15, 87, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLikeLikeMissionBuff calls LikeMissionBuff on the package-level test DAO
// with a fixed subject id and member id, expecting no error and a non-nil
// result. NOTE(review): exercises whatever backend d is wired to in test
// setup — confirm it is deterministic in CI.
func TestLikeLikeMissionBuff(t *testing.T) {
	convey.Convey("LikeMissionBuff", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			id  = int64(10256)
			mid = int64(77)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			res, err := d.LikeMissionBuff(c, id, mid)
			ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}
explode_data.jsonl/11262
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 12949, 12949, 55951, 34261, 1155, 353, 8840, 836, 8, 341, 37203, 5617, 4801, 5617, 445, 12949, 55951, 34261, 497, 259, 11, 2915, 7502, 20001, 727, 8, 341, 197, 2405, 2399, 298, 1444, 256, 284, 2266, 19047, 741, 298, 15710, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMD5(t *testing.T) { uuid := NewMD5(NameSpace_DNS, []byte("python.org")).String() want := "6fa459ea-ee8a-3ca4-894e-db77e160355e" if uuid != want { t.Errorf("MD5: got %q expected %q", uuid, want) } }
explode_data.jsonl/31942
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 6076, 20, 1155, 353, 8840, 836, 8, 341, 10676, 2423, 1669, 1532, 6076, 20, 21182, 9914, 1557, 2448, 11, 3056, 3782, 445, 12669, 2659, 15197, 703, 741, 50780, 1669, 330, 21, 3632, 19, 20, 24, 12508, 12, 2127, 23, 64, 12, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSqrt(t *testing.T) { f := NewSqrt( sql.NewEmptyContext(), expression.NewGetField(0, sql.Float64, "n", false), ) testCases := []struct { name string row sql.Row expected interface{} err bool }{ {"null input", sql.NewRow(nil), nil, false}, {"invalid string", sql.NewRow("foo"), nil, true}, {"valid string", sql.NewRow("9"), float64(3), false}, {"number is zero", sql.NewRow(0), float64(0), false}, {"positive number", sql.NewRow(8), float64(2.8284271247461903), false}, } for _, tt := range testCases { t.Run(tt.name, func(t *testing.T) { t.Helper() require := require.New(t) ctx := sql.NewEmptyContext() v, err := f.Eval(ctx, tt.row) if tt.err { require.Error(err) } else { require.NoError(err) require.Equal(tt.expected, v) } }) } // Test negative number f = NewSqrt( sql.NewEmptyContext(), expression.NewGetField(0, sql.Float64, "n", false), ) require := require.New(t) v, err := f.Eval(sql.NewEmptyContext(), []interface{}{float64(-4)}) require.NoError(err) require.IsType(float64(0), v) require.True(math.IsNaN(v.(float64))) }
explode_data.jsonl/21976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 498 }
[ 2830, 3393, 50, 8140, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 50, 8140, 1006, 197, 30633, 7121, 3522, 1972, 3148, 197, 8122, 4011, 7121, 1949, 1877, 7, 15, 11, 5704, 29794, 21, 19, 11, 330, 77, 497, 895, 1326, 197, 340, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestHDWalletSignOK spins up a stub HD-wallet HTTP service that serves a
// locally generated key pair, then checks that signing through the hdWallet
// signer yields an EIP-155 transaction whose recovered sender is that key's
// address.
func TestHDWalletSignOK(t *testing.T) {
	assert := assert.New(t)

	key, _ := ecrypto.GenerateKey()
	addr := ecrypto.PubkeyToAddress(key.PublicKey)

	// Stub wallet endpoint: asserts the templated request path and returns
	// the generated address and hex-encoded private key as JSON.
	svr := httptest.NewServer(http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
		assert.Equal("/testinst/api/v1/testwallet/1234", req.URL.Path)
		res.WriteHeader(200)
		res.Write([]byte(`
		{
			"addr": "` + addr.String() + `",
			"key": "` + hex.EncodeToString(ecrypto.FromECDSA(key)) + `"
		}`))
	}))
	defer svr.Close()

	// "hd-<instance>-<wallet>-<index>" must parse as an HD wallet request.
	hdr := IsHDWalletRequest("hd-testinst-testwallet-1234")
	assert.NotNil(hdr)

	hd := newHDWallet(&HDWalletConf{
		URLTemplate: svr.URL + "/{{.InstanceID}}/api/v1/{{.WalletID}}/{{.Index}}",
		ChainID:     "12345",
		PropNames: HDWalletConfPropNames{
			Address:    "addr",
			PrivateKey: "key",
		},
	}).(*hdWallet)

	s, err := hd.SignerFor(hdr)
	assert.NoError(err)
	assert.Equal(s.Type(), "HD Wallet")
	assert.Equal(addr.String(), s.Address())

	tx := types.NewContractCreation(12345, big.NewInt(0), 0, big.NewInt(0), []byte("hello world"))
	signed, err := s.Sign(tx)
	assert.NoError(err)

	// Decode the signed RLP and recover the sender under chain ID 12345.
	eip155 := types.NewEIP155Signer(big.NewInt(12345))
	tx2 := &types.Transaction{}
	err = tx2.DecodeRLP(rlp.NewStream(bytes.NewReader(signed), 0))
	assert.NoError(err)
	sender, err := eip155.Sender(tx2)
	assert.NoError(err)
	assert.Equal(addr, sender)
}
explode_data.jsonl/33503
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 574 }
[ 2830, 3393, 19147, 38259, 7264, 3925, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 23634, 11, 716, 1669, 11942, 9444, 57582, 1592, 741, 53183, 1669, 11942, 9444, 1069, 392, 792, 1249, 4286, 4857, 49139, 1592, 692, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseOptDomainSearchListInvalidLength(t *testing.T) { data := []byte{ 7, 'e', 'x', 'a', 'm', 'p', 'l', 'e', 3, 'c', 'o', 'm', 0, 6, 's', 'u', 'b', 'n', 'e', 't', 7, 'e', // truncated } _, err := ParseOptDomainSearchList(data) require.Error(t, err, "A truncated OptDomainSearchList should return an error") }
explode_data.jsonl/53133
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 14463, 21367, 13636, 5890, 852, 7928, 4373, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 515, 197, 197, 22, 11, 364, 68, 516, 364, 87, 516, 364, 64, 516, 364, 76, 516, 364, 79, 516, 364, 75, 516, 364, 68, 51...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHbdmSwap_GetFutureOrder(t *testing.T) { t.Log(swap.GetFutureOrder("784118017750929408", goex.NewCurrencyPair2("DOT_USD"), goex.SWAP_CONTRACT)) }
explode_data.jsonl/43256
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 39, 8940, 76, 46179, 13614, 24206, 4431, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 1141, 21726, 2234, 24206, 4431, 445, 22, 23, 19, 16, 16, 23, 15, 16, 22, 22, 20, 15, 24, 17, 24, 19, 15, 23, 497, 728, 327, 7121, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestAuthenticate walks a visitor through authenticate / re-authenticate /
// unauthenticate transitions, checking how ID, AnonymousID and Context change
// at each step, then checks the WithAuthenticated construction option.
func TestAuthenticate(t *testing.T) {
	context := map[string]interface{}{}
	visitor := createVisitor("firstID", context)

	// Authenticating swaps in the new ID and preserves the original as the
	// anonymous ID.
	err := visitor.Authenticate("newID", nil, false)
	assert.Nil(t, err)
	assert.Equal(t, "newID", visitor.ID)
	assert.Equal(t, "firstID", *visitor.AnonymousID)

	newContext := model.Context{
		"test": "string",
	}
	// Re-authenticating replaces the ID and context; AnonymousID still holds
	// the original ID. NOTE(review): this call's error is ignored — confirm
	// whether re-authentication can fail here.
	visitor.Authenticate("newerID", newContext, false)
	assert.Equal(t, "newerID", visitor.ID)
	assert.Equal(t, newContext, visitor.Context)
	assert.Equal(t, "firstID", *visitor.AnonymousID)

	// Unauthenticating (in API decision mode) restores the anonymous ID as
	// the visitor ID and clears AnonymousID.
	visitor.decisionMode = API
	newContext = model.Context{
		"test2": "string",
	}
	err = visitor.Unauthenticate(newContext, false)
	assert.Nil(t, err)
	assert.Equal(t, "firstID", visitor.ID)
	assert.Equal(t, newContext, visitor.Context)
	assert.Nil(t, visitor.AnonymousID)

	// WithAuthenticated(true) pre-populates an anonymous ID at creation.
	visitor = createVisitor("firstID", context, WithAuthenticated(false))
	assert.Nil(t, visitor.AnonymousID)
	visitor = createVisitor("firstID", context, WithAuthenticated(true))
	assert.NotNil(t, visitor.AnonymousID)
}
explode_data.jsonl/12287
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 370 }
[ 2830, 3393, 99087, 1155, 353, 8840, 836, 8, 341, 28413, 1669, 2415, 14032, 31344, 6257, 16094, 197, 39985, 1669, 1855, 16796, 445, 3896, 915, 497, 2266, 340, 9859, 1669, 20181, 25233, 22661, 445, 931, 915, 497, 2092, 11, 895, 340, 6948,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindVarsFilesNotExist(t *testing.T) { td := testutil.TempDir(t) defer os.RemoveAll(td) if _, err := findVarsFiles(filepath.Join(td, "no_such_dir")); err != nil { t.Fatal("findVarsFiles failed: ", err) } }
explode_data.jsonl/61585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 9885, 28305, 10809, 45535, 1155, 353, 8840, 836, 8, 341, 76373, 1669, 1273, 1314, 65009, 6184, 1155, 340, 16867, 2643, 84427, 61241, 692, 743, 8358, 1848, 1669, 1477, 28305, 10809, 34793, 22363, 61241, 11, 330, 2152, 643, 1387...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestSelectHost exercises genericScheduler.selectHost: given a priority
// list it must pick one of the top-scoring hosts (ties broken randomly) and
// return an error for an empty list. Each case runs 10 times so random tie
// breaking is actually exercised.
func TestSelectHost(t *testing.T) {
	scheduler := genericScheduler{}
	tests := []struct {
		name          string
		list          schedulerapi.HostPriorityList
		possibleHosts sets.String
		expectsErr    bool
	}{
		{
			name: "unique properly ordered scores",
			list: []schedulerapi.HostPriority{
				{Host: "machine1.1", Score: 1},
				{Host: "machine2.1", Score: 2},
			},
			possibleHosts: sets.NewString("machine2.1"),
			expectsErr:    false,
		},
		{
			// Three hosts tie at the top score; any of them is acceptable.
			name: "equal scores",
			list: []schedulerapi.HostPriority{
				{Host: "machine1.1", Score: 1},
				{Host: "machine1.2", Score: 2},
				{Host: "machine1.3", Score: 2},
				{Host: "machine2.1", Score: 2},
			},
			possibleHosts: sets.NewString("machine1.2", "machine1.3", "machine2.1"),
			expectsErr:    false,
		},
		{
			// Top scores are not contiguous in the input ordering.
			name: "out of order scores",
			list: []schedulerapi.HostPriority{
				{Host: "machine1.1", Score: 3},
				{Host: "machine1.2", Score: 3},
				{Host: "machine2.1", Score: 2},
				{Host: "machine3.1", Score: 1},
				{Host: "machine1.3", Score: 3},
			},
			possibleHosts: sets.NewString("machine1.1", "machine1.2", "machine1.3"),
			expectsErr:    false,
		},
		{
			name:          "empty priority list",
			list:          []schedulerapi.HostPriority{},
			possibleHosts: sets.NewString(),
			expectsErr:    true,
		},
	}

	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			// increase the randomness
			for i := 0; i < 10; i++ {
				got, err := scheduler.selectHost(test.list)
				if test.expectsErr {
					if err == nil {
						t.Error("Unexpected non-error")
					}
				} else {
					if err != nil {
						t.Errorf("Unexpected error: %v", err)
					}
					if !test.possibleHosts.Has(got) {
						t.Errorf("got %s is not in the possible map %v", got, test.possibleHosts)
					}
				}
			}
		})
	}
}
explode_data.jsonl/6743
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 866 }
[ 2830, 3393, 3379, 9296, 1155, 353, 8840, 836, 8, 341, 1903, 15222, 1669, 13954, 38878, 16094, 78216, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 14440, 688, 28809, 2068, 29840, 20555, 852, 198, 197, 197, 10031, 9296, 82, 7289...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestParseFilterQueryParam(t *testing.T) { type args struct { in string } tests := []struct { name string args args want octant.Filter wantErr bool }{ { name: "valid", args: args{"foo:bar"}, want: octant.Filter{ Key: "foo", Value: "bar", }, wantErr: false, }, { name: "invalid", args: args{"foobar"}, want: octant.Filter{}, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := api.ParseFilterQueryParam(tt.args.in) if (err != nil) != tt.wantErr { t.Errorf("ParseFilterQueryParam() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("ParseFilterQueryParam() got = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/48850
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 397 }
[ 2830, 3393, 14463, 5632, 84085, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 17430, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 2827, 198, 197, 50780, 262, 18491, 517, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLruCache_Get(t *testing.T) { format := func(idx int) string { return strings.Repeat(fmt.Sprintf("%016d", idx), 16) } l := NewLRUCache(1024 * 1024) for idx := 0; idx < 100000; idx++ { l.Set(format(idx)) } if l.Size() > 2*1024*1024 { t.Fatal(l.Size()) } if l.Count() > 5000 { t.Fatal(l.Size()) } for idx := 99000; idx < 100000; idx++ { if l.Get(format(idx)) == "" { t.Fatal("Item should be in the cache: " + format(idx)) } } }
explode_data.jsonl/46296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 43, 2672, 8233, 13614, 1155, 353, 8840, 836, 8, 341, 59416, 1669, 2915, 19778, 526, 8, 914, 341, 197, 853, 9069, 2817, 10979, 28197, 17305, 4430, 15, 16, 21, 67, 497, 7187, 701, 220, 16, 21, 340, 197, 630, 8810, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestParseVersion checks ParseVersion against version.Info inputs covering
// plain Major/Minor numbers, a "+" suffix, fallbacks to GitVersion when the
// numeric fields are empty or partial, and malformed values that must error.
func TestParseVersion(t *testing.T) {
	tests := []struct {
		input    version.Info
		expected ServerVersion
		error    bool
	}{
		{
			input:    version.Info{Major: "1", Minor: "6"},
			expected: ServerVersion{Major: 1, Minor: 6},
		},
		{
			input:    version.Info{Major: "1", Minor: "70"},
			expected: ServerVersion{Major: 1, Minor: 70},
		},
		{
			// Non-numeric minor must be rejected.
			input: version.Info{Major: "1", Minor: "6x"},
			error: true,
		},
		{
			// A trailing "+" (as some providers report) is tolerated.
			input:    version.Info{Major: "1", Minor: "8+"},
			expected: ServerVersion{Major: 1, Minor: 8},
		},
		{
			// Empty Major/Minor fall back to parsing GitVersion.
			input:    version.Info{Major: "", Minor: "", GitVersion: "v1.8.0"},
			expected: ServerVersion{Major: 1, Minor: 8},
		},
		{
			input:    version.Info{Major: "1", Minor: "", GitVersion: "v1.8.0"},
			expected: ServerVersion{Major: 1, Minor: 8},
		},
		{
			input:    version.Info{Major: "", Minor: "8", GitVersion: "v1.8.0"},
			expected: ServerVersion{Major: 1, Minor: 8},
		},
		{
			// Pre-release suffix in GitVersion is ignored.
			input:    version.Info{Major: "", Minor: "", GitVersion: "v1.8.8-test.0"},
			expected: ServerVersion{Major: 1, Minor: 8},
		},
		{
			// Explicit Major/Minor win over a newer GitVersion.
			input:    version.Info{Major: "1", Minor: "8", GitVersion: "v1.9.0"},
			expected: ServerVersion{Major: 1, Minor: 8},
		},
		{
			input: version.Info{Major: "", Minor: "", GitVersion: "v1.a"},
			error: true,
		},
	}
	for _, test := range tests {
		v, err := ParseVersion(&test.input)
		if test.error {
			if err == nil {
				t.Errorf("test %s should have failed and did not", test.input)
			}
			continue
		}
		if err != nil {
			t.Errorf("test %v failed: %v", test.input, err)
			continue
		}
		if v != test.expected {
			t.Errorf("Expected %v, got %v", test.expected, v)
		}
	}
}
explode_data.jsonl/10847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 731 }
[ 2830, 3393, 14463, 5637, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 22427, 262, 2319, 20132, 198, 197, 42400, 8422, 5637, 198, 197, 18290, 262, 1807, 198, 197, 59403, 197, 197, 515, 298, 22427, 25, 262, 2319, 201...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestRollDPoSCtx exercises the epoch/sub-epoch bookkeeping helpers of a
// rollDPoS consensus context built against a mocked blockchain at tip
// height 8 with four delegate candidates: epoch/height calculation,
// rolling-delegate selection, proposer rotation, block-interval duration,
// and quorum counting.
func TestRollDPoSCtx(t *testing.T) {
	t.Parallel()
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	// Four candidate addresses taken from the shared test address fixtures.
	candidates := make([]string, 4)
	for i := 0; i < len(candidates); i++ {
		candidates[i] = testAddrs[i].RawAddress
	}
	clock := clock.NewMock()
	var prevHash hash.Hash32B
	// Block at height 8 timestamped from the mock clock; it becomes the
	// "last block" for the duration calculation below.
	blk := blockchain.NewBlock(
		1,
		8,
		prevHash,
		testutil.TimestampNowFromClock(clock),
		testAddrs[0].PublicKey,
		make([]action.Action, 0),
	)
	ctx := makeTestRollDPoSCtx(
		testAddrs[0],
		ctrl,
		config.RollDPoS{
			NumSubEpochs: 1,
			NumDelegates: 4,
			EnableDKG:    true,
		},
		func(blockchain *mock_blockchain.MockBlockchain) {
			// The context is expected to query the tip height four times and
			// fetch block 8 plus the candidate list exactly once each.
			blockchain.EXPECT().TipHeight().Return(uint64(8)).Times(4)
			blockchain.EXPECT().GetBlockByHeight(uint64(8)).Return(blk, nil).Times(1)
			blockchain.EXPECT().CandidatesByHeight(gomock.Any()).Return([]*state.Candidate{
				{Address: candidates[0]},
				{Address: candidates[1]},
				{Address: candidates[2]},
				{Address: candidates[3]},
			}, nil).Times(1)
		},
		func(_ *mock_actpool.MockActPool) {},
		func(_ *mock_network.MockOverlay) {},
		clock,
	)
	// With 4 delegates and 1 sub-epoch, height 8 falls into epoch 2, whose
	// next block height is 9.
	epoch, height, err := ctx.calcEpochNumAndHeight()
	require.NoError(t, err)
	assert.Equal(t, uint64(2), epoch)
	assert.Equal(t, uint64(9), height)
	ctx.epoch.height = height
	subEpoch, err := ctx.calcSubEpochNum()
	require.NoError(t, err)
	assert.Equal(t, uint64(0), subEpoch)
	// Rolling delegates must match the candidate list sorted with the same
	// seed and epoch number.
	ctx.epoch.seed = crypto.CryptoSeed
	delegates, err := ctx.rollingDelegates(epoch)
	require.NoError(t, err)
	crypto.SortCandidates(candidates, epoch, crypto.CryptoSeed)
	assert.Equal(t, candidates, delegates)
	ctx.epoch.num = epoch
	ctx.epoch.height = height
	ctx.epoch.numSubEpochs = 2
	ctx.epoch.delegates = delegates
	// Proposer rotation for height 9 picks the second sorted delegate at
	// round 0.
	proposer, height, round, err := ctx.rotatedProposer()
	require.NoError(t, err)
	assert.Equal(t, candidates[1], proposer)
	assert.Equal(t, uint64(9), height)
	assert.Equal(t, uint32(0), round)
	// Advance the mock clock: duration since the last block must track it.
	clock.Add(time.Second)
	duration, err := ctx.calcDurationSinceLastBlock()
	require.NoError(t, err)
	assert.Equal(t, time.Second, duration)
	// Quorum cases: 3/4 yes reaches quorum; 3/4 no rejects; a 2-2 split
	// also rejects.
	yes, no := ctx.calcQuorum(map[string]bool{
		candidates[0]: true,
		candidates[1]: true,
		candidates[2]: true,
	})
	assert.True(t, yes)
	assert.False(t, no)
	yes, no = ctx.calcQuorum(map[string]bool{
		candidates[0]: false,
		candidates[1]: false,
		candidates[2]: false,
	})
	assert.False(t, yes)
	assert.True(t, no)
	yes, no = ctx.calcQuorum(map[string]bool{
		candidates[0]: true,
		candidates[1]: true,
		candidates[2]: false,
		candidates[3]: false,
	})
	assert.False(t, yes)
	assert.True(t, no)
}
explode_data.jsonl/48871
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1073 }
[ 2830, 3393, 32355, 10298, 78, 3540, 3998, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 1444, 26222, 1669, 1281, 10556, 917, 11, 220, 19, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVisit(t *testing.T) { ctx := setupVisitTest(t) topModule := ctx.moduleGroupFromName("A", nil).modules.firstModule().logicModule.(*visitModule) assertString(t, topModule.properties.VisitDepsDepthFirst, "FEDCB") assertString(t, topModule.properties.VisitDepsDepthFirstIf, "FEDC") assertString(t, topModule.properties.VisitDirectDeps, "B") assertString(t, topModule.properties.VisitDirectDepsIf, "") eModule := ctx.moduleGroupFromName("E", nil).modules.firstModule().logicModule.(*visitModule) assertString(t, eModule.properties.VisitDepsDepthFirst, "F") assertString(t, eModule.properties.VisitDepsDepthFirstIf, "F") assertString(t, eModule.properties.VisitDirectDeps, "FF") assertString(t, eModule.properties.VisitDirectDepsIf, "FF") }
explode_data.jsonl/56882
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 26218, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 6505, 26218, 2271, 1155, 692, 42118, 3332, 1669, 5635, 10076, 2808, 3830, 675, 445, 32, 497, 2092, 568, 11525, 7389, 3332, 1005, 24225, 3332, 41399, 27460, 3332, 340, 6948, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInterpreterSlicing(t *testing.T) { s, err := parseFile("src/parse/asp/test_data/interpreter/slicing.build") require.NoError(t, err) assert.Equal(t, pyInt(2), s.Lookup("a")) assert.Equal(t, pyList{pyInt(2), pyInt(3)}, s.Lookup("b")) assert.Equal(t, pyList{pyInt(1)}, s.Lookup("c")) assert.Equal(t, pyList{pyInt(2)}, s.Lookup("d")) assert.Equal(t, pyInt(3), s.Lookup("e")) assert.Equal(t, pyList{pyInt(1), pyInt(2)}, s.Lookup("f")) assert.Equal(t, pyList{pyInt(1), pyInt(2), pyInt(3)}, s.Lookup("g")) }
explode_data.jsonl/81065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 58426, 50, 89114, 1155, 353, 8840, 836, 8, 341, 1903, 11, 1848, 1669, 4715, 1703, 445, 3548, 14, 6400, 14, 13367, 12697, 1769, 14, 90554, 2687, 89114, 13239, 1138, 17957, 35699, 1155, 11, 1848, 340, 6948, 12808, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPut(t *testing.T) { ctx := setupTest(t) defer ctx.teardownTest() // regular case err := ctx.dataBroker.Put("key", []byte("data")) Expect(err).ShouldNot(HaveOccurred()) Expect(ctx.mockKV.mem["key"]).To(Equal("data")) // error case ctx.mockKV.shouldFail = true err = ctx.dataBroker.Put("key", []byte("data")) Expect(err).Should(HaveOccurred()) Expect(err.Error()).To(BeEquivalentTo("test-error")) }
explode_data.jsonl/29429
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 19103, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 6505, 2271, 1155, 340, 16867, 5635, 31853, 37496, 2271, 2822, 197, 322, 5792, 1142, 198, 9859, 1669, 5635, 2196, 65545, 39825, 445, 792, 497, 3056, 3782, 445, 691, 5455, 3591...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLiveMigration_CreateLinkToken(t *testing.T) { client, mux, teardown := setup() defer teardown() mux.HandleFunc(fmt.Sprintf("/api/atlas/v1.0/orgs/%s/liveMigrations/linkTokens", orgID), func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, http.MethodPost) fmt.Fprint(w, `{ "linkToken": "test" }`) }) body := &TokenCreateRequest{ AccessListIPs: []string{"test"}, } response, _, err := client.LiveMigration.CreateLinkToken(ctx, orgID, body) if err != nil { t.Fatalf("LiveMigration.CreateLinkToken returned error: %v", err) } expected := &LinkToken{ LinkToken: "test", } if diff := deep.Equal(response, expected); diff != nil { t.Error(diff) } }
explode_data.jsonl/14790
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 20324, 20168, 34325, 3939, 3323, 1155, 353, 8840, 836, 8, 341, 25291, 11, 59807, 11, 49304, 1669, 6505, 741, 16867, 49304, 2822, 2109, 2200, 63623, 28197, 17305, 4283, 2068, 80730, 14493, 5457, 16, 13, 15, 41361, 82, 12627, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequestWrite(t *testing.T) { for _, c := range casesRequest { t.Run(c.name, func(t *testing.T) { var buf bytes.Buffer bw := bufio.NewWriter(&buf) err := c.req.Write(bw) require.NoError(t, err) // do NOT call flush(), write() must have already done it require.Equal(t, c.byts, buf.Bytes()) }) } }
explode_data.jsonl/62586
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 1900, 7985, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 272, 1669, 2088, 5048, 1900, 341, 197, 3244, 16708, 1337, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 2405, 6607, 5820, 22622, 198, 298, 2233, 86, 1669, 96917, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddBootEntryMultiple(t *testing.T) { vpdDir := t.TempDir() err := os.MkdirAll(path.Join(vpdDir, "rw"), 0o700) if err != nil { t.Errorf(`os.MkdirAll(path.Join(%q, "rw"), 0o700) = %v, want nil`, vpdDir, err) } defer os.RemoveAll(vpdDir) for i := 1; i < 5; i++ { if err := addBootEntry(&systembooter.LocalBooter{ Method: "grub", }, vpdDir); err != nil { t.Errorf(`addBootEntry(&systembooter.LocalBooter{Method: "grub"}, %q) = %v, want nil`, vpdDir, err) } file, err := os.ReadFile(path.Join(vpdDir, "rw", fmt.Sprintf("Boot%04d", i))) if err != nil { t.Errorf(`os.ReadFile(path.Join(%q, "rw", fmt.Sprintf("Boot%04d", i))) = %v, want nil`, vpdDir, i, err) } var out systembooter.LocalBooter if err := json.Unmarshal([]byte(file), &out); err != nil { t.Errorf(`json.Unmarshal([]byte(%q), %v) = %v, want nil`, file, &out, err) } if out.Method != "grub" { t.Errorf(`out.Method = %q, want grub`, out.Method) } } }
explode_data.jsonl/25082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 2212, 17919, 5874, 32089, 1155, 353, 8840, 836, 8, 341, 5195, 15360, 6184, 1669, 259, 65009, 6184, 741, 9859, 1669, 2643, 1321, 12438, 2403, 5581, 22363, 3747, 15360, 6184, 11, 330, 31768, 3975, 220, 15, 78, 22, 15, 15, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func Test_AddIndex_Migration(t *testing.T) { argsss := []string{"add", "ai", "test123", "first_col", "second_col", "third_col"} fileName, mm, _ := generateMigration(argsss) expectedString := `{"id":"` + getID(fileName) + `","up":{"addIndex":[{"tableName":"test123","columns":[{"fieldname":"first_col"},{"fieldname":"second_col"},{"fieldname":"third_col"}]}]},"down":{"dropIndex":[{"tableName":"test123","columns":[{"fieldname":"first_col"},{"fieldname":"second_col"},{"fieldname":"third_col"}]}]}}` content1, _ := json.Marshal(mm) checkError(t, expectedString, string(content1)) }
explode_data.jsonl/22636
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 21346, 1552, 1245, 5033, 1155, 353, 8840, 836, 8, 341, 47903, 84160, 1669, 3056, 917, 4913, 718, 497, 330, 2143, 497, 330, 1944, 16, 17, 18, 497, 330, 3896, 10211, 497, 330, 5569, 10211, 497, 330, 31727, 10211, 16707, 1766...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestHitCondBreakpointGEQ checks that a breakpoint with hit condition
// ">= 3" first fires on its third hit and then on every subsequent hit:
// the loop variable observed at each stop must track the hit count from 3
// through 10.
func TestHitCondBreakpointGEQ(t *testing.T) {
	protest.AllowRecording(t)
	withTestProcess("break", t, func(p *proc.Target, fixture protest.Fixture) {
		bp := setFileBreakpoint(p, t, fixture.Source, 7)
		// Only stop once the hit count satisfies: count >= 3.
		bp.UserBreaklet().HitCond = &struct {
			Op  token.Token
			Val int
		}{token.GEQ, 3}
		for it := 3; it <= 10; it++ {
			assertNoError(p.Continue(), t, "Continue()")
			// "i" is the loop counter in the fixture; it must equal the
			// expected hit count at each stop.
			ivar := evalVariable(p, t, "i")
			i, _ := constant.Int64Val(ivar.Value)
			if int(i) != it {
				t.Fatalf("Stopped on wrong hitcount %d\n", i)
			}
		}
		// One final Continue to let the target run past the breakpoint.
		assertNoError(p.Continue(), t, "Continue()")
	})
}
explode_data.jsonl/56241
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 19498, 49696, 22524, 2768, 10777, 48, 1155, 353, 8840, 836, 8, 341, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 8960, 497, 259, 11, 2915, 1295, 353, 15782, 35016, 11, 12507, 8665, 991, 12735, 8, 341, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNodes_Swap(t *testing.T) { nodes := Nodes{ &Node{val: []byte("a")}, &Node{val: []byte("b")}, } expi := nodes[1] expj := nodes[0] nodes.Swap(0, 1) if nodes[0] != expi { t.Errorf("i should have been %s", expi.val) } if nodes[1] != expj { t.Errorf("j should have been %s", expi.val) } }
explode_data.jsonl/57988
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 12288, 1098, 21726, 1155, 353, 8840, 836, 8, 341, 79756, 1669, 52501, 515, 197, 197, 5, 1955, 90, 831, 25, 3056, 3782, 445, 64, 79583, 197, 197, 5, 1955, 90, 831, 25, 3056, 3782, 445, 65, 79583, 197, 630, 8122, 2493, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParseTypes(t *testing.T) { checkParseType(t, "int", "int") checkParseType(t, "(int, real)", "(tuple-type int real)") checkParseType(t, "(int)", "(tuple-type int)") checkParseType(t, "{string: bool}", "(map-type string bool)") checkParseType(t, "[[int]]", "(list-type (list-type int))") checkParseType(t, "{[[int]]: ({real: real})}", "(map-type (list-type (list-type int)) (tuple-type (map-type real real)))") checkParseType(t, "func (int, int) -> int", "(function-type (int int) int)") checkParseType(t, "func (int) -> int", "(function-type (int) int)") checkParseType(t, "func (int)", "(function-type (int))") checkParseType(t, "func () -> int", "(function-type () int)") checkParseType(t, "func ()", "(function-type ())") }
explode_data.jsonl/33530
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 290 }
[ 2830, 3393, 14463, 4173, 1155, 353, 8840, 836, 8, 341, 25157, 14463, 929, 1155, 11, 330, 396, 497, 330, 396, 1138, 25157, 14463, 929, 1155, 11, 11993, 396, 11, 1931, 11583, 11993, 24590, 10604, 526, 1931, 19107, 25157, 14463, 929, 1155,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNumFeasibleNodesToFind(t *testing.T) { tests := []struct { name string percentageOfNodesToScore int32 numAllNodes int32 wantNumNodes int32 }{ { name: "not set percentageOfNodesToScore and nodes number not more than 50", numAllNodes: 10, wantNumNodes: 10, }, { name: "set percentageOfNodesToScore and nodes number not more than 50", percentageOfNodesToScore: 40, numAllNodes: 10, wantNumNodes: 10, }, { name: "not set percentageOfNodesToScore and nodes number more than 50", numAllNodes: 1000, wantNumNodes: 420, }, { name: "set percentageOfNodesToScore and nodes number more than 50", percentageOfNodesToScore: 40, numAllNodes: 1000, wantNumNodes: 400, }, { name: "not set percentageOfNodesToScore and nodes number more than 50*125", numAllNodes: 6000, wantNumNodes: 300, }, { name: "set percentageOfNodesToScore and nodes number more than 50*125", percentageOfNodesToScore: 40, numAllNodes: 6000, wantNumNodes: 2400, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { g := &genericScheduler{ percentageOfNodesToScore: tt.percentageOfNodesToScore, } if gotNumNodes := g.numFeasibleNodesToFind(tt.numAllNodes); gotNumNodes != tt.wantNumNodes { t.Errorf("genericScheduler.numFeasibleNodesToFind() = %v, want %v", gotNumNodes, tt.wantNumNodes) } }) } }
explode_data.jsonl/2398
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 802 }
[ 2830, 3393, 4651, 6123, 300, 1238, 12288, 1249, 9885, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 3824, 914, 198, 197, 197, 40550, 2124, 12288, 1249, 10570, 526, 18, 17, 198, 197, 22431, 2403, 12288, 1060, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMountingSimilarPattern(t *testing.T) { r := NewRouter() r.Get("/hi", HandlerFunc(func(ctx context.Context, rc *fasthttp.RequestCtx) { rc.Write([]byte("bye")) })) r2 := NewRouter() r2.Get("/", HandlerFunc(func(ctx context.Context, rc *fasthttp.RequestCtx) { rc.Write([]byte("foobar")) })) r3 := NewRouter() r3.Get("/", HandlerFunc(func(ctx context.Context, rc *fasthttp.RequestCtx) { rc.Write([]byte("foo")) })) r.Mount("/foobar", r2) r.Mount("/foo", r3) ts := NewTestServer(r) defer ts.Close() if _, body := testRequest(t, ts, "GET", "/hi", nil); body != "bye" { t.Fatalf(body) } }
explode_data.jsonl/47959
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 271 }
[ 2830, 3393, 16284, 287, 34402, 15760, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1532, 9523, 741, 7000, 2234, 4283, 6023, 497, 19954, 9626, 18552, 7502, 2266, 9328, 11, 10192, 353, 9349, 1254, 9659, 23684, 8, 341, 197, 30295, 4073, 10556...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTargetQueue_CachedBuild(t *testing.T) { f := newTargetQueueFixture(t) fooTarget := model.NewImageTarget(container.MustParseSelector("foo")) s1 := store.BuildState{ LastSuccessfulResult: store.NewImageBuildResult( fooTarget.ID(), container.MustParseNamedTagged("foo:1234"), ), } targets := []model.ImageTarget{fooTarget} buildStateSet := store.BuildStateSet{fooTarget.ID(): s1} f.run(targets, buildStateSet) // last result is still valid, so handler doesn't get called at all expectedCalls := map[model.TargetID]fakeBuildHandlerCall{} assert.Equal(t, expectedCalls, f.handler.calls) }
explode_data.jsonl/2250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 6397, 7554, 920, 3854, 11066, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 6397, 7554, 18930, 1155, 692, 197, 7975, 6397, 1669, 1614, 7121, 1906, 6397, 28168, 50463, 14463, 5877, 445, 7975, 5455, 1903, 16, 1669, 3553, 2521...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestForeignKey runs a foreign key's full DDL lifecycle against a mock
// store: create schema and table, add a foreign key (verified from inside
// a DDL job-updated hook), then drop the key and the table (verified the
// same way). A mutex guards the state shared between the hooks and the
// test goroutine.
func TestForeignKey(t *testing.T) {
	store := createMockStore(t)
	defer func() {
		err := store.Close()
		require.NoError(t, err)
	}()
	d, err := testNewDDLAndStart(
		context.Background(),
		WithStore(store),
		WithLease(testLease),
	)
	require.NoError(t, err)
	defer func() {
		err := d.Stop()
		require.NoError(t, err)
	}()
	// Schema "test_foreign" with a 3-column table "t".
	dbInfo, err := testSchemaInfo(d, "test_foreign")
	require.NoError(t, err)
	ctx := testNewContext(d)
	testCreateSchema(t, ctx, d, dbInfo)
	tblInfo, err := testTableInfo(d, "t", 3)
	require.NoError(t, err)
	err = ctx.NewTxn(context.Background())
	require.NoError(t, err)
	testCreateTable(t, ctx, d, dbInfo, tblInfo)
	txn, err := ctx.Txn(true)
	require.NoError(t, err)
	err = txn.Commit(context.Background())
	require.NoError(t, err)
	// fix data race
	// checkOK/hookErr are written by the DDL callback goroutine and read
	// here, so every access goes through mu.
	var mu sync.Mutex
	checkOK := false
	var hookErr error
	// First hook: once the create-FK job reaches Done, the table must
	// expose a foreign key named "c1_fk".
	tc := &TestDDLCallback{}
	tc.onJobUpdated = func(job *model.Job) {
		if job.State != model.JobStateDone {
			return
		}
		mu.Lock()
		defer mu.Unlock()
		var t table.Table
		t, err = testGetTableWithError(d, dbInfo.ID, tblInfo.ID)
		if err != nil {
			hookErr = errors.Trace(err)
			return
		}
		fk := getForeignKey(t, "c1_fk")
		if fk == nil {
			hookErr = errors.New("foreign key not exists")
			return
		}
		checkOK = true
	}
	// Install the hook, restoring the original when the test ends.
	originalHook := d.GetHook()
	defer d.SetHook(originalHook)
	d.SetHook(tc)
	job := testCreateForeignKey(t, d, ctx, dbInfo, tblInfo, "c1_fk", []string{"c1"}, "t2", []string{"c1"}, ast.ReferOptionCascade, ast.ReferOptionSetNull)
	testCheckJobDone(t, d, job, true)
	txn, err = ctx.Txn(true)
	require.NoError(t, err)
	err = txn.Commit(context.Background())
	require.NoError(t, err)
	// Read the hook's verdict under the lock.
	mu.Lock()
	hErr := hookErr
	ok := checkOK
	mu.Unlock()
	require.NoError(t, hErr)
	require.True(t, ok)
	v := getSchemaVer(t, ctx)
	checkHistoryJobArgs(t, ctx, job.ID, &historyJobArgs{ver: v, tbl: tblInfo})
	// Reset for the drop-FK phase.
	mu.Lock()
	checkOK = false
	mu.Unlock()
	// fix data race pr/#9491
	// Second hook: once the drop-FK job reaches Done, "c1_fk" must be gone.
	tc2 := &TestDDLCallback{}
	tc2.onJobUpdated = func(job *model.Job) {
		if job.State != model.JobStateDone {
			return
		}
		mu.Lock()
		defer mu.Unlock()
		var t table.Table
		t, err = testGetTableWithError(d, dbInfo.ID, tblInfo.ID)
		if err != nil {
			hookErr = errors.Trace(err)
			return
		}
		fk := getForeignKey(t, "c1_fk")
		if fk != nil {
			hookErr = errors.New("foreign key has not been dropped")
			return
		}
		checkOK = true
	}
	d.SetHook(tc2)
	job = testDropForeignKey(t, ctx, d, dbInfo, tblInfo, "c1_fk")
	testCheckJobDone(t, d, job, false)
	mu.Lock()
	hErr = hookErr
	ok = checkOK
	mu.Unlock()
	require.NoError(t, hErr)
	require.True(t, ok)
	// Finally drop the table and commit.
	err = ctx.NewTxn(context.Background())
	require.NoError(t, err)
	job = testDropTable(t, ctx, d, dbInfo, tblInfo)
	testCheckJobDone(t, d, job, false)
	txn, err = ctx.Txn(true)
	require.NoError(t, err)
	err = txn.Commit(context.Background())
	require.NoError(t, err)
}
explode_data.jsonl/33914
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1264 }
[ 2830, 3393, 28445, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1855, 11571, 6093, 1155, 340, 16867, 2915, 368, 341, 197, 9859, 1669, 3553, 10421, 741, 197, 17957, 35699, 1155, 11, 1848, 340, 197, 66816, 2698, 11, 1848, 1669, 1273, 3564, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBPR_MovieLens trains a BPR recommender on the built-in MovieLens 1M
// split and checks ranking quality, Predict/InternalPredict agreement,
// incremental refit, and Clear.
func TestBPR_MovieLens(t *testing.T) {
	trainSet, testSet, err := LoadDataFromBuiltIn("ml-1m")
	assert.Nil(t, err)
	m := NewBPR(model.Params{
		model.NFactors:   8,
		model.Reg:        0.01,
		model.Lr:         0.05,
		model.NEpochs:    30,
		model.InitMean:   0,
		model.InitStdDev: 0.001,
	})
	score := m.Fit(trainSet, testSet, fitConfig)
	// NDCG after 30 epochs must come within benchEpsilon of 0.36.
	assertEpsilon(t, 0.36, score.NDCG, benchEpsilon)
	// test predict: the string-keyed API and the internal index-based API
	// must agree for the same user/item pair.
	assert.Equal(t, m.Predict("1", "1"), m.InternalPredict(1, 1))
	// test increment: refitting with zero epochs must leave the score
	// within incrEpsilon of the trained one.
	m.nEpochs = 0
	scoreInc := m.Fit(trainSet, testSet, fitConfig)
	assertEpsilon(t, score.NDCG, scoreInc.NDCG, incrEpsilon)
	// test clear: after Clear, a zero-epoch fit starts from scratch, so the
	// score must drop well below the trained level.
	m.Clear()
	score = m.Fit(trainSet, testSet, fitConfig)
	assert.Less(t, score.NDCG, float32(0.2))
}
explode_data.jsonl/67650
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 33, 6480, 1245, 6327, 98105, 1155, 353, 8840, 836, 8, 341, 197, 10397, 1649, 11, 1273, 1649, 11, 1848, 1669, 8893, 1043, 3830, 54300, 641, 445, 1014, 12, 16, 76, 1138, 6948, 59678, 1155, 11, 1848, 340, 2109, 1669, 1532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNilAndEmptyMapIsReturnedIfRouteNotFound(t *testing.T) { trie := newRouteTrie() for _, routePair := range routePairs { trie.add(routePair.route) } routes, params := trie.search("/path/to") assert.Nil(t, routes) assert.Empty(t, params) }
explode_data.jsonl/42517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 19064, 3036, 3522, 2227, 3872, 84804, 2679, 4899, 10372, 1155, 353, 8840, 836, 8, 341, 197, 8927, 1669, 501, 4899, 51, 7231, 2822, 2023, 8358, 6021, 12443, 1669, 2088, 6021, 54228, 341, 197, 197, 8927, 1364, 31436, 12443, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_getWorkflowPullHandler builds a project with a one-stage pipeline
// and a two-node workflow, then calls the workflow pull endpoint as an
// admin user and checks the response is a readable tar archive.
func Test_getWorkflowPullHandler(t *testing.T) {
	api, db, _, end := newTestAPI(t)
	defer end()
	// Admin user attached to the project's group so the pull is authorized.
	u, pass := assets.InsertAdminUser(t, api.mustDB())
	key := sdk.RandomString(10)
	proj := assets.InsertTestProject(t, api.mustDB(), api.Cache, key, key)
	require.NoError(t, group.InsertLinkGroupUser(context.TODO(), api.mustDB(), &group.LinkGroupUser{
		GroupID:            proj.ProjectGroups[0].Group.ID,
		AuthentifiedUserID: u.ID,
		Admin:              true,
	}))
	u.Groups = append(u.Groups, proj.ProjectGroups[0].Group)
	//First pipeline
	pip := sdk.Pipeline{
		ProjectID:  proj.ID,
		ProjectKey: proj.Key,
		Name:       "pip1",
	}
	test.NoError(t, pipeline.InsertPipeline(api.mustDB(), api.Cache, proj, &pip))
	// One stage with a single script job ("echo lol").
	script := assets.GetBuiltinOrPluginActionByName(t, db, sdk.ScriptAction)
	s := sdk.NewStage("stage 1")
	s.Enabled = true
	s.PipelineID = pip.ID
	pipeline.InsertStage(api.mustDB(), s)
	j := &sdk.Job{
		Enabled: true,
		Action: sdk.Action{
			Enabled: true,
			Actions: []sdk.Action{
				assets.NewAction(script.ID, sdk.Parameter{Name: "script", Value: "echo lol"}),
			},
		},
	}
	pipeline.InsertJob(api.mustDB(), j, s.ID, &pip)
	s.Jobs = append(s.Jobs, *j)
	pip.Stages = append(pip.Stages, *s)
	// Workflow "test_1": root pipeline node triggering a child node that
	// runs the same pipeline.
	w := sdk.Workflow{
		Name:       "test_1",
		ProjectID:  proj.ID,
		ProjectKey: proj.Key,
		WorkflowData: &sdk.WorkflowData{
			Node: sdk.Node{
				Type: sdk.NodeTypePipeline,
				Context: &sdk.NodeContext{
					PipelineID: pip.ID,
				},
				Triggers: []sdk.NodeTrigger{
					{
						ChildNode: sdk.Node{
							Type: sdk.NodeTypePipeline,
							Context: &sdk.NodeContext{
								PipelineID: pip.ID,
							},
						},
					},
				},
			},
		},
	}
	test.NoError(t, workflow.RenameNode(context.TODO(), db, &w))
	proj, _ = project.Load(api.mustDB(), api.Cache, proj.Key,
		project.LoadOptions.WithPipelines,
		project.LoadOptions.WithGroups,
	)
	test.NoError(t, workflow.Insert(context.TODO(), api.mustDB(), api.Cache, &w, proj))
	w1, err := workflow.Load(context.TODO(), api.mustDB(), api.Cache, proj, "test_1", workflow.LoadOptions{})
	test.NoError(t, err)
	//Prepare request
	vars := map[string]string{
		"key":              proj.Key,
		"permWorkflowName": w1.Name,
	}
	uri := api.Router.GetRoute("GET", api.getWorkflowPullHandler, vars)
	test.NotEmpty(t, uri)
	req := assets.NewAuthentifiedRequest(t, u, pass, "GET", uri, nil)
	//Do the request
	rec := httptest.NewRecorder()
	api.Router.Mux.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	//Check result
	t.Logf(">>%s", rec.Header().Get("Content-Type"))
	// Open the tar archive for reading.
	r := bytes.NewReader(rec.Body.Bytes())
	tr := tar.NewReader(r)
	// Iterate through the files in the archive; every entry must be
	// readable to the end.
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		test.NoError(t, err, "Unable to iterate over the tar buffer")
		t.Logf("Contents of %s:", hdr.Name)
		btes, err := ioutil.ReadAll(tr)
		test.NoError(t, err, "Unable to read the tar buffer")
		t.Logf("%s", string(btes))
	}
}
explode_data.jsonl/25271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1341 }
[ 2830, 3393, 3062, 62768, 36068, 3050, 1155, 353, 8840, 836, 8, 341, 54299, 11, 2927, 11, 8358, 835, 1669, 501, 2271, 7082, 1155, 340, 16867, 835, 741, 10676, 11, 1494, 1669, 11770, 23142, 7210, 1474, 1155, 11, 6330, 69419, 3506, 2398, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRecordSetsListAllIntegrationFilterForExistentName(t *testing.T) { c := client() zs, err := c.ZonesListAll(ListFilter{}) if err != nil { t.Error(err) } records, err := c.RecordSetsListAll(zs[0].ID, ListFilter{ NameFilter: "foo", }) if err != nil { t.Error(err) } if len(records) < 1 { t.Error("Expected RecordSetsListAll for records named 'foo' to yield results") } }
explode_data.jsonl/12124
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 6471, 30175, 852, 2403, 52464, 5632, 2461, 840, 18128, 675, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2943, 741, 20832, 82, 11, 1848, 1669, 272, 13476, 3154, 852, 2403, 10278, 5632, 37790, 743, 1848, 961, 2092, 341, 197, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAssertQuery(t *testing.T) { cases := []struct { e, a string asserts bool }{ { e: `Action=OperationName&Version=2014-01-01&Foo=val1&Bar=val2`, a: `Action=OperationName&Version=2014-01-01&Foo=val2&Bar=val3`, asserts: false, }, { e: `Action=OperationName&Version=2014-01-01&Foo=val1&Bar=val2`, a: `Action=OperationName&Version=2014-01-01&Foo=val1&Bar=val2`, asserts: true, }, } for i, c := range cases { mockT := &testing.T{} if awstesting.AssertQuery(mockT, c.e, c.a) != c.asserts { t.Error("Assert Query result was not expected.", i) } } }
explode_data.jsonl/6221
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 8534, 2859, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 7727, 11, 264, 262, 914, 198, 197, 6948, 82, 1807, 198, 197, 59403, 197, 197, 515, 298, 7727, 25, 981, 1565, 2512, 28, 8432, 675, 5, 563...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestDHCPv4EncodeResponse builds a DHCPv4 offer reply with a fixed set of
// options, serializes it with length fixing, re-decodes the bytes as a
// DHCPv4 layer, and checks the round trip preserves the packet.
func TestDHCPv4EncodeResponse(t *testing.T) {
	// Reply from server 192.168.0.1 offering 192.168.0.123 to the client
	// with MAC 12:34:56:78:9a:bc; ServerName/File padded to their
	// fixed on-wire sizes (64 and 128 bytes).
	dhcp := &DHCPv4{Operation: DHCPOpReply,
		HardwareType: LinkTypeEthernet,
		Xid:          0x12345678,
		ClientIP:     net.IP{0, 0, 0, 0},
		YourClientIP: net.IP{192, 168, 0, 123},
		NextServerIP: net.IP{192, 168, 0, 1},
		RelayAgentIP: net.IP{0, 0, 0, 0},
		ClientHWAddr: net.HardwareAddr{0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc},
		ServerName:   make([]byte, 64),
		File:         make([]byte, 128)}
	// Option order is part of the round-trip check: message type, subnet
	// mask, a pad, T1/T2 renewal timers, lease time, then server ID.
	dhcp.Options = append(dhcp.Options, NewDHCPOption(DHCPOptMessageType, []byte{byte(DHCPMsgTypeOffer)}))
	dhcp.Options = append(dhcp.Options, NewDHCPOption(DHCPOptSubnetMask, []byte{255, 255, 255, 0}))
	dhcp.Options = append(dhcp.Options, NewDHCPOption(DHCPOptPad, nil))
	dhcp.Options = append(dhcp.Options, NewDHCPOption(DHCPOptT1, []byte{0x00, 0x00, 0x0e, 0x10}))
	dhcp.Options = append(dhcp.Options, NewDHCPOption(DHCPOptT2, []byte{0x00, 0x00, 0x0e, 0x10}))
	dhcp.Options = append(dhcp.Options, NewDHCPOption(DHCPOptLeaseTime, []byte{0x00, 0x00, 0x0e, 0x10}))
	dhcp.Options = append(dhcp.Options, NewDHCPOption(DHCPOptServerID, []byte{192, 168, 0, 1}))
	// Serialize with FixLengths so length fields are computed for us.
	buf := gopacket.NewSerializeBuffer()
	opts := gopacket.SerializeOptions{FixLengths: true}
	err := gopacket.SerializeLayers(buf, opts, dhcp)
	if err != nil {
		t.Fatal(err)
	}
	// Decode the serialized bytes back and compare field-by-field.
	p2 := gopacket.NewPacket(buf.Bytes(), LayerTypeDHCPv4, testDecodeOptions)
	dhcp2 := p2.Layer(LayerTypeDHCPv4).(*DHCPv4)
	testDHCPEqual(t, dhcp, dhcp2)
}
explode_data.jsonl/75880
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 607 }
[ 2830, 3393, 51326, 7123, 85, 19, 32535, 2582, 1155, 353, 8840, 836, 8, 341, 2698, 62169, 1669, 609, 51326, 7123, 85, 19, 90, 8432, 25, 422, 22455, 2045, 79, 20841, 11, 36765, 929, 25, 5948, 929, 98006, 11, 1599, 307, 25, 220, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMSSQLLongColumnNames(t *testing.T) { db, sc, err := mssqlConnect() if err != nil { t.Fatal(err) } defer closeDB(t, db, sc, sc) query := fmt.Sprintf("select 'hello' as %s", strings.Repeat("a", 110)) var s string err = db.QueryRow(query).Scan(&s) if err != nil { t.Fatal(err) } if s != "hello" { t.Errorf("expected \"hello\", but received %v", s) } }
explode_data.jsonl/33558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 163 }
[ 2830, 3393, 44, 1220, 3588, 6583, 2933, 7980, 1155, 353, 8840, 836, 8, 341, 20939, 11, 1136, 11, 1848, 1669, 296, 79713, 14611, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 16867, 3265, 3506, 1155, 11, 2927, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestShouldSaveTrace(t *testing.T) { for _, tc := range []struct { name string expected bool synthetics bool tracerEnabled bool collectTraces bool duration time.Duration threshold time.Duration }{ { name: "insufficient duration, all disabled", expected: false, synthetics: false, tracerEnabled: false, collectTraces: false, duration: 1 * time.Second, threshold: 2 * time.Second, }, { name: "insufficient duration, only synthetics enabled", expected: true, synthetics: true, tracerEnabled: false, collectTraces: false, duration: 1 * time.Second, threshold: 2 * time.Second, }, { name: "insufficient duration, only tracer enabled", expected: false, synthetics: false, tracerEnabled: true, collectTraces: false, duration: 1 * time.Second, threshold: 2 * time.Second, }, { name: "insufficient duration, only collect traces enabled", expected: false, synthetics: false, tracerEnabled: false, collectTraces: true, duration: 1 * time.Second, threshold: 2 * time.Second, }, { name: "insufficient duration, all normal flags enabled", expected: false, synthetics: false, tracerEnabled: true, collectTraces: true, duration: 1 * time.Second, threshold: 2 * time.Second, }, { name: "insufficient duration, all flags enabled", expected: true, synthetics: true, tracerEnabled: true, collectTraces: true, duration: 1 * time.Second, threshold: 2 * time.Second, }, { name: "sufficient duration, all disabled", expected: false, synthetics: false, tracerEnabled: false, collectTraces: false, duration: 3 * time.Second, threshold: 2 * time.Second, }, { name: "sufficient duration, only synthetics enabled", expected: true, synthetics: true, tracerEnabled: false, collectTraces: false, duration: 3 * time.Second, threshold: 2 * time.Second, }, { name: "sufficient duration, only tracer enabled", expected: false, synthetics: false, tracerEnabled: true, collectTraces: false, duration: 3 * time.Second, threshold: 2 * time.Second, }, { name: "sufficient duration, only collect traces enabled", expected: false, 
synthetics: false, tracerEnabled: false, collectTraces: true, duration: 3 * time.Second, threshold: 2 * time.Second, }, { name: "sufficient duration, all normal flags enabled", expected: true, synthetics: false, tracerEnabled: true, collectTraces: true, duration: 3 * time.Second, threshold: 2 * time.Second, }, { name: "sufficient duration, all flags enabled", expected: true, synthetics: true, tracerEnabled: true, collectTraces: true, duration: 3 * time.Second, threshold: 2 * time.Second, }, } { txn := &txn{} txn.Config.TransactionTracer.Enabled = tc.tracerEnabled txn.Config.TransactionTracer.Threshold.Duration = tc.threshold txn.Reply = &internal.ConnectReply{CollectTraces: tc.collectTraces} txn.Duration = tc.duration if tc.synthetics { txn.CrossProcess.Synthetics = &cat.SyntheticsHeader{} txn.CrossProcess.SetSynthetics(tc.synthetics) } if actual := txn.shouldSaveTrace(); actual != tc.expected { t.Errorf("%s: unexpected shouldSaveTrace value; expected %v; got %v", tc.name, tc.expected, actual) } } }
explode_data.jsonl/29012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1658 }
[ 2830, 3393, 14996, 8784, 6550, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 42400, 414, 1807, 198, 197, 1903, 45809, 24279, 262, 1807, 198, 197, 25583, 9584, 5462, 1807, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFieldUsage(t *testing.T) { t.Parallel() tests := []struct { name string field reflect.StructField defValue *reflect.Value expected string }{ { name: "jsonrpcusage tag override", field: func() reflect.StructField { type s struct { Test int `jsonrpcusage:"testvalue"` } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: "testvalue", }, { name: "generic interface", field: func() reflect.StructField { type s struct { Test interface{} } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `test`, }, { name: "string without default value", field: func() reflect.StructField { type s struct { Test string } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `"test"`, }, { name: "string with default value", field: func() reflect.StructField { type s struct { Test string } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: func() *reflect.Value { value := "default" rv := reflect.ValueOf(&value) return &rv }(), expected: `test="default"`, }, { name: "array of strings", field: func() reflect.StructField { type s struct { Test []string } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `["test",...]`, }, { name: "array of strings with plural field name 1", field: func() reflect.StructField { type s struct { Keys []string } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `["key",...]`, }, { name: "array of strings with plural field name 2", field: func() reflect.StructField { type s struct { Addresses []string } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `["address",...]`, }, { name: "array of strings with plural field name 3", field: func() reflect.StructField { type s struct { Capabilities []string } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `["capability",...]`, }, { name: "array of structs", field: func() reflect.StructField { type s2 struct { 
Txid string } type s struct { Capabilities []s2 } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `[{"txid":"value"},...]`, }, { name: "array of ints", field: func() reflect.StructField { type s struct { Test []int } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `[test,...]`, }, { name: "sub struct with jsonrpcusage tag override", field: func() reflect.StructField { type s2 struct { Test string `jsonrpcusage:"testusage"` } type s struct { Test s2 } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `{testusage}`, }, { name: "sub struct with string", field: func() reflect.StructField { type s2 struct { Txid string } type s struct { Test s2 } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `{"txid":"value"}`, }, { name: "sub struct with int", field: func() reflect.StructField { type s2 struct { Vout int } type s struct { Test s2 } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `{"vout":n}`, }, { name: "sub struct with float", field: func() reflect.StructField { type s2 struct { Amount float64 } type s struct { Test s2 } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `{"amount":n.nnn}`, }, { name: "sub struct with sub struct", field: func() reflect.StructField { type s3 struct { Amount float64 } type s2 struct { Template s3 } type s struct { Test s2 } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `{"template":{"amount":n.nnn}}`, }, { name: "sub struct with slice", field: func() reflect.StructField { type s2 struct { Capabilities []string } type s struct { Test s2 } return reflect.TypeOf((*s)(nil)).Elem().Field(0) }(), defValue: nil, expected: `{"capabilities":["capability",...]}`, }, } t.Logf("Running %d tests", len(tests)) for i, test := range tests { // Ensure usage matches the expected value. 
usage := btcjson.TstFieldUsage(test.field, test.defValue) if usage != test.expected { t.Errorf("Test #%d (%s) mismatched usage - got %v, "+ "want %v", i, test.name, usage, test.expected) continue } } }
explode_data.jsonl/27520
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2215 }
[ 2830, 3393, 1877, 14783, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 39250, 262, 8708, 51445, 1877, 198, 197, 7452, 1130, 353, 34913, 6167, 198, 197, 42400, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateQuoteRequest(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() || !canManipulateRealOrders { t.Skip("skipping test, either api keys or canManipulateRealOrders isnt set correctly") } _, err := f.CreateQuoteRequest(context.Background(), currency.BTC, "call", order.Buy.Lower(), 1593140400, "", 10, 10, 5, 0, false) if err != nil { t.Error(err) } }
explode_data.jsonl/15206
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 4021, 19466, 1900, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 1369, 753, 4814, 92876, 6334, 12768, 24898, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 11, 2987, 6330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_Dump(t *testing.T) { gtest.C(t, func(t *gtest.T) { gutil.Dump(map[int]int{ 100: 100, }) }) }
explode_data.jsonl/28547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 1557, 1510, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 3174, 1314, 909, 1510, 9147, 18640, 63025, 515, 298, 197, 16, 15, 15, 25, 220, 16, 15, 15, 345, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestValidQuarter(t *testing.T) { require.True(t, ValidQuarter(1)) require.True(t, ValidQuarter(2)) require.True(t, ValidQuarter(3)) require.True(t, ValidQuarter(4)) require.False(t, ValidQuarter(0)) require.False(t, ValidQuarter(5)) }
explode_data.jsonl/80465
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 4088, 86608, 1155, 353, 8840, 836, 8, 341, 17957, 32443, 1155, 11, 7818, 86608, 7, 16, 1171, 17957, 32443, 1155, 11, 7818, 86608, 7, 17, 1171, 17957, 32443, 1155, 11, 7818, 86608, 7, 18, 1171, 17957, 32443, 1155, 11, 7818,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshalRepeatingNonRepeatedExtension(t *testing.T) { // We may see multiple instances of the same extension in the wire // format. For example, the proto compiler may encode custom options in // this way. Here, we verify that we merge the extensions together. tests := []struct { name string ext []*pb.ComplexExtension }{ { "two fields", []*pb.ComplexExtension{ {First: proto.Int32(7)}, {Second: proto.Int32(11)}, }, }, { "repeated field", []*pb.ComplexExtension{ {Third: []int32{1000}}, {Third: []int32{2000}}, }, }, { "two fields and repeated field", []*pb.ComplexExtension{ {Third: []int32{1000}}, {First: proto.Int32(9)}, {Second: proto.Int32(21)}, {Third: []int32{2000}}, }, }, } for _, test := range tests { var buf bytes.Buffer var want pb.ComplexExtension // Generate a serialized representation of a repeated extension // by catenating bytes together. for i, e := range test.ext { // Merge to create the wanted proto. proto.Merge(&want, e) // serialize the message msg := new(pb.OtherMessage) err := proto.SetExtension(msg, pb.E_Complex, e) if err != nil { t.Fatalf("[%s] Error setting extension %d: %v", test.name, i, err) } b, err := proto.Marshal(msg) if err != nil { t.Fatalf("[%s] Error marshaling message %d: %v", test.name, i, err) } buf.Write(b) } // Unmarshal and read the merged proto. msg2 := new(pb.OtherMessage) err := proto.Unmarshal(buf.Bytes(), msg2) if err != nil { t.Fatalf("[%s] Error unmarshaling message: %v", test.name, err) } e, err := proto.GetExtension(msg2, pb.E_Complex) if err != nil { t.Fatalf("[%s] Error getting extension: %v", test.name, err) } ext := e.(*pb.ComplexExtension) if ext == nil { t.Fatalf("[%s] Invalid extension", test.name) } if !reflect.DeepEqual(*ext, want) { t.Errorf("[%s] Wrong value for ComplexExtension: got: %v want: %v\n", test.name, ext, &want) } } }
explode_data.jsonl/55068
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 850 }
[ 2830, 3393, 1806, 27121, 693, 64877, 8121, 90989, 12049, 1155, 353, 8840, 836, 8, 341, 197, 322, 1205, 1231, 1490, 5248, 13121, 315, 279, 1852, 8894, 304, 279, 9067, 198, 197, 322, 3561, 13, 1752, 3110, 11, 279, 18433, 19415, 1231, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestLoadOrgIDsFromCSVExtraParam(t *testing.T) { extraParamCSV := `OrgID 1,2 3 ` r := strings.NewReader(extraParamCSV) _, err := conf.LoadOrgIDsFromCSV(r) assert.EqualError(t, err, "error reading CSV file: record on line 2: wrong number of fields") }
explode_data.jsonl/61902
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 5879, 42437, 30466, 3830, 44209, 11612, 2001, 1155, 353, 8840, 836, 8, 341, 8122, 2172, 2001, 44209, 1669, 1565, 42437, 915, 198, 16, 11, 17, 198, 18, 198, 3989, 7000, 1669, 9069, 68587, 83790, 2001, 44209, 340, 197, 6878, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncodeDecodeWIF(t *testing.T) { priv1, _ := btcec.PrivKeyFromBytes(btcec.S256(), []byte{ 0x0c, 0x28, 0xfc, 0xa3, 0x86, 0xc7, 0xa2, 0x27, 0x60, 0x0b, 0x2f, 0xe5, 0x0b, 0x7c, 0xae, 0x11, 0xec, 0x86, 0xd3, 0xbf, 0x1f, 0xbe, 0x47, 0x1b, 0xe8, 0x98, 0x27, 0xe1, 0x9d, 0x72, 0xaa, 0x1d}) priv2, _ := btcec.PrivKeyFromBytes(btcec.S256(), []byte{ 0xdd, 0xa3, 0x5a, 0x14, 0x88, 0xfb, 0x97, 0xb6, 0xeb, 0x3f, 0xe6, 0xe9, 0xef, 0x2a, 0x25, 0x81, 0x4e, 0x39, 0x6f, 0xb5, 0xdc, 0x29, 0x5f, 0xe9, 0x94, 0xb9, 0x67, 0x89, 0xb2, 0x1a, 0x03, 0x98}) wif1, err := NewWIF(priv1, &btcnet.MainNetParams, false) if err != nil { t.Fatal(err) } wif2, err := NewWIF(priv2, &btcnet.TestNet3Params, true) if err != nil { t.Fatal(err) } tests := []struct { wif *WIF encoded string }{ { wif1, "5HueCGU8rMjxEXxiPuD5BDku4MkFqeZyd4dZ1jvhTVqvbTLvyTJ", }, { wif2, "cV1Y7ARUr9Yx7BR55nTdnR7ZXNJphZtCCMBTEZBJe1hXt2kB684q", }, } for _, test := range tests { // Test that encoding the WIF structure matches the expected string. s := test.wif.String() if s != test.encoded { t.Errorf("TestEncodeDecodePrivateKey failed: want '%s', got '%s'", test.encoded, s) continue } // Test that decoding the expected string results in the original WIF // structure. w, err := DecodeWIF(test.encoded) if err != nil { t.Error(err) continue } if got := w.String(); got != test.encoded { t.Errorf("NewWIF failed: want '%v', got '%v'", test.wif, got) } } }
explode_data.jsonl/27635
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 836 }
[ 2830, 3393, 32535, 32564, 54, 2773, 1155, 353, 8840, 836, 8, 341, 71170, 16, 11, 716, 1669, 19592, 68955, 17947, 344, 1592, 3830, 7078, 68417, 68955, 808, 17, 20, 21, 1507, 3056, 3782, 515, 197, 197, 15, 87, 15, 66, 11, 220, 15, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestParsePreAndPostConditions(t *testing.T) { t.Parallel() result, errs := ParseProgram(` fun test(n: Int) { pre { n != 0 n > 0 } post { result == 0 } return 0 } `) require.Empty(t, errs) utils.AssertEqualWithDiff(t, []ast.Declaration{ &ast.FunctionDeclaration{ Access: ast.AccessNotSpecified, Identifier: ast.Identifier{ Identifier: "test", Pos: ast.Position{Offset: 13, Line: 2, Column: 12}, }, ParameterList: &ast.ParameterList{ Parameters: []*ast.Parameter{ { Label: "", Identifier: ast.Identifier{ Identifier: "n", Pos: ast.Position{Offset: 18, Line: 2, Column: 17}, }, TypeAnnotation: &ast.TypeAnnotation{ IsResource: false, Type: &ast.NominalType{ Identifier: ast.Identifier{ Identifier: "Int", Pos: ast.Position{Offset: 21, Line: 2, Column: 20}, }, }, StartPos: ast.Position{Offset: 21, Line: 2, Column: 20}, }, Range: ast.Range{ StartPos: ast.Position{Offset: 18, Line: 2, Column: 17}, EndPos: ast.Position{Offset: 23, Line: 2, Column: 22}, }, }, }, Range: ast.Range{ StartPos: ast.Position{Offset: 17, Line: 2, Column: 16}, EndPos: ast.Position{Offset: 24, Line: 2, Column: 23}, }, }, ReturnTypeAnnotation: &ast.TypeAnnotation{ IsResource: false, Type: &ast.NominalType{ Identifier: ast.Identifier{ Identifier: "", Pos: ast.Position{Offset: 24, Line: 2, Column: 23}, }, }, StartPos: ast.Position{Offset: 24, Line: 2, Column: 23}, }, FunctionBlock: &ast.FunctionBlock{ Block: &ast.Block{ Statements: []ast.Statement{ &ast.ReturnStatement{ Expression: &ast.IntegerExpression{ Value: new(big.Int), Base: 10, Range: ast.Range{ StartPos: ast.Position{Offset: 185, Line: 10, Column: 19}, EndPos: ast.Position{Offset: 185, Line: 10, Column: 19}, }, }, Range: ast.Range{ StartPos: ast.Position{Offset: 178, Line: 10, Column: 12}, EndPos: ast.Position{Offset: 185, Line: 10, Column: 19}, }, }, }, Range: ast.Range{ StartPos: ast.Position{Offset: 26, Line: 2, Column: 25}, EndPos: ast.Position{Offset: 195, Line: 11, Column: 8}, }, }, PreConditions: &ast.Conditions{ { Kind: 
ast.ConditionKindPre, Test: &ast.BinaryExpression{ Operation: ast.OperationNotEqual, Left: &ast.IdentifierExpression{ Identifier: ast.Identifier{ Identifier: "n", Pos: ast.Position{Offset: 62, Line: 4, Column: 16}, }, }, Right: &ast.IntegerExpression{ Value: new(big.Int), Base: 10, Range: ast.Range{ StartPos: ast.Position{Offset: 67, Line: 4, Column: 21}, EndPos: ast.Position{Offset: 67, Line: 4, Column: 21}, }, }, }, }, { Kind: ast.ConditionKindPre, Test: &ast.BinaryExpression{ Operation: ast.OperationGreater, Left: &ast.IdentifierExpression{ Identifier: ast.Identifier{ Identifier: "n", Pos: ast.Position{Offset: 85, Line: 5, Column: 16}, }, }, Right: &ast.IntegerExpression{ Value: new(big.Int), Base: 10, Range: ast.Range{ StartPos: ast.Position{Offset: 89, Line: 5, Column: 20}, EndPos: ast.Position{Offset: 89, Line: 5, Column: 20}, }, }, }, }, }, PostConditions: &ast.Conditions{ { Kind: ast.ConditionKindPost, Test: &ast.BinaryExpression{ Operation: ast.OperationEqual, Left: &ast.IdentifierExpression{ Identifier: ast.Identifier{ Identifier: "result", Pos: ast.Position{Offset: 140, Line: 8, Column: 16}, }, }, Right: &ast.IntegerExpression{ Value: new(big.Int), Base: 10, Range: ast.Range{ StartPos: ast.Position{Offset: 150, Line: 8, Column: 26}, EndPos: ast.Position{Offset: 150, Line: 8, Column: 26}, }, }, }, }, }, }, StartPos: ast.Position{Offset: 9, Line: 2, Column: 8}, }, }, result.Declarations(), ) }
explode_data.jsonl/35976
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2442 }
[ 2830, 3393, 14463, 4703, 3036, 4133, 35435, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 9559, 11, 70817, 1669, 14775, 10690, 61528, 286, 2464, 1273, 1445, 25, 1333, 8, 341, 310, 855, 341, 394, 308, 961, 220, 15, 198, 394, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJson(t *testing.T) { assert := assert.New(t) settings := testutil.Settings().WithSections().Build() expected, err := testutil.GetExpected("json", "json") assert.Nil(err) options := module.NewOptions() module, err := testutil.GetModule(options) assert.Nil(err) printer := NewJSON(settings) actual, err := printer.Print(module, settings) assert.Nil(err) assert.Equal(expected, actual) }
explode_data.jsonl/40843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 5014, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 62930, 1669, 1273, 1314, 27000, 1005, 2354, 38122, 1005, 11066, 2822, 42400, 11, 1848, 1669, 1273, 1314, 2234, 18896, 445, 2236, 497, 330, 2236, 1138, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidate(t *testing.T) { tests := map[string]struct { options *Options args []string setStringFlags []stringFlag expErr bool }{ "If there are arguments, as well as label selector, error": { options: &Options{ LabelSelector: "foo=bar", }, args: []string{"abc"}, expErr: true, }, "If there are all certificates selected, as well as label selector, error": { options: &Options{ LabelSelector: "foo=bar", All: true, }, args: []string{""}, expErr: true, }, "If there are all certificates selected, as well as arguments, error": { options: &Options{ All: true, }, args: []string{"abc"}, expErr: true, }, "If all certificates in all namespaces selected, don't error": { options: &Options{ All: true, AllNamespaces: true, }, expErr: false, }, "If --namespace and --all namespace specified, error": { options: &Options{ All: true, }, setStringFlags: []stringFlag{ {name: "namespace", value: "foo"}, }, expErr: true, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { cmd := NewCmdRenew(context.TODO(), genericclioptions.IOStreams{}, nil) // This is normally registered in the main func. We add here to test // against flags normally inherited. kubeConfigFlags := genericclioptions.NewConfigFlags(true) kubeConfigFlags.AddFlags(cmd.PersistentFlags()) if test.setStringFlags != nil { for _, s := range test.setStringFlags { if err := cmd.PersistentFlags().Set(s.name, s.value); err != nil { t.Fatal(err) } } } err := test.options.Validate(cmd, test.args) if test.expErr != (err != nil) { t.Errorf("expected error=%t got=%v", test.expErr, err) } }) } }
explode_data.jsonl/61634
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 777 }
[ 2830, 3393, 17926, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 35500, 286, 353, 3798, 198, 197, 31215, 1843, 3056, 917, 198, 197, 8196, 703, 9195, 3056, 917, 12135, 198, 197, 48558, 7747, 260, 1807, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTagComment(t *testing.T) { assert.NoError(t, PrepareEngine()) // FIXME: only support mysql if testEngine.Dialect().URI().DBType != schemas.MYSQL { return } type TestComment1 struct { Id int64 `xorm:"comment(主键)"` } assert.NoError(t, testEngine.Sync2(new(TestComment1))) tables, err := testEngine.DBMetas() assert.NoError(t, err) assert.EqualValues(t, 1, len(tables)) assert.EqualValues(t, 1, len(tables[0].Columns())) assert.EqualValues(t, "主键", tables[0].Columns()[0].Comment) assert.NoError(t, testEngine.DropTables(new(TestComment1))) type TestComment2 struct { Id int64 `xorm:"comment('主键')"` } assert.NoError(t, testEngine.Sync2(new(TestComment2))) tables, err = testEngine.DBMetas() assert.NoError(t, err) assert.EqualValues(t, 1, len(tables)) assert.EqualValues(t, 1, len(tables[0].Columns())) assert.EqualValues(t, "主键", tables[0].Columns()[0].Comment) }
explode_data.jsonl/19209
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 376 }
[ 2830, 3393, 5668, 10677, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 4571, 2398, 197, 322, 27475, 25, 1172, 1824, 10564, 198, 743, 1273, 4571, 909, 55056, 1005, 10301, 1005, 3506, 929, 961, 61800, 1321, 62364, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseEmptySection(t *testing.T) { p := NewParser(nil) input := "[]\n" + "Label 1 += B" err := p.Parse(strings.NewReader(input)) if err == nil { t.Errorf("expected error") } actual := err.Error() expected := "1: empty section name" if actual != expected { t.Errorf("expected: %q, actual: %q", expected, actual) } }
explode_data.jsonl/49354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 14463, 3522, 9620, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 6570, 27907, 340, 22427, 1669, 330, 1294, 59, 77, 1, 3610, 197, 197, 1, 2476, 220, 16, 1421, 425, 1837, 9859, 1669, 281, 8937, 51442, 68587, 5384, 1171, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_SqlUserDefinedFunctionCreateUpdatePropertiesARM_WhenSerializedToJson_DeserializesAsEqual(t *testing.T) { t.Parallel() parameters := gopter.DefaultTestParameters() parameters.MaxSize = 10 properties := gopter.NewProperties(parameters) properties.Property( "Round trip of SqlUserDefinedFunctionCreateUpdatePropertiesARM via JSON returns original", prop.ForAll(RunJSONSerializationTestForSqlUserDefinedFunctionCreateUpdatePropertiesARM, SqlUserDefinedFunctionCreateUpdatePropertiesARMGenerator())) properties.TestingRun(t, gopter.NewFormatedReporter(true, 240, os.Stdout)) }
explode_data.jsonl/40570
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 1098, 1470, 1474, 29361, 5152, 4021, 4289, 7903, 17911, 62, 4498, 77521, 78967, 98054, 2848, 4756, 2121, 2993, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 67543, 1669, 728, 73137, 13275, 2271, 9706, 741, 67543, 14535...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSubstitute(t *testing.T) { s := "$ONE $TWO ${{TWO}} ${{TWO:}} ${{TWO:3}} ${{TWO2:22}} ${{THREE:3}}" result, err := substitute(s, []string{"ONE=1", "TWO=2"}) if err != nil { t.Fatalf("failed substitutition: %s", err) } expected := "1 2 2 2 2 22 3" if result != expected { t.Fatalf("bad substitution result, expected %s got %s", expected, result) } // ${PRODUCT} is ok s = "$PRODUCT ${PRODUCT//x} ${{PRODUCT}}" result, err = substitute(s, []string{"PRODUCT=foo"}) if err != nil { t.Fatalf("failed substitution: %s", err) } expected = "foo ${PRODUCT//x} foo" if result != expected { t.Fatalf("bad substitution result, expected %s got %s", expected, result) } }
explode_data.jsonl/81212
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 3136, 7660, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 5201, 5225, 400, 51, 22681, 400, 2979, 51, 22681, 3417, 400, 2979, 51, 22681, 25, 3417, 400, 2979, 51, 22681, 25, 18, 3417, 400, 2979, 51, 22681, 17, 25, 17, 17, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFromBytes(t *testing.T) { testCases := []struct { tag string jsonStr string expectErr bool }{ { tag: "valid json", jsonStr: `{"name":"John Doe", "age": 30}`, expectErr: false, }, { tag: "invalid json", jsonStr: `{"name":"John Doe", "age": 30, "oops"}`, expectErr: true, }, } for _, tc := range testCases { jn := FromBytes([]byte(tc.jsonStr)) if jn.Err != nil && !tc.expectErr { t.Errorf("Failed %s", tc.tag) } } }
explode_data.jsonl/10431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 3830, 7078, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 60439, 981, 914, 198, 197, 30847, 2580, 256, 914, 198, 197, 24952, 7747, 1807, 198, 197, 59403, 197, 197, 515, 298, 60439, 25, 981, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_bingo_start(t *testing.T) { ctx := context.Background() bgame, err := newBingoGame(utils.ReaderFromFile(t, filepath.Join("testdata", "input.txt"))) require.NoError(t, err) type args struct { ctx context.Context wr winRule } type expected struct { board *board num int } tests := []struct { name string args args expected expected }{ { name: "", args: args{ ctx: ctx, wr: rule(1), }, expected: expected{ board: &board{ id: 3, numbers: [5][5]number{ { number{val: 14, isMarked: true}, number{val: 21, isMarked: true}, number{val: 17, isMarked: true}, number{val: 24, isMarked: true}, number{val: 4, isMarked: true}, }, { number{val: 10}, number{val: 16}, number{val: 15}, number{val: 9, isMarked: true}, number{val: 19}, }, { number{val: 18}, number{val: 8}, number{val: 23, isMarked: true}, number{val: 26}, number{val: 20}, }, { number{val: 22}, number{val: 11, isMarked: true}, number{val: 13}, number{val: 6}, number{val: 5, isMarked: true}, }, { number{val: 2, isMarked: true}, number{val: 0, isMarked: true}, number{val: 12}, number{val: 3}, number{val: 7, isMarked: true}, }, }, state: state{ verticals: [boardSize]int{ 0: 2, 1: 3, 2: 2, 3: 2, 4: 3, }, horizontals: [boardSize]int{ 0: 5, 1: 1, 2: 1, 3: 2, 4: 3, }, }, }, num: 24, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { b := bgame gotBoard, gotNum := b.start(tt.args.ctx, tt.args.wr) equalBoards(t, tt.expected.board, gotBoard) assert.Equal(t, tt.expected.num, gotNum) }) } }
explode_data.jsonl/45721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1075 }
[ 2830, 3393, 880, 27908, 4906, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 2233, 5804, 11, 1848, 1669, 501, 33, 27908, 4868, 64166, 47431, 43633, 1155, 11, 26054, 22363, 445, 92425, 497, 330, 1355, 3909, 29836, 17957, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSCSSWithIncludePaths(t *testing.T) { if !scss.Supports() { t.Skip("Skip SCSS") } assert := require.New(t) workDir, clean, err := createTempDir("hugo-scss-include") assert.NoError(err) defer clean() v := viper.New() v.Set("workingDir", workDir) b := newTestSitesBuilder(t).WithLogger(loggers.NewWarningLogger()) b.WithViper(v) b.WithWorkingDir(workDir) // Need to use OS fs for this. b.Fs = hugofs.NewDefault(v) fooDir := filepath.Join(workDir, "node_modules", "foo") scssDir := filepath.Join(workDir, "assets", "scss") assert.NoError(os.MkdirAll(fooDir, 0777)) assert.NoError(os.MkdirAll(filepath.Join(workDir, "content", "sect"), 0777)) assert.NoError(os.MkdirAll(filepath.Join(workDir, "data"), 0777)) assert.NoError(os.MkdirAll(filepath.Join(workDir, "i18n"), 0777)) assert.NoError(os.MkdirAll(filepath.Join(workDir, "layouts", "shortcodes"), 0777)) assert.NoError(os.MkdirAll(filepath.Join(workDir, "layouts", "_default"), 0777)) assert.NoError(os.MkdirAll(filepath.Join(scssDir), 0777)) b.WithSourceFile(filepath.Join(fooDir, "_moo.scss"), ` $moolor: #fff; moo { color: $moolor; } `) b.WithSourceFile(filepath.Join(scssDir, "main.scss"), ` @import "moo"; `) b.WithTemplatesAdded("index.html", ` {{ $cssOpts := (dict "includePaths" (slice "node_modules/foo" ) ) }} {{ $r := resources.Get "scss/main.scss" | toCSS $cssOpts | minify }} T1: {{ $r.Content }} `) b.Build(BuildCfg{}) b.AssertFileContent(filepath.Join(workDir, "public/index.html"), `T1: moo{color:#fff}`) }
explode_data.jsonl/79721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 643 }
[ 2830, 3393, 3540, 1220, 2354, 22283, 26901, 1155, 353, 8840, 836, 8, 341, 743, 753, 63298, 79990, 82, 368, 341, 197, 3244, 57776, 445, 35134, 7531, 1220, 1138, 197, 532, 6948, 1669, 1373, 7121, 1155, 340, 97038, 6184, 11, 4240, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewGoRunnerPodForCR(t *testing.T) { tests := map[string]struct { engine chaosTypes.EngineInfo isErr bool }{ "Test Positive-1": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "test-runner", Namespace: "test", }, Spec: v1alpha1.ChaosEngineSpec{ ChaosServiceAccount: "fake-serviceAccount", Components: v1alpha1.ComponentParams{ Runner: v1alpha1.RunnerInfo{ Image: "fake-runner-image", Command: []string{ "cmd1", "cmd2", }, }, }, }, }, AppExperiments: []string{"exp-1"}, }, isErr: false, }, "Test Positive-2": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "test-runner", Namespace: "test", }, Spec: v1alpha1.ChaosEngineSpec{ ChaosServiceAccount: "fake-serviceAccount", Components: v1alpha1.ComponentParams{ Runner: v1alpha1.RunnerInfo{ Image: "fake-runner-image", ImagePullPolicy: "Always", Args: []string{ "args1", "args2", }, }, }, }, }, AppExperiments: []string{"exp-1"}, }, isErr: false, }, "Test Positive-3": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "test-runner", Namespace: "test", }, Spec: v1alpha1.ChaosEngineSpec{ ChaosServiceAccount: "fake-serviceAccount", AnnotationCheck: "false", Components: v1alpha1.ComponentParams{ Runner: v1alpha1.RunnerInfo{ Image: "fake-runner-image", ImagePullPolicy: "IfNotPresent", Command: []string{ "cmd1", "cmd2", }, }, }, }, }, AppExperiments: []string{"exp-1"}, }, isErr: false, }, "Test Positive-4": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "test-runner", Namespace: "test", }, Spec: v1alpha1.ChaosEngineSpec{ ChaosServiceAccount: "fake-serviceAccount", AnnotationCheck: "true", Components: v1alpha1.ComponentParams{ Runner: v1alpha1.RunnerInfo{ Image: "fake-runner-image", ImagePullPolicy: "Never", Args: []string{ "args1", "args2", }, }, }, }, }, AppExperiments: []string{"exp-1"}, }, isErr: false, }, "Test 
Negative-1": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{}, }, AppExperiments: []string{"exp-1"}, }, isErr: true, }, "Test Negative-2 ": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "test-runner", Namespace: "test", }, Spec: v1alpha1.ChaosEngineSpec{ ChaosServiceAccount: "fake-serviceAccount", }, }, AppExperiments: []string{"exp-1"}, }, isErr: true, }, "Test Negative-3 ": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "test-runner", Namespace: "test", }, Spec: v1alpha1.ChaosEngineSpec{ ChaosServiceAccount: "fake-serviceAccount", }, }, AppExperiments: []string{}, }, isErr: true, }, "Test Negative-4 ": { engine: chaosTypes.EngineInfo{ Instance: &v1alpha1.ChaosEngine{ ObjectMeta: metav1.ObjectMeta{ Name: "test-runner", Namespace: "test", }, Spec: v1alpha1.ChaosEngineSpec{ ChaosServiceAccount: "fake-serviceAccount", Components: v1alpha1.ComponentParams{ Runner: v1alpha1.RunnerInfo{ Image: "", }, }, }, }, AppExperiments: []string{}, }, isErr: true, }, } for name, mock := range tests { t.Run(name, func(t *testing.T) { _, err := newGoRunnerPodForCR(&mock.engine) if mock.isErr && err == nil { t.Fatalf("Test %q failed: expected error not to be nil", name) } if !mock.isErr && err != nil { t.Fatalf("Test %q failed: expected error to be nil", name) } }) } }
explode_data.jsonl/32129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2214 }
[ 2830, 3393, 3564, 10850, 19486, 23527, 2461, 8973, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 80118, 26915, 4173, 54424, 1731, 198, 197, 19907, 7747, 220, 1807, 198, 197, 59403, 197, 197, 1, 2271, 43903,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestFormatDate(t *testing.T) { testtype.SkipUnlessTestType(t, testtype.UnitTestType) Convey("will take valid format 2006-01-02T15:04:05.000Z", t, func() { _, err := FormatDate("2014-01-02T15:04:05.000Z") So(err, ShouldBeNil) }) Convey("will take valid format 2006-01-02T15:04:05Z", t, func() { _, err := FormatDate("2014-03-02T15:05:05Z") So(err, ShouldBeNil) }) Convey("will take valid format 2006-01-02T15:04Z", t, func() { _, err := FormatDate("2014-04-02T15:04Z") So(err, ShouldBeNil) }) Convey("will take valid format 2006-01-02T15:04-0700", t, func() { _, err := FormatDate("2014-04-02T15:04-0800") So(err, ShouldBeNil) }) Convey("will take valid format 2006-01-02T15:04:05.000-0700", t, func() { _, err := FormatDate("2014-04-02T15:04:05.000-0600") So(err, ShouldBeNil) }) Convey("will take valid format 2006-01-02T15:04:05-0700", t, func() { _, err := FormatDate("2014-04-02T15:04:05-0500") So(err, ShouldBeNil) }) Convey("will return an error for an invalid format", t, func() { _, err := FormatDate("invalid string format") So(err, ShouldNotBeNil) }) }
explode_data.jsonl/36080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 486 }
[ 2830, 3393, 4061, 1916, 1155, 353, 8840, 836, 8, 341, 18185, 1313, 57776, 35587, 2271, 929, 1155, 11, 1273, 1313, 25159, 2271, 929, 692, 93070, 5617, 445, 14387, 1896, 2697, 3561, 220, 17, 15, 15, 21, 12, 15, 16, 12, 15, 17, 51, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIssue1656(t *testing.T) { skipUnlessOn(t, "amd64 only", "amd64") withTestProcess("issue1656/", t, func(p *proc.Target, fixture protest.Fixture) { setFileBreakpoint(p, t, filepath.ToSlash(filepath.Join(fixture.BuildDir, "main.s")), 5) assertNoError(p.Continue(), t, "Continue()") t.Logf("step1\n") assertNoError(p.Step(), t, "Step()") assertLineNumber(p, t, 8, "wrong line number after first step") t.Logf("step2\n") assertNoError(p.Step(), t, "Step()") assertLineNumber(p, t, 9, "wrong line number after second step") }) }
explode_data.jsonl/56336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 220 }
[ 2830, 3393, 42006, 16, 21, 20, 21, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 35587, 1925, 1155, 11, 330, 67913, 21, 19, 1172, 497, 330, 67913, 21, 19, 1138, 46948, 2271, 7423, 445, 11159, 16, 21, 20, 21, 28105, 259, 11, 2915, 129...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_defaultErrorHandler_XML(t *testing.T) { r := require.New(t) app := New(Options{}) app.GET("/", func(c Context) error { return c.Error(401, fmt.Errorf("boom")) }) w := httptest.New(app) res := w.XML("/").Get() r.Equal(401, res.Code) ct := res.Header().Get("content-type") r.Equal("text/xml", ct) b := res.Body.String() r.Contains(b, `<response code="401">`) r.Contains(b, `<error>boom</error>`) r.Contains(b, `<trace>`) r.Contains(b, `</trace>`) r.Contains(b, `</response>`) }
explode_data.jsonl/82182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 9993, 66673, 45617, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 340, 28236, 1669, 1532, 7, 3798, 37790, 28236, 17410, 35460, 2915, 1337, 9608, 8, 1465, 341, 197, 853, 272, 6141, 7, 19, 15, 16, 11, 8879, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLabelUpdatePod(t *testing.T) { labels := map[string]string{ "app": "test-pod", } oldPodObj := createPod("test-pod", "test-namespace", "0", "1.2.3.4", labels, NonHostNetwork, corev1.PodRunning) calls := []testutils.TestCmd{ // add pod {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("ns-test-namespace"), "nethash"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("all-namespaces"), "setlist"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("all-namespaces"), util.GetHashedName("ns-test-namespace")}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("ns-test-namespace"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("app"), "nethash"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("app"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("app:test-pod"), "nethash"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("app:test-pod"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("namedport:app:test-pod"), "hash:ip,port"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("namedport:app:test-pod"), "1.2.3.4,8080"}}, // update pod {Cmd: []string{"ipset", "-D", "-exist", util.GetHashedName("app:test-pod"), "1.2.3.4"}}, {Cmd: []string{"ipset", "-X", "-exist", util.GetHashedName("app:test-pod")}}, {Cmd: []string{"ipset", "-N", "-exist", util.GetHashedName("app:new-test-pod"), "nethash"}}, {Cmd: []string{"ipset", "-A", "-exist", util.GetHashedName("app:new-test-pod"), "1.2.3.4"}}, } fexec := testutils.GetFakeExecWithScripts(calls) defer testutils.VerifyCalls(t, fexec, calls) f := newFixture(t, fexec) f.podLister = append(f.podLister, oldPodObj) f.kubeobjects = append(f.kubeobjects, oldPodObj) stopCh := make(chan struct{}) defer close(stopCh) f.newPodController(stopCh) newPodObj := oldPodObj.DeepCopy() newPodObj.Labels = map[string]string{ "app": "new-test-pod", } // oldPodObj.ResourceVersion value is "0" newRV, _ := 
strconv.Atoi(oldPodObj.ResourceVersion) newPodObj.ResourceVersion = fmt.Sprintf("%d", newRV+1) updatePod(t, f, oldPodObj, newPodObj) testCases := []expectedValues{ {1, 1, 0}, } checkPodTestResult("TestLabelUpdatePod", f, testCases) checkNpmPodWithInput("TestLabelUpdatePod", f, newPodObj) }
explode_data.jsonl/35411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 969 }
[ 2830, 3393, 2476, 4289, 23527, 1155, 353, 8840, 836, 8, 341, 95143, 1669, 2415, 14032, 30953, 515, 197, 197, 1, 676, 788, 330, 1944, 2268, 347, 756, 197, 532, 61828, 23527, 5261, 1669, 1855, 23527, 445, 1944, 2268, 347, 497, 330, 1944...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewPRMMatchExact(t *testing.T) { _prm := NewPRMMatchExact() prm, ok := _prm.(*prmMatchExact) require.True(t, ok) assert.Equal(t, &prmMatchExact{prmCommon{prmTypeMatchExact}}, prm) }
explode_data.jsonl/36511
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 3564, 6480, 8035, 754, 57954, 1155, 353, 8840, 836, 8, 341, 197, 5294, 76, 1669, 1532, 6480, 8035, 754, 57954, 741, 25653, 76, 11, 5394, 1669, 716, 94043, 41399, 94043, 8331, 57954, 340, 17957, 32443, 1155, 11, 5394, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestComputeSingleByteXOR(t *testing.T) { tests := map[string]struct { s []byte c byte want []byte }{ "valid": { s: []byte{0x00, 0x00}, c: 0xff, want: []byte{0xff, 0xff}, }, "nil s": { s: nil, c: 0xff, want: []byte{}, }, "blank s": { s: []byte{}, c: 0xff, want: []byte{}, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { s := append(tt.s[:0:0], tt.s...) got := ComputeSingleByteXOR(s, tt.c) if !reflect.DeepEqual(got, tt.want) { t.Errorf("computeSingleByteXOR(%v, %v) = %v, want: %v", tt.s, tt.c, got, tt.want) } if !reflect.DeepEqual(s, tt.s) { t.Errorf("computeSingleByteXOR(%v, %v) changed input buffer to %v", tt.s, tt.c, s) } }) } }
explode_data.jsonl/42755
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 426 }
[ 2830, 3393, 46254, 10888, 7153, 55, 868, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 1903, 262, 3056, 3782, 198, 197, 1444, 262, 4922, 198, 197, 50780, 3056, 3782, 198, 197, 59403, 197, 197, 1, 1891, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_AddressPresenter_NullifyJsonProps(t *testing.T) { const ADDRESS_TYPE = "http://ns.dasch.swiss/repository#Address" id, _ := valueobject.NewIdentifier() a := presenter.Address{ ID: id, Type: ADDRESS_TYPE, Street: "street", PostalCode: "0000", Locality: "city", Country: "country", Canton: "canton", Additional: "additional", CreatedAt: "2021-08-05 12:12:00 +0000 UTC", CreatedBy: "12345678-1234-1234-1234-123456789101", ChangedAt: "0001-01-01 00:00:00 +0000 UTC", ChangedBy: "00000000-0000-0000-0000-000000000000", DeletedAt: "0001-01-01 00:00:00 +0000 UTC", DeletedBy: "00000000-0000-0000-0000-000000000000", } a = a.NullifyJsonProps() assert.Equal(t, a.ID, id) assert.Equal(t, a.Type, ADDRESS_TYPE) assert.Equal(t, a.Street, "street") assert.Equal(t, a.PostalCode, "0000") assert.Equal(t, a.Locality, "city") assert.Equal(t, a.Country, "country") assert.Equal(t, a.Canton, "canton") assert.Equal(t, a.Additional, "additional") assert.Equal(t, a.CreatedAt, "2021-08-05 12:12:00 +0000 UTC") assert.Equal(t, a.CreatedBy, "12345678-1234-1234-1234-123456789101") assert.Equal(t, a.ChangedAt, "") assert.Equal(t, a.ChangedBy, "") assert.Equal(t, a.DeletedAt, "") assert.Equal(t, a.DeletedBy, "") }
explode_data.jsonl/46001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 586 }
[ 2830, 3393, 64899, 33849, 55534, 1437, 5014, 5992, 1155, 353, 8840, 836, 8, 341, 4777, 64428, 4189, 284, 330, 1254, 1110, 4412, 950, 300, 331, 3064, 1038, 97548, 2, 4286, 698, 15710, 11, 716, 1669, 897, 1700, 7121, 8714, 2822, 11323, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRelativeToAbsolutePath_WhenGivenPathWithOnlyName(t *testing.T) { sshPath := "test-dir" absPath, err := common.RelativeToAbsolutePath(sshPath) currentPath, _ := filepath.Abs(".") assert.NoError(t, err) assert.Equal(t, path.Join(currentPath, sshPath), absPath) }
explode_data.jsonl/21755
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 93347, 39211, 62, 4498, 22043, 1820, 2354, 7308, 675, 1155, 353, 8840, 836, 8, 341, 197, 25537, 1820, 1669, 330, 1944, 45283, 1837, 197, 3435, 1820, 11, 1848, 1669, 4185, 63463, 1249, 39211, 7, 25537, 1820, 340, 20121, 1820,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateCity(t *testing.T) { ctx := context.Background() board := createTestBoard(ctx) url := fmt.Sprintf("/boards/%d/cities/", board.ID) city := app.CityForm{ Name: "Test City", Position: app.Position{ X: 10, Y: 20, }, } body, err := json.Marshal(&city) if err != nil { panic(err) } req := httptest.NewRequest("POST", url, bytes.NewReader(body)) req.Header.Set("Content-Type", "application/json; charset=utf-8") req.Header.Set("X-Requested-With", "XMLHttpRequest") req.Header.Set("Accept", "application/json") w := httptest.NewRecorder() router.ServeHTTP(w, req) if !httpassert.Success(t, w) { t.Log("Body:", w.Body) } httpassert.JsonContentType(t, w) }
explode_data.jsonl/12548
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 4021, 12730, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 59868, 1669, 1855, 2271, 11932, 7502, 340, 19320, 1669, 8879, 17305, 4283, 19270, 12627, 67, 2899, 1361, 28105, 4479, 9910, 340, 1444, 487, 1669, 906, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestJobSpecsController_Create_CustomName(t *testing.T) { t.Parallel() rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplication(t, eth.NewClientWith(rpcClient, gethClient), ) defer cleanup() require.NoError(t, app.Start()) client := app.NewHTTPClient() fixtureBytes := cltest.MustReadFile(t, "testdata/hello_world_job.json") jsr := cltest.JSONFromBytes(t, fixtureBytes) jsr, err := jsr.MultiAdd(map[string]interface{}{"name": "CustomJobName"}) require.NoError(t, err) requestBody, err := json.Marshal(jsr) require.NoError(t, err) t.Run("it creates the job spec with the specified custom name", func(t *testing.T) { resp, cleanup := client.Post("/v2/specs", bytes.NewReader(requestBody)) defer cleanup() cltest.AssertServerResponse(t, resp, http.StatusOK) var j models.JobSpec err = cltest.ParseJSONAPIResponse(t, resp, &j) require.NoError(t, err) orm := app.GetStore().ORM j, err = orm.FindJobSpec(j.ID) require.NoError(t, err) assert.Equal(t, j.Name, "CustomJobName") }) }
explode_data.jsonl/31806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 432 }
[ 2830, 3393, 12245, 8327, 82, 2051, 34325, 57402, 675, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 7000, 3992, 2959, 11, 633, 71, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnitMoneyAdd(t *testing.T) { testCases := []struct { name string initialValue int64 addValue int64 wantOutput string wantError error }{ { name: "Adds money - negative input value", initialValue: 10, addValue: -10, wantOutput: "0", }, { name: "Adds money - both positive values", initialValue: 10, addValue: 30, wantOutput: "40", }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { m1, err := transaction.NewMoney(tc.initialValue) testkit.AssertIsNil(t, err) m2, err := transaction.NewMoney(tc.addValue) testkit.AssertIsNil(t, err) m3, err := m1.Add(m2) testkit.AssertEqual(t, tc.wantError, err) testkit.AssertEqual(t, tc.wantOutput, m3.String()) }) } }
explode_data.jsonl/18361
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 380 }
[ 2830, 3393, 4562, 24786, 2212, 1155, 353, 8840, 836, 8, 1476, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 85270, 1130, 526, 21, 19, 198, 197, 12718, 1130, 257, 526, 21, 19, 198, 197, 50780, 5097, 256, 914, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_UsersCurrent(t *testing.T) { t.Parallel() var err error var u *User record(t, "users/get_current_user", func(c *Client) { u, err = c.GetCurrentUser() }) if err != nil { t.Fatal(err) } t.Logf("%+v", u) }
explode_data.jsonl/26700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 2959, 62, 7137, 5405, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2405, 1848, 1465, 198, 2405, 575, 353, 1474, 198, 71952, 1155, 11, 330, 4218, 23302, 11080, 3317, 497, 2915, 1337, 353, 2959, 8, 341, 197, 10676,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestScrapeLoop_RespectTimestamps(t *testing.T) { s := teststorage.New(t) defer s.Close() app, err := s.Appender() if err != nil { t.Error(err) } capp := &collectResultAppender{next: app} sl := newScrapeLoop(context.Background(), nil, nil, nil, nopMutator, nopMutator, func() storage.Appender { return capp }, nil, 0, true, ) now := time.Now() _, _, _, err = sl.append([]byte(`metric_a{a="1",b="1"} 1 0`), "", now) if err != nil { t.Fatalf("Unexpected append error: %s", err) } want := []sample{ { metric: labels.FromStrings("__name__", "metric_a", "a", "1", "b", "1"), t: 0, v: 1, }, } if !reflect.DeepEqual(want, capp.result) { t.Fatalf("Appended samples not as expected. Wanted: %+v Got: %+v", want, capp.result) } }
explode_data.jsonl/56139
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 360 }
[ 2830, 3393, 3326, 19842, 14620, 92815, 987, 20812, 82, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1273, 16172, 7121, 1155, 340, 16867, 274, 10421, 2822, 28236, 11, 1848, 1669, 274, 5105, 1659, 741, 743, 1848, 961, 2092, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoopbackPreference(t *testing.T) { testPrefer(t, loopbackV4, loopbackV4, unspecV4) testPrefer(t, loopbackV6, loopbackV6, unspecV6) }
explode_data.jsonl/3800
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 62 }
[ 2830, 3393, 14620, 1419, 31173, 1155, 353, 8840, 836, 8, 341, 18185, 4703, 802, 1155, 11, 6337, 1419, 53, 19, 11, 6337, 1419, 53, 19, 11, 6975, 992, 53, 19, 340, 18185, 4703, 802, 1155, 11, 6337, 1419, 53, 21, 11, 6337, 1419, 53, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestInvalidInt(t *testing.T) { os.Setenv("INT", "should-be-an-int") defer os.Clearenv() cfg := Config{} assert.EqualError(t, Parse(&cfg), "env: parse error on field \"Int\" of type \"int\": strconv.ParseInt: parsing \"should-be-an-int\": invalid syntax") }
explode_data.jsonl/78761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 7928, 1072, 1155, 353, 8840, 836, 8, 341, 25078, 4202, 3160, 445, 3221, 497, 330, 5445, 15150, 18883, 20052, 1138, 16867, 2643, 727, 273, 9151, 85, 2822, 50286, 1669, 5532, 16094, 6948, 12808, 1454, 1155, 11, 14775, 2099, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_CreateBridge(t *testing.T) { t.Parallel() app, cleanup := cltest.NewApplication(t, cltest.EthMockRegisterChainID) defer cleanup() require.NoError(t, app.Start()) client, _ := app.NewClientAndRenderer() tests := []struct { name string param string errored bool }{ {"EmptyString", "", true}, {"ValidString", `{ "name": "TestBridge", "url": "http://localhost:3000/randomNumber" }`, false}, {"InvalidString", `{ "noname": "", "nourl": "" }`, true}, {"InvalidChar", `{ "badname": "path/bridge", "nourl": "" }`, true}, {"ValidPath", "testdata/create_random_number_bridge_type.json", false}, {"InvalidPath", "bad/filepath/", true}, } for _, tt := range tests { test := tt t.Run(test.name, func(t *testing.T) { set := flag.NewFlagSet("bridge", 0) set.Parse([]string{test.param}) c := cli.NewContext(nil, set, nil) if test.errored { assert.Error(t, client.CreateBridge(c)) } else { assert.Nil(t, client.CreateBridge(c)) } }) } }
explode_data.jsonl/78847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 2959, 34325, 32848, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 28236, 11, 21290, 1669, 1185, 1944, 7121, 4988, 1155, 11, 1185, 1944, 5142, 339, 11571, 8690, 18837, 915, 340, 16867, 21290, 741, 17957, 35699, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetAggregatedTrades(t *testing.T) { t.Parallel() _, err := b.GetAggregatedTrades(context.Background(), &AggregatedTradeRequestParams{ Symbol: currency.NewPair(currency.BTC, currency.USDT), Limit: 5, }) if err != nil { t.Error("Binance GetAggregatedTrades() error", err) } }
explode_data.jsonl/76646
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 1949, 9042, 93040, 1282, 3452, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 293, 2234, 9042, 93040, 1282, 3452, 5378, 19047, 3148, 197, 197, 5, 9042, 93040, 39173, 1900, 4870, 515, 298, 7568, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestController_preparePullRequestDescription(t *testing.T) { c := &Controller{} components := map[types.Component][]string{ types.ComponentDashboard: {"1", "2", "3"}, } title, body := c.preparePullRequestDescription("test-team", "patch-string", "test/file1.yml", "bodyExtra", components) expectedTitle := "[Automated PR] Update datadog component files owned by [test-team] - test/file1.yml" expectedBody := "Modified component files have been detected and a new PR has been created\n\n" expectedBody += "The following components are different from master branch:\npatch-string\n\n" expectedBody += "\n\nbodyExtra" if title != expectedTitle { t.Fatalf("expect title %s .Got %s", expectedTitle, title) } if body != expectedBody { t.Fatalf("expect body %s .Got %s", expectedBody, body) } components = map[types.Component][]string{ types.ComponentDashboard: {"1"}, } title, body = c.preparePullRequestDescription("test-team", "patch-string", "test/file1.yml", "", components) expectedTitle = "[Automated PR] Update datadog component files owned by [test-team] - test/file1.yml dashboard 1" expectedBody = "Modified component files have been detected and a new PR has been created\n\n" expectedBody += "The following components are different from master branch:\npatch-string\n\n" expectedBody += ":warning: **Closing this PR will revert all changes made in datadog!!!**" if title != expectedTitle { t.Fatalf("expect title %s .Got %s", expectedTitle, title) } if body != expectedBody { t.Fatalf("expect body %s .Got %s", expectedBody, body) } }
explode_data.jsonl/31995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 503 }
[ 2830, 3393, 2051, 47460, 36068, 1900, 5009, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 609, 2051, 31483, 197, 5149, 1669, 2415, 58, 9242, 5119, 45725, 917, 515, 197, 98785, 5119, 26947, 25, 5212, 16, 497, 330, 17, 497, 330, 18, 7115, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTermString(t *testing.T) { assertToString(t, Null{}, "null") assertToString(t, Boolean(true), "true") assertToString(t, Boolean(false), "false") assertToString(t, Number("4"), "4") assertToString(t, Number("42.1"), "42.1") assertToString(t, Number("6e7"), "6e7") assertToString(t, UIntNumberTerm(uint64(1)).Value, "1") assertToString(t, String("foo"), "\"foo\"") assertToString(t, String("\"foo\""), "\"\\\"foo\\\"\"") assertToString(t, String("foo bar"), "\"foo bar\"") assertToString(t, Var("foo"), "foo") assertToString(t, RefTerm(VarTerm("foo"), StringTerm("bar")).Value, "foo.bar") assertToString(t, RefTerm(VarTerm("foo"), StringTerm("bar"), VarTerm("i"), IntNumberTerm(0), StringTerm("baz")).Value, "foo.bar[i][0].baz") assertToString(t, RefTerm(VarTerm("foo"), BooleanTerm(false), NullTerm(), StringTerm("bar")).Value, "foo[false][null].bar") assertToString(t, RefTerm(VarTerm("p"), StringTerm("not")).Value, `p["not"]`) assertToString(t, RefTerm(CallTerm(VarTerm("f"), VarTerm("x")), IntNumberTerm(0)).Value, "f(x)[0]") assertToString(t, RefTerm(ArrayTerm(StringTerm("a"), StringTerm("b")), IntNumberTerm(0)).Value, "[\"a\", \"b\"][0]") assertToString(t, ArrayTerm().Value, "[]") assertToString(t, ObjectTerm().Value, "{}") assertToString(t, SetTerm().Value, "set()") assertToString(t, ArrayTerm(ObjectTerm(Item(VarTerm("foo"), ArrayTerm(RefTerm(VarTerm("bar"), VarTerm("i"))))), StringTerm("foo"), SetTerm(BooleanTerm(true), NullTerm()), FloatNumberTerm(42.1)).Value, "[{foo: [bar[i]]}, \"foo\", {null, true}, 42.1]") assertToString(t, ArrayComprehensionTerm(ArrayTerm(VarTerm("x")), NewBody(&Expr{Terms: RefTerm(VarTerm("a"), VarTerm("i"))})).Value, `[[x] | a[i]]`) assertToString(t, ObjectComprehensionTerm(VarTerm("y"), ArrayTerm(VarTerm("x")), NewBody(&Expr{Terms: RefTerm(VarTerm("a"), VarTerm("i"))})).Value, `{y: [x] | a[i]}`) assertToString(t, SetComprehensionTerm(ArrayTerm(VarTerm("x")), NewBody(&Expr{Terms: RefTerm(VarTerm("a"), VarTerm("i"))})).Value, `{[x] | a[i]}`) 
// ensure that objects and sets have deterministic String() results assertToString(t, SetTerm(VarTerm("y"), VarTerm("x")).Value, "{x, y}") assertToString(t, ObjectTerm([2]*Term{VarTerm("y"), VarTerm("b")}, [2]*Term{VarTerm("x"), VarTerm("a")}).Value, "{x: a, y: b}") }
explode_data.jsonl/2917
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 876 }
[ 2830, 3393, 17249, 703, 1155, 353, 8840, 836, 8, 341, 6948, 5870, 1155, 11, 18084, 22655, 330, 2921, 1138, 6948, 5870, 1155, 11, 6992, 3715, 701, 330, 1866, 1138, 6948, 5870, 1155, 11, 6992, 3576, 701, 330, 3849, 1138, 6948, 5870, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMinInterface(t *testing.T) { in := []interface{}{ 2, 3, 2.22, uint8(8), int64(2), 2.0, } out, err := Min(in) assert.NoError(t, err) assert.Equal(t, 2, out) }
explode_data.jsonl/9597
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 6217, 5051, 1155, 353, 8840, 836, 8, 341, 17430, 1669, 3056, 4970, 67066, 197, 197, 17, 345, 197, 197, 18, 345, 197, 197, 17, 13, 17, 17, 345, 197, 8254, 23, 7, 23, 1326, 197, 2084, 21, 19, 7, 17, 1326, 197, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDedupper(t *testing.T) { parse := func(s string) []zoekt.FileMatch { t.Helper() var fms []zoekt.FileMatch for _, t := range strings.Split(s, " ") { if t == "" { continue } parts := strings.Split(t, ":") fms = append(fms, zoekt.FileMatch{ Repository: parts[0], FileName: parts[1], }) } return fms } cases := []struct { name string matches []string want string }{{ name: "empty", matches: []string{ "", }, want: "", }, { name: "one", matches: []string{ "r1:a r1:a r1:b r2:a", }, want: "r1:a r1:a r1:b r2:a", }, { name: "some dups", matches: []string{ "r1:a r1:a r1:b r2:a", "r1:c r1:c r3:a", }, want: "r1:a r1:a r1:b r2:a r3:a", }, { name: "no dups", matches: []string{ "r1:a r1:a r1:b r2:a", "r4:c r4:c r5:a", }, want: "r1:a r1:a r1:b r2:a r4:c r4:c r5:a", }, { name: "shuffled", matches: []string{ "r1:a r2:a r1:a r1:b", "r1:c r3:a r1:c", }, want: "r1:a r2:a r1:a r1:b r3:a", }} for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { d := dedupper{} var got []zoekt.FileMatch for _, s := range tc.matches { fms := parse(s) got = append(got, d.Dedup(fms)...) } want := parse(tc.want) if !cmp.Equal(want, got, cmpopts.EquateEmpty()) { t.Errorf("mismatch (-want +got):\n%s", cmp.Diff(want, got)) } }) } }
explode_data.jsonl/61638
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 787 }
[ 2830, 3393, 35, 291, 13574, 1155, 353, 8840, 836, 8, 341, 75115, 1669, 2915, 1141, 914, 8, 3056, 12738, 17149, 8576, 8331, 341, 197, 3244, 69282, 741, 197, 2405, 282, 1011, 3056, 12738, 17149, 8576, 8331, 198, 197, 2023, 8358, 259, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSoundex(t *testing.T) { for _, row := range soundex_testdata { res := Soundex(row[0]) if res != row[1] { t.Errorf("Soundex(%q) => %q, expected %q", row[0], res, row[1]) } } }
explode_data.jsonl/58063
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 16103, 327, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 2802, 1669, 2088, 5112, 327, 4452, 691, 341, 197, 10202, 1669, 14594, 327, 7835, 58, 15, 9604, 197, 743, 592, 961, 2802, 58, 16, 60, 341, 298, 3244, 13080, 445, 16103...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParenQueries(t *testing.T) { jsonStr := `{ "friends": [{"a":10},{"a":20},{"a":30},{"a":40}] }` assert(t, Get(jsonStr, "friends.#(a>9)#|#").Int() == 4) assert(t, Get(jsonStr, "friends.#(a>10)#|#").Int() == 3) assert(t, Get(jsonStr, "friends.#(a>40)#|#").Int() == 0) }
explode_data.jsonl/43467
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 83997, 55261, 1155, 353, 8840, 836, 8, 341, 30847, 2580, 1669, 1565, 515, 197, 197, 1, 29462, 788, 61753, 64, 788, 16, 15, 36828, 64, 788, 17, 15, 36828, 64, 788, 18, 15, 36828, 64, 788, 19, 15, 57320, 197, 31257, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewGetStateBuilder(t *testing.T) { assert := assert.New(t) pubnub.Config.UUID = "my-custom-uuid" o := newGetStateBuilder(pubnub) o.Channels([]string{"ch"}) o.ChannelGroups([]string{"cg"}) path, err := o.opts.buildPath() assert.Nil(err) u := &url.URL{ Path: path, } h.AssertPathsEqual(t, "/v2/presence/sub-key/sub_key/channel/ch/uuid/my-custom-uuid", u.EscapedPath(), []int{}) query, err := o.opts.buildQuery() assert.Nil(err) expected := &url.Values{} expected.Set("channel-group", "cg") h.AssertQueriesEqual(t, expected, query, []string{"pnsdk", "uuid"}, []string{}) body, err := o.opts.buildBody() assert.Nil(err) assert.Equal([]byte{}, body) }
explode_data.jsonl/32495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 3564, 1949, 1397, 3297, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 62529, 77, 392, 10753, 39636, 284, 330, 2408, 36898, 12, 17128, 1837, 22229, 1669, 501, 1949, 1397, 3297, 74186, 77, 392, 340, 22229, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainsCondition(t *testing.T) { cases := []struct { name string input ConditionBuilder expectedNode exprNode err condErrorMode }{ { name: "basic contains", input: Name("foo").Contains("bar"), expectedNode: exprNode{ children: []exprNode{ { names: []string{"foo"}, fmtExpr: "$n", }, { values: []dynamodb.AttributeValue{ { S: aws.String("bar"), }, }, fmtExpr: "$v", }, }, fmtExpr: "contains ($c, $c)", }, }, { name: "contains invalid operand", input: Name("").Contains("bar"), err: invalidConditionOperand, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { actual, err := c.input.buildTree() if c.err != noConditionError { if err == nil { t.Errorf("expect error %q, got no error", c.err) } else { if e, a := string(c.err), err.Error(); !strings.Contains(a, e) { t.Errorf("expect %q error message to be in %q", e, a) } } } else { if err != nil { t.Errorf("expect no error, got unexpected Error %q", err) } if e, a := c.expectedNode, actual; !reflect.DeepEqual(a, e) { t.Errorf("expect %v, got %v", e, a) } } }) } }
explode_data.jsonl/27681
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 639 }
[ 2830, 3393, 23805, 10547, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 22427, 286, 15180, 3297, 198, 197, 42400, 1955, 15169, 1955, 198, 197, 9859, 688, 9756, 1454, 3636, 198, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestHandlers_FetchSyncDataHasData(t *testing.T) { testName := syncutil.GetCallingName() testhelper.StartTest(testName) server = readyFetchSyncDataHasData() var reader io.Reader request, err := http.NewRequest("GET", server.URL+"/fetchData/sessionId/F32A9BB9-7691-4F20-B337-55414E45B1D1/nodeId/9702F991-F1E7-4186-A97B-8B804F723F87/orderNum/1/changeType/AddOrUpdate", reader) res, err := http.DefaultClient.Do(request) if err != nil { syncutil.Fatal(err) t.Error(err.Error()) } if res.StatusCode != http.StatusOK { t.Error("Result incorrect. Actual Response=" + res.Status + ". Using url='" + server.URL + "'") return } responseBytes, err := ioutil.ReadAll(res.Body) if err != nil { syncutil.Fatal("Error: " + err.Error()) t.Error(err.Error()) } responseData := &syncmsg.ProtoRequestSyncEntityMessageResponse{} err = proto.Unmarshal(responseBytes, responseData) if err != nil { t.Errorf("Error decoding response: %s", err.Error()) } //syncutil.Info("responseData:", responseData) expectedResult := syncmsg.SyncRequestEntityMessageResponseResult_HasMsgs.String() if responseData.Result.String() != expectedResult { t.Errorf("Result should be: %v. Not: %v", expectedResult, responseData.Result) //return } // TODO(doug4j@gmail.com): Add more test validation testhelper.EndTest(testName) }
explode_data.jsonl/50007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 498 }
[ 2830, 3393, 39949, 1400, 2995, 12154, 1043, 10281, 1043, 1155, 353, 8840, 836, 8, 341, 18185, 675, 1669, 12811, 1314, 2234, 48853, 675, 741, 18185, 18764, 12101, 2271, 8623, 675, 692, 41057, 284, 5527, 20714, 12154, 1043, 10281, 1043, 741...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestMuxWildcardRoute(t *testing.T) { handler := func(w http.ResponseWriter, r *http.Request) {} defer func() { if recover() == nil { t.Error("expected panic()") } }() r := NewRouter() r.Get("/*/wildcard/must/be/at/end", handler) }
explode_data.jsonl/42886
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 44, 2200, 92988, 4899, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 10086, 16867, 2915, 368, 341, 197, 743, 11731, 368, 621, 2092, 341, 298, 3244, 6141, 445, 7325, 21975,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileAndSymlinkStats(t *testing.T) { // TODO: revert to upstream test once symlinks and t.TempDir are implemented tmpdir := os.TempDir() file := filepath.Join(tmpdir, "file") if err := os.WriteFile(file, []byte("abcdefg"), 0644); err != nil { t.Fatal(err) return } testFileStats(t, file) }
explode_data.jsonl/53390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 1703, 3036, 34667, 44243, 16635, 1155, 353, 8840, 836, 8, 341, 197, 322, 5343, 25, 41128, 311, 41730, 1273, 3055, 6568, 1014, 15504, 323, 259, 65009, 6184, 525, 11537, 198, 20082, 3741, 1669, 2643, 65009, 6184, 741, 17661, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBigRandom(t *testing.T) { rTest(t, 15) rTest(t, 100) rTest(t, 512) rTest(t, 1023) rTest(t, 1025) rTest(t, 4095) rTest(t, 4096) rTest(t, 4097) rTest(t, 65536) rTest(t, 65536*16) }
explode_data.jsonl/20342
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 15636, 13999, 1155, 353, 8840, 836, 8, 341, 7000, 2271, 1155, 11, 220, 16, 20, 340, 7000, 2271, 1155, 11, 220, 16, 15, 15, 340, 7000, 2271, 1155, 11, 220, 20, 16, 17, 340, 7000, 2271, 1155, 11, 220, 16, 15, 17, 18, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDate(t *testing.T) { type args struct { t time.Time layout string } tests := []struct { name string args args want string }{ { "", args{ time.Date(2012, 11, 22, 21, 28, 10, 0, time.Local), "Y-m-d H:i:s", }, "2012-11-22 21:28:10", }, { "", args{ time.Date(2012, 11, 22, 0, 0, 0, 0, time.Local), "Y-m-d", }, "2012-11-22", }, { "", args{ time.Date(2012, 11, 22, 21, 28, 10, 0, time.Local), "Y-m-d H:i:s", }, "2012-11-22 21:28:10", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := Date(tt.args.t, tt.args.layout); got != tt.want { t.Errorf("Date() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/45310
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 1916, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 3244, 414, 882, 16299, 198, 197, 61104, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 914, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestListPools(t *testing.T) { var fd = &Driver{} config.CONF.OsdsDock.Backends.NFS.ConfigPath = "testdata/nfs.yaml" fd.Setup() var vgsResp = `opensds-files-default 20.00 20.00 WSpJ3r-JYVF-DYNq-1rCe-5I6j-Zb3d-8Ub0Hg opensds-volumes-default 20.00 20.00 t7mLWW-AeCf-LtuF-7K8p-R4xA-QC5x-61qx3H` respMap := map[string]*FakeResp{ "vgs": {vgsResp, nil}, } fd.cli.RootExecuter = NewFakeExecuter(respMap) fd.cli.BaseExecuter = NewFakeExecuter(respMap) var expected = []*model.StoragePoolSpec{ { BaseModel: &model.BaseModel{}, Name: "opensds-files-default", TotalCapacity: int64(20), FreeCapacity: int64(20), AvailabilityZone: "default", StorageType: "file", MultiAttach: false, Extras: model.StoragePoolExtraSpec{ DataStorage: model.DataStorageLoS{ ProvisioningPolicy: "Thin", Compression: false, Deduplication: false, StorageAccessCapability: []string{"Read", "Write", "Execute"}, }, IOConnectivity: model.IOConnectivityLoS{ AccessProtocol: "nfs", MaxIOPS: 7000000, MaxBWS: 600, MinIOPS: 1000000, MinBWS: 100, Latency: 100, }, Advanced: map[string]interface{}{ "diskType": "SSD", "latency": "5ms", }, }, }, } pols, err := fd.ListPools() if err != nil { t.Error("Failed to list pools:", err) } for i := range pols { pols[i].Id = "" } if !reflect.DeepEqual(pols, expected) { t.Errorf("Expected %+v, got %+v\n", expected[0], pols[0]) } }
explode_data.jsonl/39372
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 793 }
[ 2830, 3393, 852, 47, 6178, 1155, 353, 8840, 836, 8, 341, 2405, 12414, 284, 609, 11349, 16094, 25873, 30197, 37, 8382, 82, 5356, 41468, 8864, 1412, 2067, 8485, 10753, 1820, 284, 330, 92425, 9612, 3848, 33406, 698, 61721, 39820, 2822, 240...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestParseStmtsStopEarly(t *testing.T) { in := []string{"a\n", "b &\n", "c\n"} p := NewParser() cr := &chunkedReader{in, make(chan bool, 10)} recv := make(chan bool, 10) errc := make(chan error) go func() { errc <- p.Stmts(cr, func(s *Stmt) bool { recv <- true return !s.Background }) }() cr.cont <- true <-recv cr.cont <- true <-recv cr.cont <- true if err := <-errc; err != nil { t.Fatalf("Expected no error in %q: %v", in, err) } }
explode_data.jsonl/31442
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 14463, 31063, 82, 10674, 41198, 1155, 353, 8840, 836, 8, 341, 17430, 1669, 3056, 917, 4913, 64, 1699, 497, 330, 65, 609, 59, 77, 497, 330, 66, 1699, 16707, 3223, 1669, 1532, 6570, 741, 91492, 1669, 609, 25979, 17120, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_sqlGetManifestData(t *testing.T) { var query resources.CollectionQuery var testdata string t.Log("Test 1: get an error for no collection id") if _, err := sqlGetManifestData(query); err == nil { t.Error("no error returned") } t.Log("Test 2: get correct sql statement for manifest data") query.CollectionID = "aa" testdata = `SELECT t_collection_data.date_added, t_collection_data.stix_id, group_concat(s_base_object.modified), group_concat(s_base_object.spec_version) FROM t_collection_data JOIN s_base_object ON t_collection_data.stix_id = s_base_object.id WHERE t_collection_data.collection_id = "aa" GROUP BY t_collection_data.date_added` if v, _ := sqlGetManifestData(query); testdata != v { t.Error("sql statement is not correct") } }
explode_data.jsonl/19847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 18063, 1949, 38495, 1043, 1155, 353, 8840, 836, 8, 341, 2405, 3239, 4963, 28629, 2859, 198, 2405, 1273, 691, 914, 271, 3244, 5247, 445, 2271, 220, 16, 25, 633, 458, 1465, 369, 902, 4426, 877, 1138, 743, 8358, 1848, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMarshalJsonCdlProof(t *testing.T) { test := CdlProof{ u: []*big.Int{ bi(31), bi(38), bi(77), bi(43), bi(27), bi(42), bi(56), bi(36), bi(45), bi(20), bi(81), bi(31), bi(69), bi(36), bi(35), bi(47), bi(44), bi(95), bi(95), bi(79), bi(90), bi(67), bi(90), bi(95), bi(96), bi(73), bi(100), bi(17), bi(26), bi(56), bi(96), bi(66), bi(63), bi(31), bi(12), bi(91), bi(96), bi(83), bi(89), bi(34), bi(91), bi(54), bi(66), bi(46), bi(19), bi(50), bi(30), bi(32), bi(74), bi(17), bi(98), bi(98), bi(56), bi(33), bi(57), bi(55), bi(98), bi(73), bi(31), bi(95), bi(31), bi(26), bi(45), bi(42), bi(59), bi(70), bi(30), bi(10), bi(58), bi(59), bi(47), bi(35), bi(35), bi(74), bi(73), bi(74), bi(38), bi(95), bi(99), bi(12), bi(34), bi(40), bi(64), bi(22), bi(30), bi(31), bi(81), bi(56), bi(20), bi(58), bi(26), bi(16), bi(31), bi(83), bi(29), bi(13), bi(20), bi(29), bi(37), bi(92), bi(10), bi(12), bi(24), bi(15), bi(34), bi(79), bi(54), bi(59), bi(40), bi(18), bi(88), bi(70), bi(10), bi(81), bi(11), bi(13), bi(81), bi(10), bi(66), bi(15), bi(23), bi(39), bi(92), bi(32), bi(78), bi(96), bi(72), bi(23), }, s: []*big.Int{ bi(58), bi(85), bi(46), bi(71), bi(67), bi(64), bi(15), bi(98), bi(47), bi(22), bi(82), bi(26), bi(40), bi(76), bi(74), bi(15), bi(66), bi(88), bi(77), bi(85), bi(69), bi(18), bi(56), bi(58), bi(16), bi(21), bi(46), bi(61), bi(97), bi(42), bi(85), bi(83), bi(21), bi(69), bi(75), bi(22), bi(97), bi(41), bi(33), bi(75), bi(21), bi(11), bi(15), bi(59), bi(12), bi(21), bi(45), bi(63), bi(94), bi(79), bi(47), bi(92), bi(11), bi(27), bi(77), bi(63), bi(50), bi(25), bi(100), bi(25), bi(48), bi(70), bi(98), bi(26), bi(99), bi(75), bi(23), bi(61), bi(15), bi(23), bi(36), bi(86), bi(73), bi(71), bi(88), bi(38), bi(91), bi(89), bi(65), bi(68), bi(78), bi(23), bi(73), bi(84), bi(71), bi(90), bi(17), bi(13), bi(15), bi(40), bi(49), bi(79), bi(45), bi(40), bi(84), bi(71), bi(50), bi(40), bi(16), bi(94), bi(88), bi(95), bi(77), bi(87), bi(49), bi(62), bi(38), bi(28), bi(20), 
bi(68), bi(31), bi(87), bi(98), bi(67), bi(91), bi(62), bi(60), bi(91), bi(71), bi(25), bi(44), bi(72), bi(43), bi(19), bi(65), bi(24), bi(16), bi(47), }, } testJSON, err := json.Marshal(test) require.NoError(t, err) require.NotNil(t, testJSON) unmarshaled := new(CdlProof) err = json.Unmarshal(testJSON, unmarshaled) require.NoError(t, err) require.Equal(t, test.u, unmarshaled.u) require.Equal(t, test.s, unmarshaled.s) }
explode_data.jsonl/25753
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1410 }
[ 2830, 3393, 55438, 5014, 34, 8736, 31076, 1155, 353, 8840, 836, 8, 341, 18185, 1669, 356, 8736, 31076, 515, 197, 10676, 25, 29838, 16154, 7371, 515, 298, 2233, 72, 7, 18, 16, 701, 6032, 7, 18, 23, 701, 6032, 7, 22, 22, 701, 6032, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetTaskRunTimeout(t *testing.T) { prName := "pipelinerun-timeouts" ns := "foo" p := "pipeline" tcs := []struct { name string timeoutDuration *metav1.Duration timeoutFields *v1beta1.TimeoutFields startTime time.Time rprt *resources.ResolvedPipelineRunTask expected *metav1.Duration }{{ name: "nil timeout duration", startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: nil, }, }, expected: &metav1.Duration{Duration: 60 * time.Minute}, }, { name: "timeout specified in pr", timeoutDuration: &metav1.Duration{Duration: 20 * time.Minute}, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: nil, }, }, expected: &metav1.Duration{Duration: 20 * time.Minute}, }, { name: "0 timeout duration", timeoutDuration: &metav1.Duration{Duration: 0 * time.Minute}, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: nil, }, }, expected: &metav1.Duration{Duration: 0 * time.Minute}, }, { name: "taskrun being created after timeout expired", timeoutDuration: &metav1.Duration{Duration: 1 * time.Minute}, startTime: now.Add(-2 * time.Minute), rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: nil, }, }, expected: &metav1.Duration{Duration: 1 * time.Second}, }, { name: "taskrun being created with timeout for PipelineTask", timeoutDuration: &metav1.Duration{Duration: 20 * time.Minute}, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: &metav1.Duration{Duration: 2 * time.Minute}, }, }, expected: &metav1.Duration{Duration: 2 * time.Minute}, }, { name: "0 timeout duration for PipelineRun, PipelineTask timeout still applied", timeoutDuration: &metav1.Duration{Duration: 0 * time.Minute}, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: &metav1.Duration{Duration: 2 * time.Minute}, }, }, expected: 
&metav1.Duration{Duration: 2 * time.Minute}, }, { name: "taskstimeout specified in pr", timeoutFields: &v1beta1.TimeoutFields{ Tasks: &metav1.Duration{Duration: 20 * time.Minute}, }, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: nil, }, }, expected: &metav1.Duration{Duration: 20 * time.Minute}, }, { name: "40m timeout duration, 20m taskstimeout duration", timeoutFields: &v1beta1.TimeoutFields{ Pipeline: &metav1.Duration{Duration: 40 * time.Minute}, Tasks: &metav1.Duration{Duration: 20 * time.Minute}, }, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: nil, }, }, expected: &metav1.Duration{Duration: 20 * time.Minute}, }, { name: "taskrun being created with taskstimeout for PipelineTask", timeoutFields: &v1beta1.TimeoutFields{ Tasks: &metav1.Duration{Duration: 20 * time.Minute}, }, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: &metav1.Duration{Duration: 2 * time.Minute}, }, }, expected: &metav1.Duration{Duration: 2 * time.Minute}, }, { name: "tasks.timeout < pipeline.tasks[].timeout", timeoutFields: &v1beta1.TimeoutFields{ Tasks: &metav1.Duration{Duration: 1 * time.Minute}, }, startTime: now, rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: &metav1.Duration{Duration: 2 * time.Minute}, }, }, expected: &metav1.Duration{Duration: 1 * time.Minute}, }, { name: "taskrun with elapsed time; timeouts.tasks applies", timeoutFields: &v1beta1.TimeoutFields{ Tasks: &metav1.Duration{Duration: 20 * time.Minute}, }, startTime: now.Add(-10 * time.Minute), rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{}, TaskRun: &v1beta1.TaskRun{ Status: v1beta1.TaskRunStatus{ TaskRunStatusFields: v1beta1.TaskRunStatusFields{ StartTime: nil, }, }, }, }, expected: &metav1.Duration{Duration: 10 * time.Minute}, }, { name: "taskrun with elapsed time; task.timeout 
applies", timeoutFields: &v1beta1.TimeoutFields{ Tasks: &metav1.Duration{Duration: 20 * time.Minute}, }, startTime: now.Add(-10 * time.Minute), rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: &metav1.Duration{Duration: 15 * time.Minute}, }, TaskRun: &v1beta1.TaskRun{ Status: v1beta1.TaskRunStatus{ TaskRunStatusFields: v1beta1.TaskRunStatusFields{ StartTime: nil, }, }, }, }, expected: &metav1.Duration{Duration: 10 * time.Minute}, }, { name: "taskrun with elapsed time; timeouts.pipeline applies", timeoutFields: &v1beta1.TimeoutFields{ Tasks: &metav1.Duration{Duration: 20 * time.Minute}, }, startTime: now.Add(-10 * time.Minute), rprt: &resources.ResolvedPipelineRunTask{ PipelineTask: &v1beta1.PipelineTask{ Timeout: &metav1.Duration{Duration: 15 * time.Minute}, }, TaskRun: &v1beta1.TaskRun{ Status: v1beta1.TaskRunStatus{ TaskRunStatusFields: v1beta1.TaskRunStatusFields{ StartTime: nil, }, }, }, }, expected: &metav1.Duration{Duration: 10 * time.Minute}, }} for _, tc := range tcs { t.Run(tc.name, func(t *testing.T) { pr := &v1beta1.PipelineRun{ ObjectMeta: baseObjectMeta(prName, ns), Spec: v1beta1.PipelineRunSpec{ PipelineRef: &v1beta1.PipelineRef{Name: p}, Timeout: tc.timeoutDuration, Timeouts: tc.timeoutFields, }, Status: v1beta1.PipelineRunStatus{ PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ StartTime: &metav1.Time{Time: tc.startTime}, }, }, } if d := cmp.Diff(getTaskRunTimeout(context.TODO(), pr, tc.rprt, testClock), tc.expected); d != "" { t.Errorf("Unexpected task run timeout. Diff %s", diff.PrintWantGot(d)) } }) } }
explode_data.jsonl/27306
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2738 }
[ 2830, 3393, 1949, 6262, 6727, 7636, 1155, 353, 8840, 836, 8, 341, 25653, 675, 1669, 330, 51501, 301, 10453, 359, 7246, 11672, 698, 84041, 1669, 330, 7975, 698, 3223, 1669, 330, 51258, 1837, 3244, 4837, 1669, 3056, 1235, 341, 197, 11609,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReconcileControlPlaneInfrastructureMachineTemplate(t *testing.T) { g := NewWithT(t) // Create InfrastructureMachineTemplates for test cases infrastructureMachineTemplate := builder.InfrastructureMachineTemplate(metav1.NamespaceDefault, "infra1"). Build() infrastructureMachineTemplate2 := builder.InfrastructureMachineTemplate(metav1.NamespaceDefault, "infra2"). Build() // Create the blueprint mandating controlPlaneInfrastructure. blueprint := &scope.ClusterBlueprint{ ClusterClass: builder.ClusterClass(metav1.NamespaceDefault, "class1"). WithControlPlaneInfrastructureMachineTemplate(infrastructureMachineTemplate). Build(), ControlPlane: &scope.ControlPlaneBlueprint{ InfrastructureMachineTemplate: infrastructureMachineTemplate, }, } // Create Cluster object for test cases. cluster := builder.Cluster(metav1.NamespaceDefault, "cluster1").Build() // Infrastructure object with a different Kind. incompatibleInfrastructureMachineTemplate := infrastructureMachineTemplate2.DeepCopy() incompatibleInfrastructureMachineTemplate.SetKind("incompatibleInfrastructureMachineTemplate") updatedInfrastructureMachineTemplate := infrastructureMachineTemplate.DeepCopy() err := unstructured.SetNestedField(updatedInfrastructureMachineTemplate.UnstructuredContent(), true, "spec", "differentSetting") g.Expect(err).ToNot(HaveOccurred()) // Create ControlPlaneObjects for test cases. controlPlane1 := builder.ControlPlane(metav1.NamespaceDefault, "cp1"). WithInfrastructureMachineTemplate(infrastructureMachineTemplate). Build() // ControlPlane object with novel field in the spec. controlPlane2 := controlPlane1.DeepCopy() err = unstructured.SetNestedField(controlPlane2.UnstructuredContent(), true, "spec", "differentSetting") g.Expect(err).ToNot(HaveOccurred()) // ControlPlane object with a new label. 
controlPlaneWithInstanceSpecificChanges := controlPlane1.DeepCopy() controlPlaneWithInstanceSpecificChanges.SetLabels(map[string]string{"foo": "bar"}) // ControlPlane object with the same name as controlPlane1 but a different InfrastructureMachineTemplate controlPlane3 := builder.ControlPlane(metav1.NamespaceDefault, "cp1"). WithInfrastructureMachineTemplate(updatedInfrastructureMachineTemplate). Build() tests := []struct { name string current *scope.ControlPlaneState desired *scope.ControlPlaneState want *scope.ControlPlaneState wantErr bool }{ { name: "Create desired InfrastructureMachineTemplate where it doesn't exist", current: &scope.ControlPlaneState{Object: controlPlane1.DeepCopy()}, desired: &scope.ControlPlaneState{Object: controlPlane1.DeepCopy(), InfrastructureMachineTemplate: infrastructureMachineTemplate.DeepCopy()}, want: &scope.ControlPlaneState{Object: controlPlane1.DeepCopy(), InfrastructureMachineTemplate: infrastructureMachineTemplate.DeepCopy()}, wantErr: false, }, { name: "Update desired InfrastructureMachineTemplate connected to controlPlane", current: &scope.ControlPlaneState{Object: controlPlane1.DeepCopy(), InfrastructureMachineTemplate: infrastructureMachineTemplate.DeepCopy()}, desired: &scope.ControlPlaneState{Object: controlPlane3, InfrastructureMachineTemplate: updatedInfrastructureMachineTemplate}, want: &scope.ControlPlaneState{Object: controlPlane3, InfrastructureMachineTemplate: updatedInfrastructureMachineTemplate}, wantErr: false, }, { name: "Fail on updating infrastructure with incompatible changes", current: &scope.ControlPlaneState{Object: controlPlane1.DeepCopy(), InfrastructureMachineTemplate: infrastructureMachineTemplate.DeepCopy()}, desired: &scope.ControlPlaneState{Object: controlPlane1.DeepCopy(), InfrastructureMachineTemplate: incompatibleInfrastructureMachineTemplate}, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { fakeObjs := make([]client.Object, 0) s := scope.New(cluster) 
s.Blueprint = blueprint if tt.current != nil { s.Current.ControlPlane = tt.current if tt.current.Object != nil { fakeObjs = append(fakeObjs, tt.current.Object) } if tt.current.InfrastructureMachineTemplate != nil { fakeObjs = append(fakeObjs, tt.current.InfrastructureMachineTemplate) } } fakeClient := fake.NewClientBuilder(). WithScheme(fakeScheme). WithObjects(fakeObjs...). Build() r := Reconciler{ Client: fakeClient, recorder: env.GetEventRecorderFor("test"), } s.Desired = &scope.ClusterState{ControlPlane: &scope.ControlPlaneState{Object: tt.desired.Object, InfrastructureMachineTemplate: tt.desired.InfrastructureMachineTemplate}} // Run reconcileControlPlane with the states created in the initial section of the test. err := r.reconcileControlPlane(ctx, s) if tt.wantErr { g.Expect(err).To(HaveOccurred()) return } g.Expect(err).ToNot(HaveOccurred()) // Create ControlPlane object for fetching data into gotControlPlaneObject := builder.ControlPlane("", "").Build() err = fakeClient.Get(ctx, client.ObjectKeyFromObject(tt.want.Object), gotControlPlaneObject) g.Expect(err).ToNot(HaveOccurred()) // Check to see if the controlPlaneObject has been updated with a new template. 
// This check is just for the naming format uses by generated templates - here it's templateName-* // This check is only performed when we had an initial template that has been changed if tt.current.InfrastructureMachineTemplate != nil { item, err := contract.ControlPlane().MachineTemplate().InfrastructureRef().Get(gotControlPlaneObject) g.Expect(err).ToNot(HaveOccurred()) pattern := fmt.Sprintf("%s.*", controlPlaneInfrastructureMachineTemplateNamePrefix(s.Current.Cluster.Name)) fmt.Println(pattern, item.Name) ok, err := regexp.Match(pattern, []byte(item.Name)) g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) } // Create object to hold the queried InfrastructureMachineTemplate gotInfrastructureMachineTemplate := builder.InfrastructureMachineTemplate("", "").Build() err = fakeClient.Get(ctx, client.ObjectKeyFromObject(tt.want.InfrastructureMachineTemplate), gotInfrastructureMachineTemplate) g.Expect(err).ToNot(HaveOccurred()) // Get the spec from the InfrastructureMachineTemplate we are expecting wantInfrastructureMachineTemplateSpec, ok, err := unstructured.NestedMap(tt.want.InfrastructureMachineTemplate.UnstructuredContent(), "spec") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) // Get the spec from the InfrastructureMachineTemplate we got from the client.Get gotInfrastructureMachineTemplateSpec, ok, err := unstructured.NestedMap(gotInfrastructureMachineTemplate.UnstructuredContent(), "spec") g.Expect(err).NotTo(HaveOccurred()) g.Expect(ok).To(BeTrue()) // Compare all keys and values in the InfrastructureMachineTemplate Spec for k, v := range wantInfrastructureMachineTemplateSpec { g.Expect(gotInfrastructureMachineTemplateSpec).To(HaveKeyWithValue(k, v)) } // Check to see that labels are as expected on the object for k, v := range tt.want.InfrastructureMachineTemplate.GetLabels() { g.Expect(gotInfrastructureMachineTemplate.GetLabels()).To(HaveKeyWithValue(k, v)) } }) } }
explode_data.jsonl/11660
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2279 }
[ 2830, 3393, 693, 40446, 457, 3273, 34570, 97838, 21605, 7275, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 197, 322, 4230, 44487, 21605, 51195, 369, 1273, 5048, 198, 197, 13573, 10314, 21605, 7275, 1669, 7363, 40...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestStoreDeleteCollectionNotFound(t *testing.T) { server, registry := NewTestGenericStoreRegistry(t) defer server.Terminate(t) testContext := api.WithNamespace(api.NewContext(), "test") podA := &api.Pod{ObjectMeta: api.ObjectMeta{Name: "foo"}} podB := &api.Pod{ObjectMeta: api.ObjectMeta{Name: "bar"}} for i := 0; i < 10; i++ { // Setup if _, err := registry.Create(testContext, podA); err != nil { t.Errorf("Unexpected error: %v", err) } if _, err := registry.Create(testContext, podB); err != nil { t.Errorf("Unexpected error: %v", err) } // Kick off multiple delete collection calls to test notfound behavior wg := &sync.WaitGroup{} for j := 0; j < 2; j++ { wg.Add(1) go func() { defer wg.Done() _, err := registry.DeleteCollection(testContext, nil, &api.ListOptions{}) if err != nil { t.Fatalf("Unexpected error: %v", err) } }() } wg.Wait() if _, err := registry.Get(testContext, podA.Name); !errors.IsNotFound(err) { t.Errorf("Unexpected error: %v", err) } if _, err := registry.Get(testContext, podB.Name); !errors.IsNotFound(err) { t.Errorf("Unexpected error: %v", err) } } }
explode_data.jsonl/238
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 6093, 6435, 6482, 10372, 1155, 353, 8840, 836, 8, 341, 41057, 11, 19424, 1669, 1532, 2271, 19964, 6093, 15603, 1155, 340, 16867, 3538, 836, 261, 34016, 1155, 692, 18185, 1972, 1669, 6330, 26124, 22699, 24827, 7121, 1972, 1507,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReadCloser(t *testing.T) { fifo := "rrfifo" os.Remove(fifo) err := exec.Command("mkfifo", fifo).Run() if err != nil { t.Error("mkfifo returned: ", err) } fmt.Println("Fifo created") done := make(chan bool) go func(done chan bool) { fread, err := os.OpenFile(fifo, os.O_RDONLY, 0600) if err != nil { t.Error("Error opening fifo: ", err) } reader := NewReadWriteCloser(fread, 2*time.Second, 50*time.Millisecond) fmt.Println("reader created") fmt.Println("read thread") closed := false for { if closed { break } rdata := make([]byte, 128) c, err := reader.Read(rdata) if err == io.EOF { fmt.Println("Reader returned EOF, exiting read routine") break } if err != nil { t.Error("Read error: ", err) } fmt.Println("read count: ", c) if !closed { go func() { time.Sleep(20 * time.Millisecond) fmt.Println("closing read file") reader.Close() closed = true }() } } done <- true }(done) time.Sleep(20 * time.Millisecond) fwrite, err := os.OpenFile(fifo, os.O_WRONLY, 0644) if err != nil { t.Error("Error opening file for writing: ", err) } c, err := fwrite.Write([]byte("Hi there")) if err != nil { t.Error("Write error: ", err) } fmt.Printf("Wrote %v bytes\n", c) <-done fmt.Println("removing fifo") err = os.Remove(fifo) if err != nil { t.Error("Error removing fifo") } fmt.Println("test all done") }
explode_data.jsonl/49961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 647 }
[ 2830, 3393, 4418, 51236, 799, 1155, 353, 8840, 836, 8, 341, 1166, 31497, 1669, 330, 634, 74031, 698, 25078, 13270, 955, 31497, 340, 9859, 1669, 3883, 12714, 445, 24452, 74031, 497, 63497, 568, 6727, 741, 743, 1848, 961, 2092, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1