text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func Test_buildEnvVars_FourSortedKeys(t *testing.T) { firstKey := "alex" secondKey := "elliot" thirdKey := "stefan" lastKey := "zane" inputEnvs := map[string]string{ lastKey: "", firstKey: "", thirdKey: "", secondKey: "", } function := types.FunctionDeployment{ EnvVars: inputEnvs, } coreEnvs := buildEnvVars(&function) if coreEnvs[0].Name != firstKey { t.Errorf("first want: %s, got: %s", firstKey, coreEnvs[0].Name) t.Fail() } if coreEnvs[1].Name != secondKey { t.Errorf("second want: %s, got: %s", secondKey, coreEnvs[1].Name) t.Fail() } if coreEnvs[2].Name != thirdKey { t.Errorf("third want: %s, got: %s", thirdKey, coreEnvs[2].Name) t.Fail() } if coreEnvs[3].Name != lastKey { t.Errorf("last want: %s, got: %s", lastKey, coreEnvs[3].Name) t.Fail() } }
explode_data.jsonl/1075
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 20801, 14359, 28305, 1400, 413, 51051, 8850, 1155, 353, 8840, 836, 8, 341, 42190, 1592, 1669, 330, 55875, 698, 197, 5569, 1592, 1669, 330, 613, 11098, 698, 197, 31727, 1592, 1669, 330, 267, 823, 276, 698, 33096, 1592, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAddScaledTo(t *testing.T) { t.Parallel() s := []float64{3, 4, 1, 7, 5} alpha := 6.0 y := []float64{1, 2, 3, 4, 5} dst1 := make([]float64, 5) ans := []float64{19, 26, 9, 46, 35} dst2 := AddScaledTo(dst1, y, alpha, s) if !EqualApprox(dst1, ans, EqTolerance) { t.Errorf("AddScaledTo did not match for mutator") } if !EqualApprox(dst2, ans, EqTolerance) { t.Errorf("AddScaledTo did not match for returned slice") } AddScaledTo(dst1, y, alpha, s) if !EqualApprox(dst1, ans, EqTolerance) { t.Errorf("Reusing dst did not match") } short := []float64{1} if !Panics(func() { AddScaledTo(dst1, y, alpha, short) }) { t.Errorf("Doesn't panic if s is smaller than dst") } if !Panics(func() { AddScaledTo(short, y, alpha, s) }) { t.Errorf("Doesn't panic if dst is smaller than s") } if !Panics(func() { AddScaledTo(dst1, short, alpha, s) }) { t.Errorf("Doesn't panic if y is smaller than dst") } }
explode_data.jsonl/1210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 2212, 94201, 1249, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1903, 1669, 3056, 3649, 21, 19, 90, 18, 11, 220, 19, 11, 220, 16, 11, 220, 22, 11, 220, 20, 532, 73063, 1669, 220, 21, 13, 15, 198, 14522, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStorageTest runs the generic storagetest suite against a "cond"
// blobserver composed in-memory: writes are routed to /good-schema/ when the
// blob is a schema blob ("if": "isSchema") and to /good-other/ otherwise,
// while reads and removes go through a replica spanning both backends.
// The cleanup function returned to the suite is a no-op because all storage
// is created via the test loader.
func TestStorageTest(t *testing.T) { storagetest.Test(t, func(t *testing.T) (_ blobserver.Storage, cleanup func()) { ld := test.NewLoader() s1, _ := ld.GetStorage("/good-schema/") s2, _ := ld.GetStorage("/good-other/") ld.SetStorage("/replica-all/", replica.NewForTest([]blobserver.Storage{s1, s2})) sto := newCond(t, ld, map[string]interface{}{ "write": map[string]interface{}{ "if": "isSchema", "then": "/good-schema/", "else": "/good-other/", }, "read": "/replica-all/", "remove": "/replica-all/", }) return sto, func() {} }) }
explode_data.jsonl/10993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 5793, 2271, 1155, 353, 8840, 836, 8, 341, 18388, 269, 351, 57824, 8787, 1155, 11, 2915, 1155, 353, 8840, 836, 8, 5453, 23404, 4030, 43771, 11, 21290, 2915, 2140, 341, 197, 197, 507, 1669, 1273, 7121, 9181, 741, 197, 1903, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParse_cmpl(t *testing.T) { tt(t, func() { test := func(src string) { program, err := parser.ParseFile(nil, "", src, 0) is(err, nil) is(cmpl_parse(program), "!=", nil) } test(``) test(`var abc = 1; abc;`) test(` function abc() { return; } `) }) }
explode_data.jsonl/9318
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 14463, 43619, 500, 1155, 353, 8840, 836, 8, 341, 3244, 83, 1155, 11, 2915, 368, 1476, 197, 18185, 1669, 2915, 14705, 914, 8, 341, 298, 197, 14906, 11, 1848, 1669, 6729, 8937, 1703, 27907, 11, 7342, 2286, 11, 220, 15, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGRPCProbe verifies native Kubernetes gRPC health probes under Istio.
// It skips on clusters older than Kubernetes 1.23 (where native gRPC probes
// are unavailable), creates an injected namespace, enforces STRICT mTLS via
// a PeerAuthentication resource, and then runs three sub-cases covering the
// probe-rewrite x readiness matrix; openPort records whether the probe port
// is expected to be reachable in each combination.
func TestGRPCProbe(t *testing.T) { framework.NewTest(t). Features("usability.observability.grpc-probe"). Run(func(t framework.TestContext) { if !t.Clusters().Default().MinKubeVersion(23) { t.Skip("gRPC probe not supported") } ns := namespace.NewOrFail(t, t, namespace.Config{Prefix: "grpc-probe", Inject: true}) // apply strict mtls t.ConfigKube().YAML(fmt.Sprintf(` apiVersion: security.istio.io/v1beta1 kind: PeerAuthentication metadata: name: grpc-probe-mtls namespace: %s spec: mtls: mode: STRICT`, ns.Name())).ApplyOrFail(t, ns.Name()) for _, testCase := range []struct { name string rewrite bool ready bool openPort bool }{ {name: "norewrite-unready", rewrite: false, ready: false, openPort: true}, {name: "rewrite-unready", rewrite: true, ready: false, openPort: false}, {name: "rewrite-ready", rewrite: true, ready: true, openPort: true}, } { t.NewSubTest(testCase.name).Run(func(t framework.TestContext) { runGRPCProbeDeployment(t, ns, testCase.name, testCase.rewrite, testCase.ready, testCase.openPort) }) } }) }
explode_data.jsonl/62962
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 467 }
[ 2830, 3393, 8626, 4872, 81426, 1155, 353, 8840, 836, 8, 341, 1166, 5794, 7121, 2271, 1155, 4292, 197, 197, 21336, 445, 355, 2897, 13, 22764, 2897, 69612, 9838, 1371, 38609, 197, 85952, 18552, 1155, 12626, 8787, 1972, 8, 341, 298, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateItemWebhook(t *testing.T) { sandboxResp, _ := testClient.CreateSandboxPublicToken(sandboxInstitution, testProducts) tokenResp, _ := testClient.ExchangePublicToken(sandboxResp.PublicToken) itemResp, err := testClient.UpdateItemWebhook(tokenResp.AccessToken, "https://plaid.com/webhook-test") assert.Nil(t, err) assert.NotNil(t, itemResp.Item) assert.Equal(t, itemResp.Item.Webhook, "https://plaid.com/webhook-test") }
explode_data.jsonl/12573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 4289, 1234, 5981, 20873, 1155, 353, 8840, 836, 8, 341, 1903, 31536, 36555, 11, 716, 1669, 1273, 2959, 7251, 50, 31536, 12676, 3323, 1141, 31536, 641, 10446, 11, 1273, 17746, 340, 43947, 36555, 11, 716, 1669, 1273, 2959, 8699...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPhantomHashBasedValidation checks phantom-read detection using Merkle
// hash summaries over range-query results. The DB is seeded with key1..key9
// at heights (1,0)..(1,8). The first simulated range query [key2, key9)
// records reads matching the committed versions, so validation flags no
// invalid transactions ([]int{}). The second query [key1, key9) records a
// stale version for key3 ((1,1) instead of the committed (1,2)), so its hash
// summary mismatches and transaction 0 is marked invalid ([]int{0}).
func TestPhantomHashBasedValidation(t *testing.T) { testDBEnv := testEnvs[levelDBtestEnvName] testDBEnv.Init(t) defer testDBEnv.Cleanup() db := testDBEnv.GetDBHandle("TestDB") //populate db with initial data batch := privacyenabledstate.NewUpdateBatch() batch.PubUpdates.Put("ns1", "key1", []byte("value1"), version.NewHeight(1, 0)) batch.PubUpdates.Put("ns1", "key2", []byte("value2"), version.NewHeight(1, 1)) batch.PubUpdates.Put("ns1", "key3", []byte("value3"), version.NewHeight(1, 2)) batch.PubUpdates.Put("ns1", "key4", []byte("value4"), version.NewHeight(1, 3)) batch.PubUpdates.Put("ns1", "key5", []byte("value5"), version.NewHeight(1, 4)) batch.PubUpdates.Put("ns1", "key6", []byte("value6"), version.NewHeight(1, 5)) batch.PubUpdates.Put("ns1", "key7", []byte("value7"), version.NewHeight(1, 6)) batch.PubUpdates.Put("ns1", "key8", []byte("value8"), version.NewHeight(1, 7)) batch.PubUpdates.Put("ns1", "key9", []byte("value9"), version.NewHeight(1, 8)) db.ApplyPrivacyAwareUpdates(batch, version.NewHeight(1, 8)) testValidator := &validator{db: db, hashFunc: testHashFunc} rwsetBuilder1 := rwsetutil.NewRWSetBuilder() rqi1 := &kvrwset.RangeQueryInfo{StartKey: "key2", EndKey: "key9", ItrExhausted: true} kvReadsDuringSimulation1 := []*kvrwset.KVRead{ rwsetutil.NewKVRead("key2", version.NewHeight(1, 1)), rwsetutil.NewKVRead("key3", version.NewHeight(1, 2)), rwsetutil.NewKVRead("key4", version.NewHeight(1, 3)), rwsetutil.NewKVRead("key5", version.NewHeight(1, 4)), rwsetutil.NewKVRead("key6", version.NewHeight(1, 5)), rwsetutil.NewKVRead("key7", version.NewHeight(1, 6)), rwsetutil.NewKVRead("key8", version.NewHeight(1, 7)), } rwsetutil.SetMerkelSummary(rqi1, buildTestHashResults(t, 2, kvReadsDuringSimulation1)) rwsetBuilder1.AddToRangeQuerySet("ns1", rqi1) checkValidation(t, testValidator, getTestPubSimulationRWSet(t, rwsetBuilder1), []int{}) rwsetBuilder2 := rwsetutil.NewRWSetBuilder() rqi2 := &kvrwset.RangeQueryInfo{StartKey: "key1", EndKey: "key9", ItrExhausted: false} 
kvReadsDuringSimulation2 := []*kvrwset.KVRead{ rwsetutil.NewKVRead("key1", version.NewHeight(1, 0)), rwsetutil.NewKVRead("key2", version.NewHeight(1, 1)), rwsetutil.NewKVRead("key3", version.NewHeight(1, 1)), rwsetutil.NewKVRead("key4", version.NewHeight(1, 3)), rwsetutil.NewKVRead("key5", version.NewHeight(1, 4)), rwsetutil.NewKVRead("key6", version.NewHeight(1, 5)), rwsetutil.NewKVRead("key7", version.NewHeight(1, 6)), rwsetutil.NewKVRead("key8", version.NewHeight(1, 7)), rwsetutil.NewKVRead("key9", version.NewHeight(1, 8)), } rwsetutil.SetMerkelSummary(rqi2, buildTestHashResults(t, 2, kvReadsDuringSimulation2)) rwsetBuilder2.AddToRangeQuerySet("ns1", rqi2) checkValidation(t, testValidator, getTestPubSimulationRWSet(t, rwsetBuilder2), []int{0}) }
explode_data.jsonl/65029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1145 }
[ 2830, 3393, 3357, 30002, 6370, 28715, 13799, 1155, 353, 8840, 836, 8, 341, 18185, 3506, 14359, 1669, 1273, 1702, 11562, 64586, 3506, 1944, 14359, 675, 921, 18185, 3506, 14359, 26849, 1155, 340, 16867, 1273, 3506, 14359, 727, 60639, 741, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestExternalModuleExclusionRelativePath checks that relative import paths
// in the sources are resolved to absolute paths before being matched against
// ExternalModules.AbsPaths: './foo.js', '../../../out/in-out-dir.js',
// '../../sha256.min.js', and the query-carrying absolute path
// '/api/config?a=1&b=2' should all be treated as external and left unbundled.
func TestExternalModuleExclusionRelativePath(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/Users/user/project/src/index.js": ` import './nested/folder/test' `, "/Users/user/project/src/nested/folder/test.js": ` import foo from './foo.js' import out from '../../../out/in-out-dir.js' import sha256 from '../../sha256.min.js' import config from '/api/config?a=1&b=2' console.log(foo, out, sha256, config) `, }, entryPaths: []string{"/Users/user/project/src/index.js"}, options: config.Options{ Mode: config.ModeBundle, AbsOutputDir: "/Users/user/project/out", ExternalModules: config.ExternalModules{ AbsPaths: map[string]bool{ "/Users/user/project/out/in-out-dir.js": true, "/Users/user/project/src/nested/folder/foo.js": true, "/Users/user/project/src/sha256.min.js": true, "/api/config?a=1&b=2": true, }, }, }, }) }
explode_data.jsonl/38525
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 470 }
[ 2830, 3393, 25913, 3332, 840, 8957, 28442, 1820, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 7137, 11739, 40118, 13437, 9022, 2857, 788...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStreamReceiveMsgFromReplica verifies that a subscriber using
// lift.ReadISRReplica() can consume messages from a follower replica rather
// than the partition leader. Two liftbridge servers share a central NATS
// server; a stream with replication factor 2 is created, the follower is
// located, and a subscription on the follower counts messages (ch1 closes
// after the first batch, ch2 after five more). A third client then replays
// the log from offset `num` via StartAtOffset to confirm playback.
// NOTE(review): the follower subscription increments the shared counter `i`
// from the subscription callback while the test goroutine later reassigns
// `i = num` for the playback phase; the per-message content assertions in
// that callback are commented out (//assertMsg), so only counts are checked
// there — presumably deliberate, but worth confirming against upstream.
func TestStreamReceiveMsgFromReplica(t *testing.T) { defer cleanupStorage(t) // Use a central NATS server. ns := natsdTest.RunDefaultServer() defer ns.Shutdown() // Configure server. s1Config := getTestConfig("a", true, 5050) s1 := runServerWithConfig(t, s1Config) defer s1.Stop() // Configure second server. s2Config := getTestConfig("b", false, 5051) s2 := runServerWithConfig(t, s2Config) defer s2.Stop() getMetadataLeader(t, 10*time.Second, s1, s2) client, err := lift.Connect([]string{"localhost:5050"}) require.NoError(t, err) defer client.Close() name := "foo" subject := "foo" err = client.CreateStream(context.Background(), subject, name, lift.ReplicationFactor(2)) require.NoError(t, err) num := 5 expected := make([]*message, num) for i := 0; i < num; i++ { expected[i] = &message{ Key: []byte("bar"), Value: []byte(strconv.Itoa(i)), Offset: int64(i), } } i := 0 ch1 := make(chan struct{}) ch2 := make(chan struct{}) leader := getPartitionLeader(t, 10*time.Second, name, 0, s1, s2) var followerConfig *Config if leader == s1 { followerConfig = s2Config } else { followerConfig = s1Config } followerAdd := fmt.Sprintf("localhost:%d", followerConfig.Port) client2, err := lift.Connect([]string{followerAdd}) require.NoError(t, err) defer client2.Close() // Subscribe on the follower. err = client2.Subscribe(context.Background(), name, func(msg lift.Message, err error) { require.NoError(t, err) //expect := expected[i] //assertMsg(t, expect, msg) i++ if i == num { close(ch1) } if i == num+5 { close(ch2) } }, lift.ReadISRReplica()) require.NoError(t, err) // Publish messages. for i := 0; i < num; i++ { _, err = client.Publish(context.Background(), name, expected[i].Value, lift.Key(expected[i].Key)) require.NoError(t, err) } // Wait to receive initial messages. select { case <-ch1: case <-time.After(10 * time.Second): t.Fatal("Did not receive all expected messages") } // Publish some more messages. 
for i := 0; i < 5; i++ { expected = append(expected, &message{ Key: []byte("baz"), Value: []byte(strconv.Itoa(i + num)), Offset: int64(i + num), }) } for i := 0; i < 5; i++ { _, err = client.Publish(context.Background(), name, expected[i+num].Value, lift.Key(expected[i+num].Key)) require.NoError(t, err) } // Wait to receive remaining messages. select { case <-ch2: case <-time.After(10 * time.Second): t.Fatal("Did not receive all expected messages") } // Make sure we can play back the log. client3, err := lift.Connect([]string{"localhost:5050"}) require.NoError(t, err) defer client3.Close() i = num ch1 = make(chan struct{}) err = client3.Subscribe(context.Background(), name, func(msg lift.Message, err error) { require.NoError(t, err) expect := expected[i] assertMsg(t, expect, msg) i++ if i == num+5 { close(ch1) } }, lift.StartAtOffset(int64(num))) require.NoError(t, err) select { case <-ch1: case <-time.After(10 * time.Second): t.Fatal("Did not receive all expected messages") } }
explode_data.jsonl/34469
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1231 }
[ 2830, 3393, 3027, 14742, 6611, 3830, 18327, 15317, 1155, 353, 8840, 836, 8, 341, 16867, 21290, 5793, 1155, 692, 197, 322, 5443, 264, 8622, 18248, 50, 3538, 624, 84041, 1669, 308, 1862, 67, 2271, 16708, 3675, 5475, 741, 16867, 12268, 108...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewDNSProvider(t *testing.T) { testCases := []struct { desc string envVars map[string]string expected string }{ { desc: "success", envVars: map[string]string{ "VULTR_API_KEY": "123", }, }, { desc: "missing api key", envVars: map[string]string{ "VULTR_API_KEY": "", }, expected: "vultr: some credentials information are missing: VULTR_API_KEY", }, } for _, test := range testCases { t.Run(test.desc, func(t *testing.T) { defer envTest.RestoreEnv() envTest.ClearEnv() envTest.Apply(test.envVars) p, err := NewDNSProvider() if len(test.expected) == 0 { require.NoError(t, err) require.NotNil(t, p) require.NotNil(t, p.config) require.NotNil(t, p.client) } else { require.EqualError(t, err, test.expected) } }) } }
explode_data.jsonl/7612
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 389 }
[ 2830, 3393, 3564, 61088, 5179, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 41653, 257, 914, 198, 197, 57538, 28305, 220, 2415, 14032, 30953, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 515, 298, 41653, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewIssuer(t *testing.T) { lib := new(mocks.Lib) cfg := &Config{ NonceExpiration: "15", NonceSweepInterval: "15", } issuer := NewIssuer("ca1", ".", cfg, util.GetDefaultBCCSP(), lib) assert.NotNil(t, issuer) }
explode_data.jsonl/23675
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 3564, 98902, 1155, 353, 8840, 836, 8, 341, 93459, 1669, 501, 1255, 25183, 86281, 340, 50286, 1669, 609, 2648, 515, 197, 197, 90528, 66301, 25, 262, 330, 16, 20, 756, 197, 197, 90528, 50, 48542, 10256, 25, 330, 16, 20, 75...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPrinterSupportsExpectedTemplateFormats drives GoTemplatePrintFlags
// through its supported output formats (inline go-template, template file,
// template argument variants) against a minimal v1.Pod named "foo". It covers
// four outcomes per case: no matching printer (expectNoMatch), a construction
// error (expectedError), a successful print (expectedOutput), and the
// fall-through where an invalid template argument is echoed as output.
// A temp file holding "{{ .metadata.name }}" backs the go-template-file case
// and is removed via the deferred cleanup closure.
// NOTE(review): the final assertion compares only the LENGTHS of actual and
// expected output (len(out.String()) != len(tc.expectedOutput)), so any
// same-length wrong value would pass — confirm whether an exact string
// comparison was intended before tightening it.
func TestPrinterSupportsExpectedTemplateFormats(t *testing.T) { testObject := &v1.Pod{ObjectMeta: metav1.ObjectMeta{Name: "foo"}} templateFile, err := ioutil.TempFile("", "printers_jsonpath_flags") if err != nil { t.Fatalf("unexpected error: %v", err) } defer func(tempFile *os.File) { tempFile.Close() os.Remove(tempFile.Name()) }(templateFile) fmt.Fprintf(templateFile, "{{ .metadata.name }}") testCases := []struct { name string outputFormat string templateArg string expectedError string expectedParseError string expectedOutput string expectNoMatch bool }{ { name: "valid output format also containing the template argument succeeds", outputFormat: "go-template={{ .metadata.name }}", expectedOutput: "foo", }, { name: "valid output format and no template argument results in an error", outputFormat: "template", expectedError: "template format specified but no template given", }, { name: "valid output format and template argument succeeds", outputFormat: "go-template", templateArg: "{{ .metadata.name }}", expectedOutput: "foo", }, { name: "Go-template file should match, and successfully return correct value", outputFormat: "go-template-file", templateArg: templateFile.Name(), expectedOutput: "foo", }, { name: "valid output format and invalid template argument results in the templateArg contents as the output", outputFormat: "go-template", templateArg: "invalid", expectedOutput: "invalid", }, { name: "no printer is matched on an invalid outputFormat", outputFormat: "invalid", expectNoMatch: true, }, { name: "go-template printer should not match on any other format supported by another printer", outputFormat: "jsonpath", expectNoMatch: true, }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { templateArg := &tc.templateArg if len(tc.templateArg) == 0 { templateArg = nil } printFlags := printers.GoTemplatePrintFlags{ TemplateArgument: templateArg, } p, err := printFlags.ToPrinter(tc.outputFormat) if tc.expectNoMatch { if 
!printers.IsNoCompatiblePrinterError(err) { t.Fatalf("expected no printer matches for output format %q", tc.outputFormat) } return } if printers.IsNoCompatiblePrinterError(err) { t.Fatalf("expected to match template printer for output format %q", tc.outputFormat) } if len(tc.expectedError) > 0 { if err == nil || !strings.Contains(err.Error(), tc.expectedError) { t.Errorf("expecting error %q, got %v", tc.expectedError, err) } return } if err != nil { t.Fatalf("unexpected error: %v", err) } out := bytes.NewBuffer([]byte{}) err = p.PrintObj(testObject, out) if err != nil { t.Errorf("unexpected error: %v", err) } if len(out.String()) != len(tc.expectedOutput) { t.Errorf("unexpected output: expecting %q, got %q", tc.expectedOutput, out.String()) } }) } }
explode_data.jsonl/13186
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1239 }
[ 2830, 3393, 45660, 7916, 82, 18896, 7275, 44599, 1155, 353, 8840, 836, 8, 341, 18185, 1190, 1669, 609, 85, 16, 88823, 90, 1190, 12175, 25, 77520, 16, 80222, 63121, 25, 330, 7975, 9207, 630, 22832, 1703, 11, 1848, 1669, 43144, 65009, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDecodeUnkownDebugCfg ("Unkown" [sic] — exported name kept as-is)
// writes a TOML config containing only unknown [debug] keys, runs it through
// the server option pipeline (parse flags -> complete -> validate), and
// asserts that the unknown keys are silently ignored: the resulting Debug
// config must equal the compiled-in defaults for both the DB sorter and the
// peer-message settings.
func TestDecodeUnkownDebugCfg(t *testing.T) { tmpDir := t.TempDir() configPath := filepath.Join(tmpDir, "ticdc.toml") configContent := ` [debug] unknown1 = 1 [debug.unknown2] unknown3 = 3 ` err := os.WriteFile(configPath, []byte(configContent), 0o644) require.Nil(t, err) cmd := new(cobra.Command) o := newOptions() o.addFlags(cmd) require.Nil(t, cmd.ParseFlags([]string{"--config", configPath})) err = o.complete(cmd) require.Nil(t, err) err = o.validate() require.Nil(t, err) require.Equal(t, &config.DebugConfig{ EnableTableActor: false, EnableDBSorter: false, DB: &config.DBConfig{ Count: 8, Concurrency: 128, MaxOpenFiles: 10000, BlockSize: 65536, BlockCacheSize: 4294967296, WriterBufferSize: 8388608, Compression: "snappy", TargetFileSizeBase: 8388608, WriteL0SlowdownTrigger: math.MaxInt32, WriteL0PauseTrigger: math.MaxInt32, CompactionL0Trigger: 160, CompactionDeletionThreshold: 160000, IteratorMaxAliveDuration: 10000, IteratorSlowReadDuration: 256, CleanupSpeedLimit: 10000, }, // We expect the default configuration here. Messages: &config.MessagesConfig{ ClientMaxBatchInterval: config.TomlDuration(time.Millisecond * 200), ClientMaxBatchSize: 8 * 1024 * 1024, ClientMaxBatchCount: 128, ClientRetryRateLimit: 1.0, ServerMaxPendingMessageCount: 102400, ServerAckInterval: config.TomlDuration(time.Millisecond * 100), ServerWorkerPoolSize: 4, }, }, o.serverConfig.Debug) }
explode_data.jsonl/41636
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 803 }
[ 2830, 3393, 32564, 1806, 74, 779, 7939, 42467, 1155, 353, 8840, 836, 8, 341, 20082, 6184, 1669, 259, 65009, 6184, 741, 25873, 1820, 1669, 26054, 22363, 10368, 6184, 11, 330, 28050, 7628, 73494, 75, 1138, 25873, 2762, 1669, 22074, 58, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouter_PrioritizeByWeight_StillMatchesRoutes(t *testing.T) { mainRouter := Router{} _ = mainRouter.Register(http.MethodGet, "/", testHandlerFunc) _ = mainRouter.Register(http.MethodGet, "/with/slash", testHandlerFunc, MatchingOptions{Name: "/w/s"}) _ = mainRouter.Register(http.MethodGet, "/path1", testHandlerFunc, MatchingOptions{Name: "path"}) _ = mainRouter.Register(http.MethodGet, "/path1/{id}/{name:[a-z]{1,5}}", testHandlerFunc) _ = mainRouter.Register(http.MethodGet, "/path1/{file:.*}", testHandlerFunc, MatchingOptions{Name: "path"}) _ = mainRouter.Register(http.MethodGet, "/{date:[0-9]{4}-[0-9]{2}-[0-9]{2}}", testHandlerFunc) mainRouter.PrioritizeByWeight() assertPathFound(t, mainRouter, "GET", "/") assertPathFound(t, mainRouter, "GET", "/with/slash") assertPathFound(t, mainRouter, "GET", "/path1") assertPathFound(t, mainRouter, "GET", "/path1/1/name") assertPathFound(t, mainRouter, "GET", "/path1/some/path/to/file") assertPathFound(t, mainRouter, "GET", "/2021-01-31") }
explode_data.jsonl/31744
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 401 }
[ 2830, 3393, 9523, 1088, 3254, 26310, 1359, 8295, 62, 23322, 42470, 26653, 1155, 353, 8840, 836, 8, 341, 36641, 9523, 1669, 10554, 31483, 197, 62, 284, 1887, 9523, 19983, 19886, 20798, 1949, 11, 64657, 1273, 3050, 9626, 340, 197, 62, 284...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSpanLinkSlice(t *testing.T) { es := NewSpanLinkSlice() assert.EqualValues(t, 0, es.Len()) es = newSpanLinkSlice(&[]*otlptrace.Span_Link{}) assert.EqualValues(t, 0, es.Len()) es.EnsureCapacity(7) emptyVal := newSpanLink(&otlptrace.Span_Link{}) testVal := generateTestSpanLink() assert.EqualValues(t, 7, cap(*es.orig)) for i := 0; i < es.Len(); i++ { el := es.AppendEmpty() assert.EqualValues(t, emptyVal, el) fillTestSpanLink(el) assert.EqualValues(t, testVal, el) } }
explode_data.jsonl/63289
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 12485, 3939, 33236, 1155, 353, 8840, 836, 8, 341, 78966, 1669, 1532, 12485, 3939, 33236, 741, 6948, 12808, 6227, 1155, 11, 220, 15, 11, 1531, 65819, 2398, 78966, 284, 501, 12485, 3939, 33236, 2099, 1294, 9, 354, 75, 3505, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDB_Open_InitialMmapSize checks that opening a bolt DB with a large
// InitialMmapSize (1GB) lets a big write (134MB) commit while a long-running
// read transaction is open: because the mmap is pre-sized, the writer does
// not need to remap the file and therefore is not blocked by the reader.
// The commit runs in a goroutine; if it hasn't finished within 5 seconds the
// test concludes the reader blocked the writer and fails.
func TestDB_Open_InitialMmapSize(t *testing.T) { path := tempfile() defer os.Remove(path) initMmapSize := 1 << 30 // 1GB testWriteSize := 1 << 27 // 134MB db, err := bolt.Open(path, 0666, &bolt.Options{InitialMmapSize: initMmapSize}) if err != nil { t.Fatal(err) } // create a long-running read transaction // that never gets closed while writing rtx, err := db.Begin(false) if err != nil { t.Fatal(err) } // create a write transaction wtx, err := db.Begin(true) if err != nil { t.Fatal(err) } b, err := wtx.CreateBucket([]byte("test")) if err != nil { t.Fatal(err) } // and commit a large write err = b.Put([]byte("foo"), make([]byte, testWriteSize)) if err != nil { t.Fatal(err) } done := make(chan error, 1) go func() { err := wtx.Commit() done <- err }() select { case <-time.After(5 * time.Second): t.Errorf("unexpected that the reader blocks writer") case err := <-done: if err != nil { t.Fatal(err) } } if err := rtx.Rollback(); err != nil { t.Fatal(err) } }
explode_data.jsonl/27468
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 427 }
[ 2830, 3393, 3506, 51747, 62, 6341, 44, 2186, 1695, 1155, 353, 8840, 836, 8, 341, 26781, 1669, 54819, 741, 16867, 2643, 13270, 5581, 692, 28248, 44, 2186, 1695, 1669, 220, 16, 1115, 220, 18, 15, 220, 442, 220, 16, 5381, 198, 18185, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAccSpannerInstance_update is a Terraform acceptance test: it creates a
// Spanner instance, then updates its display name and node count, verifying
// state via import after each step. It is skipped under VCR because the
// display names are randomized per run (non-deterministic cassettes), and
// runs in parallel with other acceptance tests.
func TestAccSpannerInstance_update(t *testing.T) { // Randomness skipIfVcr(t) t.Parallel() dName1 := fmt.Sprintf("spanner-dname1-%s", randString(t, 10)) dName2 := fmt.Sprintf("spanner-dname2-%s", randString(t, 10)) vcrTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckSpannerInstanceDestroyProducer(t), Steps: []resource.TestStep{ { Config: testAccSpannerInstance_update(dName1, 1, false), }, { ResourceName: "google_spanner_instance.updater", ImportState: true, ImportStateVerify: true, }, { Config: testAccSpannerInstance_update(dName2, 2, true), }, { ResourceName: "google_spanner_instance.updater", ImportState: true, ImportStateVerify: true, }, }, }) }
explode_data.jsonl/31771
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 376 }
[ 2830, 3393, 14603, 12485, 1194, 2523, 8882, 1155, 353, 8840, 836, 8, 341, 197, 322, 10612, 2090, 198, 1903, 13389, 2679, 53, 5082, 1155, 340, 3244, 41288, 7957, 2822, 2698, 675, 16, 1669, 8879, 17305, 445, 1480, 1194, 1737, 606, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEvalRangeNode evaluates the template `range` construct in three forms
// against a three-user slice: bare `{{range users}}` with the implicit dot,
// `{{range user:=users}}` with a named value, and `{{range i, user:=users}}`
// with an index (the third expected string only prefixes indices 0 and 2,
// matching the `if i == 0 || i == 2` guard in the template).
func TestEvalRangeNode(t *testing.T) { var data = make(VarMap) data.Set("users", []User{ {"Mario Santos", "mario@gmail.com"}, {"Joel Silva", "joelsilva@gmail.com"}, {"Luis Santana", "luis.santana@gmail.com"}, }) const resultString = `<h1>Mario Santos<small>mario@gmail.com</small></h1><h1>Joel Silva<small>joelsilva@gmail.com</small></h1><h1>Luis Santana<small>luis.santana@gmail.com</small></h1>` RunJetTest(t, data, nil, "Range_Expression", `{{range users}}<h1>{{.Name}}<small>{{.Email}}</small></h1>{{end}}`, resultString) RunJetTest(t, data, nil, "Range_ExpressionValue", `{{range user:=users}}<h1>{{user.Name}}<small>{{user.Email}}</small></h1>{{end}}`, resultString) var resultString2 = `<h1>0: Mario Santos<small>mario@gmail.com</small></h1><h1>Joel Silva<small>joelsilva@gmail.com</small></h1><h1>2: Luis Santana<small>luis.santana@gmail.com</small></h1>` RunJetTest(t, data, nil, "Range_ExpressionValueIf", `{{range i, user:=users}}<h1>{{if i == 0 || i == 2}}{{i}}: {{end}}{{user.Name}}<small>{{user.Email}}</small></h1>{{end}}`, resultString2) }
explode_data.jsonl/22896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 437 }
[ 2830, 3393, 54469, 6046, 1955, 1155, 353, 8840, 836, 8, 1476, 2405, 821, 284, 1281, 7, 3962, 2227, 692, 8924, 4202, 445, 4218, 497, 3056, 1474, 515, 197, 197, 4913, 78734, 47623, 497, 330, 76, 3290, 10375, 905, 7115, 197, 197, 4913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBalancedQueuePusher(t *testing.T) { const numPushers = 100 var pushers []Pusher var mockedPushers []*mockedPusher for i := 0; i < numPushers; i++ { p := &mockedPusher{ name: "pusher:" + strconv.Itoa(i), } pushers = append(pushers, p) mockedPushers = append(mockedPushers, p) } pusher := NewBalancedPusher(pushers) assert.True(t, len(pusher.Name()) > 0) for i := 0; i < numPushers*1000; i++ { assert.Nil(t, pusher.Push("item")) } var counts []int for _, p := range mockedPushers { counts = append(counts, p.count) } mean := calcMean(counts) variance := calcVariance(mean, counts) assert.True(t, variance < 100, fmt.Sprintf("too big variance - %.2f", variance)) }
explode_data.jsonl/8531
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 296 }
[ 2830, 3393, 37889, 4874, 7554, 16644, 261, 1155, 353, 8840, 836, 8, 341, 4777, 1629, 16644, 388, 284, 220, 16, 15, 15, 198, 2405, 4484, 388, 3056, 16644, 261, 198, 2405, 46149, 16644, 388, 29838, 16712, 291, 16644, 261, 198, 2023, 600...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHTTPSource(t *testing.T) { dir := tempDir() defer os.RemoveAll(dir) certPEM, keyPEM := makePEM("localhost", time.Minute) certFile, keyFile := saveCert(dir, "localhost", certPEM, keyPEM) listFile := filepath.Base(certFile) + "\n" + filepath.Base(keyFile) + "\n" writeFile(filepath.Join(dir, "list"), []byte(listFile)) srv := httptest.NewServer(http.FileServer(http.Dir(dir))) defer srv.Close() testSource(t, HTTPSource{CertURL: srv.URL + "/list"}, makeCertPool(certPEM), 500*time.Millisecond) }
explode_data.jsonl/24945
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 198 }
[ 2830, 3393, 9230, 3608, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 2730, 6184, 741, 16867, 2643, 84427, 14161, 340, 1444, 529, 1740, 44, 11, 1376, 1740, 44, 1669, 1281, 1740, 44, 445, 8301, 497, 882, 75770, 340, 1444, 529, 1703, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestXOROnBytes( t *testing.T ) { bytes1 := []byte("1234") bytes2 := []byte("1234") result := XOROnBytes(bytes1,bytes2) if len(result) != len(bytes1) { t.Errorf("Result not equal lenght as input input:%v result:%v",bytes1,result) } }
explode_data.jsonl/2429
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 55, 868, 1925, 7078, 7, 259, 353, 8840, 836, 873, 1476, 70326, 16, 1669, 3056, 3782, 445, 16, 17, 18, 19, 1138, 70326, 17, 1669, 3056, 3782, 445, 16, 17, 18, 19, 1138, 9559, 1669, 69887, 1925, 7078, 23158, 16, 11, 9651...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAllGraphWeights_EmptyGraph(t *testing.T) { gr := tests.NewTestEdgeWeightedGraph(0, nil) assert.Equal(t, []float64(nil), AllGraphWeights(gr)) assert.Equal(t, 0.0, TotalGraphWeight(gr)) }
explode_data.jsonl/51157
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 2403, 11212, 55630, 76060, 1595, 11212, 1155, 353, 8840, 836, 8, 341, 90059, 1669, 7032, 7121, 2271, 11656, 8295, 291, 11212, 7, 15, 11, 2092, 340, 6948, 12808, 1155, 11, 3056, 3649, 21, 19, 27907, 701, 2009, 11212, 55630, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestBase64Decode(t *testing.T) { str := "aGVsbG8gd29ybGQ=" t.Logf("old str = %s, new str = %s", str, hash.Base64Decode(str)) }
explode_data.jsonl/70161
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 3978, 21, 19, 32564, 1155, 353, 8840, 836, 8, 341, 11355, 1669, 330, 64, 39718, 16892, 38, 23, 28584, 17, 24, 84307, 38, 48, 43488, 3244, 98954, 445, 813, 607, 284, 1018, 82, 11, 501, 607, 284, 1018, 82, 497, 607, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestMitre exercises the detection rules of the default security policy:
// each case performs a suspicious filesystem action (credential file access,
// memory dump, log tampering, permission changes, hidden files, kernel
// module drops) and expects the matching rule ID to fire as a probe event.
func TestMitre(t *testing.T) {
	reader := bytes.NewBufferString(config.DefaultPolicy)
	policy, err := policy.LoadPolicy(reader)
	if err != nil {
		t.Fatal(err)
	}
	test, err := newTestModule(policy.Macros, policy.Rules, testOpts{
		enableFilters: true,
	})
	if err != nil {
		t.Fatal(err)
	}
	defer test.Close()
	// Give the module time to settle before triggering events.
	time.Sleep(time.Second)
	testCases := []testCase{
		{
			// Opening /etc/shadow should trigger the credential rule.
			action: func(t *testing.T) {
				f, err := os.Open("/etc/shadow")
				if err != nil {
					t.Fatal(err)
				}
				f.Close()
			},
			expectedRule: "credential_modified",
		},
		{
			// Opening our own /proc/<pid>/mem looks like a memory dump.
			action: func(t *testing.T) {
				f, err := os.Open(fmt.Sprintf("/proc/%d/mem", os.Getpid()))
				if err != nil {
					t.Fatal(err)
				}
				f.Close()
			},
			expectedRule: "memory_dump",
		},
		{
			// Truncating a log file counts as log tampering.
			action: func(t *testing.T) {
				f, err := os.Create("/var/log/service.log")
				if err != nil {
					t.Fatal(err)
				}
				f.Close()
				if err := os.Truncate(fmt.Sprintf("/var/log/service.log"), 0); err != nil {
					t.Fatal(err)
				}
			},
			expectedRule: "logs_altered",
		},
		{
			// Deleting the log file created above counts as log removal.
			action: func(t *testing.T) {
				if err := os.Remove("/var/log/service.log"); err != nil {
					t.Fatal(err)
				}
			},
			expectedRule: "logs_removed",
		},
		{
			// chmod 0777 on a binary triggers the permission-change rule.
			action: func(t *testing.T) {
				f, err := os.Create("/usr/local/bin/pleaseremoveme")
				if err != nil {
					t.Fatal(err)
				}
				f.Close()
				if err := os.Chmod("/usr/local/bin/pleaseremoveme", 0777); err != nil {
					t.Fatal(err)
				}
				os.Remove("/usr/local/bin/pleaseremoveme")
			},
			expectedRule: "permissions_changed",
		},
		{
			// A dot-file at the filesystem root is treated as hidden.
			action: func(t *testing.T) {
				f, err := os.Create("/.removeme")
				if err != nil {
					t.Fatal(err)
				}
				f.Close()
				os.Remove("/.removeme")
			},
			expectedRule: "hidden_file",
		},
		{
			// Dropping a .ko under /lib/modules looks like a kernel module.
			action: func(t *testing.T) {
				os.Mkdir("/lib/modules", 0660)
				f, err := os.Create("/lib/modules/removeme.ko")
				if err != nil {
					t.Fatal(err)
				}
				f.Close()
				os.Remove("/lib/modules/removeme.ko")
			},
			expectedRule: "kernel_module",
		},
	}
	for _, tc := range testCases {
		t.Run(fmt.Sprintf("rule %s", tc.expectedRule), func(t *testing.T) {
			tc.action(t)
			// Wait up to 3s for the probe to surface the matching rule;
			// unrelated events are skipped, malformed ones are errors.
			timeout := time.After(3 * time.Second)
			for {
				select {
				case event := <-test.events:
					if _, ok := event.event.(*sprobe.Event); ok {
						if event.rule.ID == tc.expectedRule {
							return
						}
					} else {
						t.Error("invalid event")
					}
				case <-timeout:
					t.Error("timeout")
					return
				}
			}
		})
	}
}
explode_data.jsonl/8288
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1257 }
[ 2830, 3393, 44, 48734, 1155, 353, 8840, 836, 8, 341, 61477, 1669, 5820, 7121, 4095, 703, 8754, 13275, 13825, 692, 3223, 8018, 11, 1848, 1669, 4842, 13969, 13825, 21987, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeviceDataCompare(t *testing.T) { d1 := DeviceData{DeviceEUI: protocol.EUIFromUint64(0), Data: []byte{1, 2, 3}, Frequency: 99.0, GatewayEUI: protocol.EUIFromUint64(1)} d2 := DeviceData{DeviceEUI: protocol.EUIFromUint64(1), Data: []byte{1, 2, 3}, Frequency: 98.0, GatewayEUI: protocol.EUIFromUint64(1)} d3 := DeviceData{DeviceEUI: protocol.EUIFromUint64(0), Data: []byte{1, 2, 3}, Frequency: 99.0, GatewayEUI: protocol.EUIFromUint64(1)} if d1.Equals(d2) || d2.Equals(d1) { t.Fatal("Should not be the same") } if !d1.Equals(d3) || !d3.Equals(d1) { t.Fatal("Should be equal") } }
explode_data.jsonl/45707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 6985, 1043, 27374, 1155, 353, 8840, 836, 8, 341, 2698, 16, 1669, 13903, 1043, 90, 6985, 36, 2275, 25, 11507, 5142, 2275, 3830, 21570, 21, 19, 7, 15, 701, 2885, 25, 3056, 3782, 90, 16, 11, 220, 17, 11, 220, 18, 2137, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestInitRunner(t *testing.T) { opt := libs.Options{ Concurrency: 3, Threads: 5, Verbose: true, NoDB: true, NoOutput: true, } URL := "http://httpbin.org" signContent := ` # info to search signature id: cred-01-01 noutput: true info: name: Default Credentials risk: High origin: method: GET redirect: false headers: - User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:55.0) Gecko/20100101 Firefox/55 url: >- {{.BaseURL}}/anything?q=1122 concllousions: - SetValue("code", StatusCode()) variables: - tomcat: | /manager/ /manager/html/ /server-status/ /html/ / requests: - method: GET redirect: false url: >- {{.BaseURL}}/anything?aaaa={{.tomcat}} headers: - User-Agent: Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:55.0) Gecko/20100101 Firefox/55 detections: - >- StatusCode() == 200 && (1 == 1) - >- StatusCode() == 200 ` //signFile := "/Users/j3ssie/go/src/github.com/jaeles-project/jaeles/test-sign/default-cred.yaml" sign, err := ParseSignFromContent(signContent) if err != nil { t.Errorf("Error parsing signature") } runner, err := InitRunner(URL, sign, opt) if err != nil { t.Errorf("Error parsing signature") } spew.Dump(runner.Target) fmt.Println("New Requests generated: ", len(runner.Records)) runner.Sending() }
explode_data.jsonl/22775
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 615 }
[ 2830, 3393, 3803, 19486, 1155, 353, 8840, 836, 8, 341, 64838, 1669, 63974, 22179, 515, 197, 197, 79611, 25, 220, 18, 345, 197, 37057, 82, 25, 257, 220, 20, 345, 197, 197, 63404, 25, 257, 830, 345, 197, 197, 2753, 3506, 25, 286, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSaveArtifactsErrorBeforeStart(t *testing.T) { bh := testBuildHandler() fd := bh.docker.(*docker.FakeDocker) expected := fmt.Errorf("run error") fd.RunContainerError = expected fd.RunContainerErrorBeforeStart = true err := bh.Save(bh.config) if err != expected { t.Errorf("Unexpected error returned from saveArtifacts: %v", err) } }
explode_data.jsonl/59442
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 8784, 9286, 26401, 1454, 10227, 3479, 1155, 353, 8840, 836, 8, 341, 2233, 71, 1669, 1273, 11066, 3050, 741, 61721, 1669, 42989, 91131, 41399, 28648, 991, 726, 35, 13659, 340, 42400, 1669, 8879, 13080, 445, 6108, 1465, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTags(t *testing.T) { var tests = []string{ "a <span>tag</span>\n", "<p>a <span>tag</span></p>\n", "<span>tag</span>\n", "<p><span>tag</span></p>\n", "<span>mismatch</spandex>\n", "<p><span>mismatch</spandex></p>\n", "a <singleton /> tag\n", "<p>a <singleton /> tag</p>\n", } doTestsInline(t, tests) }
explode_data.jsonl/57358
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 15930, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 917, 515, 197, 197, 56693, 366, 1480, 40986, 522, 1480, 8449, 77, 756, 197, 197, 22476, 79, 43875, 366, 1480, 40986, 522, 1480, 1472, 79, 8449, 77, 25897, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNGSetup(t *testing.T) { var n int var sendMsg []byte var recvMsg = make([]byte, 2048) // RAN connect to AMF conn, err := test.ConntectToAmf("127.0.0.1", "127.0.0.1", 38412, 9487) assert.Nil(t, err) // send NGSetupRequest Msg sendMsg, err = test.GetNGSetupRequest([]byte("\x00\x01\x02"), 24, "free5gc") assert.Nil(t, err) _, err = conn.Write(sendMsg) assert.Nil(t, err) // receive NGSetupResponse Msg n, err = conn.Read(recvMsg) assert.Nil(t, err) _, err = ngap.Decoder(recvMsg[:n]) assert.Nil(t, err) // close Connection conn.Close() }
explode_data.jsonl/27409
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 6140, 21821, 1155, 353, 8840, 836, 8, 341, 2405, 308, 526, 198, 2405, 3624, 6611, 3056, 3782, 198, 2405, 27006, 6611, 284, 1281, 10556, 3782, 11, 220, 17, 15, 19, 23, 692, 197, 322, 431, 1093, 4564, 311, 6769, 37, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGenerateDockerBuild runs the Go chaincode platform over a table of
// valid and deliberately broken deployment specs (bad paths, bad file modes,
// vendored dependencies) and checks that Dockerfile/build generation
// succeeds or fails exactly as each spec expects.
func TestGenerateDockerBuild(t *testing.T) {
	defaultGopath := os.Getenv("GOPATH")
	testdataPath, err := filepath.Abs("testdata")
	require.NoError(t, err)
	tests := []struct {
		gopath string
		spec   spec
	}{
		{gopath: defaultGopath, spec: spec{CCName: "NoCode", Path: "path/to/nowhere", File: "/bin/warez", Mode: 0100400, SuccessExpected: false}},
		{gopath: defaultGopath, spec: spec{CCName: "invalidhttp", Path: "https://not/a/valid/path", SuccessExpected: false, RealGen: true}},
		{gopath: defaultGopath, spec: spec{CCName: "map", Path: "github.com/sinochem-tech/fabric/examples/chaincode/go/map", SuccessExpected: true, RealGen: true}},
		{gopath: defaultGopath, spec: spec{CCName: "mapBadPath", Path: "github.com/sinochem-tech/fabric/examples/chaincode/go/map", File: "/src/github.com/sinochem-tech/fabric/examples/bad/path/to/map.go", Mode: 0100400, SuccessExpected: false}},
		{gopath: defaultGopath, spec: spec{CCName: "mapBadMode", Path: "github.com/sinochem-tech/fabric/examples/chaincode/go/map", File: "/src/github.com/sinochem-tech/fabric/examples/chaincode/go/map/map.go", Mode: 0100555, SuccessExpected: false}},
		{gopath: testdataPath, spec: spec{CCName: "AutoVendor", Path: "chaincodes/AutoVendor/chaincode", SuccessExpected: true, RealGen: true}},
	}
	platform := &Platform{}
	for _, test := range tests {
		tst := test.spec
		// Point GOPATH at wherever this case's chaincode lives; reset()
		// restores it at the end of the iteration.
		reset := updateGopath(t, test.gopath)
		inputbuf := bytes.NewBuffer(nil)
		tw := tar.NewWriter(inputbuf)
		var cds *pb.ChaincodeDeploymentSpec
		var err error
		if tst.RealGen {
			// Build a real deployment spec and generate its code package.
			cds = &pb.ChaincodeDeploymentSpec{
				ChaincodeSpec: &pb.ChaincodeSpec{
					ChaincodeId: &pb.ChaincodeID{
						Name:    tst.CCName,
						Path:    tst.Path,
						Version: "0",
					},
				},
			}
			cds.CodePackage, err = platform.GetDeploymentPayload(cds.ChaincodeSpec)
			if err = testerr(err, tst.SuccessExpected); err != nil {
				t.Errorf("test failed in GetDeploymentPayload: %s, %s", cds.ChaincodeSpec.ChaincodeId.Path, err)
			}
		} else {
			// Fabricate a spec with a hand-crafted (possibly invalid)
			// code package instead of generating a real one.
			cds, err = generateFakeCDS(tst.CCName, tst.Path, tst.File, tst.Mode)
		}
		if _, err = platform.GenerateDockerfile(cds); err != nil {
			t.Errorf("could not generate docker file for a valid spec: %s, %s", cds.ChaincodeSpec.ChaincodeId.Path, err)
		}
		err = platform.GenerateDockerBuild(cds, tw)
		if err = testerr(err, tst.SuccessExpected); err != nil {
			t.Errorf("Error validating chaincode spec: %s, %s", cds.ChaincodeSpec.ChaincodeId.Path, err)
		}
		reset()
	}
}
explode_data.jsonl/2268
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 985 }
[ 2830, 3393, 31115, 35, 13659, 11066, 1155, 353, 8840, 836, 8, 341, 11940, 38, 35111, 1669, 2643, 64883, 445, 98733, 4827, 1138, 18185, 691, 1820, 11, 1848, 1669, 26054, 33255, 445, 92425, 1138, 17957, 35699, 1155, 11, 1848, 692, 78216, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestDeployTail is an integration test: it deploys an example app with
// `skaffold deploy --tail` and waits (up to 30s) for the tailed logs to
// contain "Hello world!". Skipped in short mode and on GCP-only runs.
func TestDeployTail(t *testing.T) {
	if testing.Short() {
		t.Skip("skipping integration test")
	}
	if ShouldRunGCPOnlyTests() {
		t.Skip("skipping test that is not gcp only")
	}
	ns, _, deleteNs := SetupNamespace(t)
	defer deleteNs()
	out, cancel := skaffold.Deploy("--tail", "--images", "busybox:latest").InDir("testdata/deploy-hello-tail").InNs(ns.Name).RunBackgroundOutput(t)
	defer cancel()
	// Wait for the logs to print "Hello world!"
	// A goroutine pumps scanner lines into a channel so the select below
	// can race log lines against the timeout.
	lines := make(chan string)
	go func() {
		scanner := bufio.NewScanner(out)
		for scanner.Scan() {
			lines <- scanner.Text()
		}
	}()
	timer := time.NewTimer(30 * time.Second)
	defer timer.Stop()
	for {
		select {
		case <-timer.C:
			t.Fatal("timeout")
		case line := <-lines:
			if strings.Contains(line, "Hello world!") {
				return
			}
		}
	}
}
explode_data.jsonl/36242
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 69464, 44795, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 17590, 1273, 1138, 197, 532, 743, 12260, 6727, 38, 7123, 7308, 18200, 368, 341, 197, 3244, 57776, 445, 4886, 5654, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHTTPTask_OnlyErrorMessage(t *testing.T) { t.Parallel() config := cltest.NewTestGeneralConfig(t) handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") w.WriteHeader(http.StatusBadGateway) _, err := w.Write([]byte(mustReadFile(t, "../../testdata/apiresponses/coinmarketcap.error.json"))) require.NoError(t, err) }) server := httptest.NewServer(handler) defer server.Close() task := pipeline.HTTPTask{ Method: "POST", URL: server.URL, RequestData: ethUSDPairing, } task.HelperSetDependencies(config) result, runInfo := task.Run(context.Background(), logger.TestLogger(t), pipeline.NewVarsFrom(nil), nil) assert.False(t, runInfo.IsPending) assert.True(t, runInfo.IsRetryable) require.Error(t, result.Error) require.Contains(t, result.Error.Error(), "RequestId") require.Nil(t, result.Value) }
explode_data.jsonl/81346
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 349 }
[ 2830, 3393, 2545, 51, 2828, 1073, 62, 7308, 21349, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 25873, 1669, 1185, 1944, 7121, 2271, 15415, 2648, 1155, 340, 53326, 1669, 1758, 89164, 18552, 3622, 1758, 37508, 11, 435, 353, 125...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChannelsService_AddVideo(t *testing.T) { setup() defer teardown() mux.HandleFunc("/channels/ch/videos/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PUT") }) _, _, err := client.Channels.AddVideo("ch", 1) if err != nil { t.Errorf("Channels.AddVideo returned unexpected error: %v", err) } }
explode_data.jsonl/49790
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 35925, 1860, 21346, 10724, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 32425, 21284, 72945, 14, 16, 497, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 18185, 3523,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormatNoUnits(t *testing.T) { actual := decor.Format(1234567).String() expected := "1234567" if actual != expected { t.Errorf("Expected %q but found %q", expected, actual) } }
explode_data.jsonl/19793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 4061, 2753, 26314, 1155, 353, 8840, 836, 8, 341, 88814, 1669, 10576, 9978, 7, 16, 17, 18, 19, 20, 21, 22, 568, 703, 741, 42400, 1669, 330, 16, 17, 18, 19, 20, 21, 22, 698, 743, 5042, 961, 3601, 341, 197, 3244, 13080,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestArray_Json verifies JSON round-tripping of garray.Array in four
// combinations: marshalling through a pointer and through a value, and using
// the array as a struct field both as *garray.Array and as garray.Array.
func TestArray_Json(t *testing.T) {
	// Marshal via pointer: must encode identically to the raw slice and
	// unmarshal back into both a fresh and a zero-value array.
	gtest.C(t, func(t *gtest.T) {
		s1 := []interface{}{"a", "b", "d", "c"}
		a1 := garray.NewArrayFrom(s1)
		b1, err1 := json.Marshal(a1)
		b2, err2 := json.Marshal(s1)
		t.Assert(b1, b2)
		t.Assert(err1, err2)

		a2 := garray.New()
		err2 = json.UnmarshalUseNumber(b2, &a2)
		t.Assert(err2, nil)
		t.Assert(a2.Slice(), s1)

		var a3 garray.Array
		err := json.UnmarshalUseNumber(b2, &a3)
		t.Assert(err, nil)
		t.Assert(a3.Slice(), s1)
	})
	// Marshal via value receiver: same expectations as above.
	gtest.C(t, func(t *gtest.T) {
		s1 := []interface{}{"a", "b", "d", "c"}
		a1 := *garray.NewArrayFrom(s1)
		b1, err1 := json.Marshal(a1)
		b2, err2 := json.Marshal(s1)
		t.Assert(b1, b2)
		t.Assert(err1, err2)

		a2 := garray.New()
		err2 = json.UnmarshalUseNumber(b2, &a2)
		t.Assert(err2, nil)
		t.Assert(a2.Slice(), s1)

		var a3 garray.Array
		err := json.UnmarshalUseNumber(b2, &a3)
		t.Assert(err, nil)
		t.Assert(a3.Slice(), s1)
	})
	// Array as a *garray.Array struct field survives a marshal/unmarshal
	// round trip.
	gtest.C(t, func(t *gtest.T) {
		type User struct {
			Name   string
			Scores *garray.Array
		}
		data := g.Map{
			"Name":   "john",
			"Scores": []int{99, 100, 98},
		}
		b, err := json.Marshal(data)
		t.Assert(err, nil)

		user := new(User)
		err = json.UnmarshalUseNumber(b, user)
		t.Assert(err, nil)
		t.Assert(user.Name, data["Name"])
		t.Assert(user.Scores, data["Scores"])
	})
	// Array as a garray.Array value field behaves the same.
	gtest.C(t, func(t *gtest.T) {
		type User struct {
			Name   string
			Scores garray.Array
		}
		data := g.Map{
			"Name":   "john",
			"Scores": []int{99, 100, 98},
		}
		b, err := json.Marshal(data)
		t.Assert(err, nil)

		user := new(User)
		err = json.UnmarshalUseNumber(b, user)
		t.Assert(err, nil)
		t.Assert(user.Name, data["Name"])
		t.Assert(user.Scores, data["Scores"])
	})
}
explode_data.jsonl/13916
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 913 }
[ 2830, 3393, 1857, 62, 5014, 1155, 353, 8840, 836, 8, 341, 197, 322, 7445, 198, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 1903, 16, 1669, 3056, 4970, 6257, 4913, 64, 497, 330, 65, 497, 330, 67, 497, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Transaction_Panic checks that a panic raised inside a Transaction
// closure aborts the transaction: the Replace issued before the panic must
// not be committed, so the seeded row keeps its original nickname.
func Test_Transaction_Panic(t *testing.T) {
	table := createInitTable()
	defer dropTable(table)
	gtest.C(t, func(t *gtest.T) {
		err := db.Transaction(func(tx *gdb.TX) error {
			if _, err := tx.Replace(table, g.Map{
				"id":          1,
				"passport":    "USER_1",
				"password":    "PASS_1",
				"nickname":    "NAME_1",
				"create_time": gtime.Now().String(),
			}); err != nil {
				t.Error(err)
			}
			// Deliberate panic; the return below is unreachable.
			panic("error")
			return nil
		})
		// The panic surfaces as a non-nil error from Transaction.
		t.AssertNE(err, nil)
		if value, err := db.Table(table).Fields("nickname").Where("id", 1).Value(); err != nil {
			gtest.Error(err)
		} else {
			// Still the value seeded by createInitTable, not "NAME_1".
			t.Assert(value.String(), "name_1")
		}
	})
}
explode_data.jsonl/41322
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 309 }
[ 2830, 3393, 34932, 1311, 1088, 31270, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 1855, 3803, 2556, 741, 16867, 5943, 2556, 15761, 692, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 9859, 1669, 2927, 29284, 18552, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestHelmValuesLiteralFileLocal sets Helm values literally from a local
// file via `--values-literal-file`, verifies the app spec carries the file's
// exact contents, then unsets them and verifies the Helm source is cleared.
func TestHelmValuesLiteralFileLocal(t *testing.T) {
	Given(t).
		Path("helm").
		When().
		Create().
		AppSet("--values-literal-file", "testdata/helm/baz.yaml").
		Then().
		And(func(app *Application) {
			// The spec must embed exactly the bytes of the local file.
			data, err := ioutil.ReadFile("testdata/helm/baz.yaml")
			if err != nil {
				panic(err)
			}
			assert.Equal(t, string(data), app.Spec.Source.Helm.Values)
		}).
		When().
		AppUnSet("--values-literal").
		Then().
		And(func(app *Application) {
			// Unsetting removes the Helm source section entirely.
			assert.Nil(t, app.Spec.Source.Helm)
		})
}
explode_data.jsonl/69401
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 39, 23162, 6227, 17350, 1703, 7319, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 69640, 445, 51899, 38609, 197, 197, 4498, 25829, 197, 75569, 25829, 197, 59557, 1649, 21549, 3661, 2852, 9953, 14203, 497, 330, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLexModule(t *testing.T) { l := lex("module foo { } ", nil) expecteds := [...]int{kywd_module, token_ident, token_curly_open, token_curly_close} for _, expected := range expecteds { token, err := l.nextToken() if err != nil { t.Errorf(err.Error()) } if token.typ != expected { t.Errorf("expected %d but got %d, %s", expected, token.typ, token.String()) } } }
explode_data.jsonl/81034
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 47778, 3332, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 22429, 445, 4352, 15229, 314, 335, 3670, 2092, 340, 42400, 82, 1669, 48179, 396, 90, 7891, 6377, 10750, 11, 3950, 38399, 11, 3950, 18956, 398, 11311, 11, 3950, 18956, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestRaceOnTryGetOrCreateReplicas hammers getOrCreateReplica for the same
// range (42) from many goroutines with distinct replica IDs; run under
// -race this exercises the store's replica-creation path for data races.
func TestRaceOnTryGetOrCreateReplicas(t *testing.T) {
	defer leaktest.AfterTest(t)()
	tc := testContext{}
	stopper := stop.NewStopper()
	ctx := context.Background()
	defer stopper.Stop(ctx)
	tc.Start(t, stopper)
	s := tc.store
	var wg sync.WaitGroup
	for i := 3; i < 100; i++ {
		wg.Add(1)
		go func(rid roachpb.ReplicaID) {
			defer wg.Done()
			// NOTE(review): assumes a non-nil replica is returned with
			// raftMu held by the caller — hence the Unlock below. Confirm
			// against getOrCreateReplica's contract.
			r, _, _ := s.getOrCreateReplica(ctx, 42, rid, &roachpb.ReplicaDescriptor{
				NodeID:    2,
				StoreID:   2,
				ReplicaID: 2,
			}, false)
			if r != nil {
				r.raftMu.Unlock()
			}
		}(roachpb.ReplicaID(i))
	}
	wg.Wait()
}
explode_data.jsonl/113
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 55991, 1925, 21453, 1949, 57111, 18327, 52210, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 78255, 1669, 1273, 1972, 16094, 62644, 712, 1669, 2936, 7121, 10674, 712, 741, 20985, 1669, 2266, 1904...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBadSignatureWithOneKey(t *testing.T) { publicKeys := []*rsa.PublicKey{getPublicKey(publicKeyModulusHexStringMatching)} valid := signatures.IsSignatureValid([]byte(invalidFile), []byte(signature), publicKeys) if valid { t.Error("Invalid signature not recognized to be invalid.") } }
explode_data.jsonl/4967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 17082, 25088, 2354, 3966, 1592, 1155, 353, 8840, 836, 8, 341, 1219, 8850, 1669, 29838, 60869, 49139, 1592, 90, 455, 61822, 31688, 1592, 4459, 19425, 49137, 64430, 10569, 56322, 1669, 32628, 4506, 25088, 4088, 10556, 3782, 5900, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestString(t *testing.T) { testStr := `{"age":"150","name":"abbot"}` jn := FromBytes([]byte(testStr)) rep := jn.String() if rep != testStr { t.Errorf("[%v]Not equal string reps. Got <<%s>>", jn.Err, rep) } }
explode_data.jsonl/10432
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 703, 1155, 353, 8840, 836, 8, 341, 18185, 2580, 1669, 1565, 4913, 424, 3252, 16, 20, 15, 2198, 606, 3252, 370, 6331, 9207, 3989, 12428, 77, 1669, 5542, 7078, 10556, 3782, 8623, 2580, 1171, 73731, 1669, 502, 77, 6431, 741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTransportChecksResponseHeaderListSize checks that the HTTP/2 transport
// rejects a response whose decoded header list is oversized: RoundTrip must
// fail with errResponseHeaderListSize.
func TestTransportChecksResponseHeaderListSize(t *testing.T) {
	ct := newClientTester(t)
	ct.client = func() error {
		req, _ := http.NewRequest("GET", "https://dummy.tld/", nil)
		res, err := ct.tr.RoundTrip(req)
		if err != errResponseHeaderListSize {
			if res != nil {
				res.Body.Close()
			}
			// Tally what was accepted so the failure message shows how
			// much header data slipped through.
			size := int64(0)
			for k, vv := range res.Header {
				for _, v := range vv {
					size += int64(len(k)) + int64(len(v)) + 32
				}
			}
			return fmt.Errorf("RoundTrip Error = %v (and %d bytes of response headers); want errResponseHeaderListSize", err, size)
		}
		return nil
	}
	ct.server = func() error {
		ct.greet()
		var buf bytes.Buffer
		enc := hpack.NewEncoder(&buf)
		for {
			f, err := ct.fr.ReadFrame()
			if err != nil {
				return err
			}
			switch f := f.(type) {
			case *HeadersFrame:
				// Respond with an oversized header block: thousands of
				// duplicate 1KiB fields after the status pseudo-header.
				enc.WriteField(hpack.HeaderField{Name: ":status", Value: "200"})
				large := strings.Repeat("a", 1<<10)
				for i := 0; i < 5042; i++ {
					enc.WriteField(hpack.HeaderField{Name: large, Value: large})
				}
				if size, want := buf.Len(), 6329; size != want {
					// Note: this number might change if
					// our hpack implementation
					// changes. That's fine. This is
					// just a sanity check that our
					// response can fit in a single
					// header block fragment frame.
					return fmt.Errorf("encoding over 10MB of duplicate keypairs took %d bytes; expected %d", size, want)
				}
				ct.fr.WriteHeaders(HeadersFrameParam{
					StreamID:      f.StreamID,
					EndHeaders:    true,
					EndStream:     true,
					BlockFragment: buf.Bytes(),
				})
				return nil
			}
		}
	}
	ct.run()
}
explode_data.jsonl/16108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 697 }
[ 2830, 3393, 27560, 49820, 2582, 4047, 852, 1695, 1155, 353, 8840, 836, 8, 341, 89216, 1669, 501, 2959, 58699, 1155, 340, 89216, 6581, 284, 2915, 368, 1465, 341, 197, 24395, 11, 716, 1669, 1758, 75274, 445, 3806, 497, 330, 2428, 1110, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_IntIsPresent(t *testing.T) { r := require.New(t) v := IntIsPresent{Name: "Name", Field: 1} errors := validate.NewErrors() v.IsValid(errors) r.Equal(errors.Count(), 0) v = IntIsPresent{Name: "Name", Field: 0} v.IsValid(errors) r.Equal(errors.Count(), 1) r.Equal(errors.Get("name"), []string{"Name can not be blank."}) errors = validate.NewErrors() v = IntIsPresent{Name: "Name", Field: 0, Message: "Field can't be blank."} v.IsValid(errors) r.Equal(errors.Count(), 1) r.Equal(errors.Get("name"), []string{"Field can't be blank."}) }
explode_data.jsonl/16244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 32054, 3872, 21195, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 692, 5195, 1669, 1333, 3872, 21195, 63121, 25, 330, 675, 497, 8601, 25, 220, 16, 532, 73424, 1669, 9593, 7121, 13877, 741, 5195, 28992, 38881, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOperator checks Go+ → Go compilation of basic operators: string
// concatenation (+) and unary negation (-) must translate verbatim into the
// generated main function.
// NOTE(review): the line layout inside the backtick strings was
// reconstructed from a collapsed source line; confirm against the original.
func TestOperator(t *testing.T) {
	gopClTest(t, `
a := "Hi"
b := a + "!"
c := 13
d := -c
`, `package main

func main() {
	a := "Hi"
	b := a + "!"
	c := 13
	d := -c
}
`)
}
explode_data.jsonl/73693
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 18461, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 64, 1669, 330, 13048, 698, 65, 1669, 264, 488, 330, 24734, 66, 1669, 220, 16, 18, 198, 67, 1669, 481, 66, 198, 7808, 1565, 1722, 1887, 271, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOldInvoiceRemovalOnStart(t *testing.T) { t.Parallel() testClock := clock.NewTestClock(testTime) cdb, cleanup, err := newTestChannelDB(testClock) defer cleanup() require.NoError(t, err) cfg := RegistryConfig{ FinalCltvRejectDelta: testFinalCltvRejectDelta, Clock: testClock, GcCanceledInvoicesOnStartup: true, } expiryWatcher := NewInvoiceExpiryWatcher(cfg.Clock) registry := NewRegistry(cdb, expiryWatcher, &cfg) // First prefill the Channel DB with some pre-existing expired invoices. const numExpired = 5 const numPending = 0 existingInvoices := generateInvoiceExpiryTestData( t, testTime, 0, numExpired, numPending, ) i := 0 for paymentHash, invoice := range existingInvoices.expiredInvoices { // Mark half of the invoices as settled, the other hald as // canceled. if i%2 == 0 { invoice.State = channeldb.ContractSettled } else { invoice.State = channeldb.ContractCanceled } _, err := cdb.AddInvoice(invoice, paymentHash) require.NoError(t, err) i++ } // Collect all settled invoices for our expectation set. var expected []channeldb.Invoice // Perform a scan query to collect all invoices. query := channeldb.InvoiceQuery{ IndexOffset: 0, NumMaxInvoices: math.MaxUint64, } response, err := cdb.QueryInvoices(query) require.NoError(t, err) // Save all settled invoices for our expectation set. for _, invoice := range response.Invoices { if invoice.State == channeldb.ContractSettled { expected = append(expected, invoice) } } // Start the registry which should collect and delete all canceled // invoices upon start. err = registry.Start() require.NoError(t, err, "cannot start the registry") // Perform a scan query to collect all invoices. response, err = cdb.QueryInvoices(query) require.NoError(t, err) // Check that we really only kept the settled invoices after the // registry start. require.Equal(t, expected, response.Invoices) }
explode_data.jsonl/59119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 720 }
[ 2830, 3393, 18284, 34674, 6590, 13516, 1925, 3479, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18185, 26104, 1669, 8866, 7121, 2271, 26104, 8623, 1462, 340, 1444, 1999, 11, 21290, 11, 1848, 1669, 501, 2271, 9629, 3506, 8623, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPrimitiveGetLong(t *testing.T) { client := newPrimitiveClient() result, err := client.GetLong(context.Background(), nil) if err != nil { t.Fatalf("GetLong: %v", err) } if r := cmp.Diff(result.LongWrapper, LongWrapper{ Field1: to.Int64Ptr(1099511627775), Field2: to.Int64Ptr(-999511627788), }); r != "" { t.Fatal(r) } }
explode_data.jsonl/61669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 33313, 1949, 6583, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 501, 33313, 2959, 741, 9559, 11, 1848, 1669, 2943, 2234, 6583, 5378, 19047, 1507, 2092, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1949, 6583, 25, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCrMergedCreateInRemovedDir(t *testing.T) { test(t, users("alice", "bob"), as(alice, mkfile("a/b/c/d/e", "hello"), ), as(bob, disableUpdates(), ), as(alice, write("a/b/c/d/f", "goodbye"), ), as(bob, noSync(), rm("a/b/c/d/e"), rmdir("a/b/c/d"), rmdir("a/b/c"), rmdir("a/b"), reenableUpdates(), lsdir("a/b/c/d", m{"f": "FILE"}), read("a/b/c/d/f", "goodbye"), ), as(alice, lsdir("a/b/c/d", m{"f": "FILE"}), read("a/b/c/d/f", "goodbye"), ), ) }
explode_data.jsonl/31367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 16001, 44, 51525, 4021, 641, 42642, 6184, 1155, 353, 8840, 836, 8, 341, 18185, 1155, 345, 197, 90896, 445, 63195, 497, 330, 47086, 4461, 197, 60451, 17643, 558, 345, 298, 2109, 74, 1192, 445, 64, 3470, 2899, 3446, 16546, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetMigrationsWithStatusAndProgress(t *testing.T) { defer resetTest() createMigrationAndVerify(t, "getmigrationstatustest", "default", "clusterpair1", []string{"namespace1"}, "", "") migration, err := k8s.Instance().GetMigration("getmigrationstatustest", "default") require.NoError(t, err, "Error getting migration") // Update the status of the migration migration.CreationTimestamp = metav1.Now() migration.Status.Stage = storkv1.MigrationStageFinal migration.Status.Status = storkv1.MigrationStatusSuccessful migration.Status.Volumes = []*storkv1.VolumeInfo{} migration, err = k8s.Instance().UpdateMigration(migration) expected := "NAME CLUSTERPAIR STAGE STATUS VOLUMES RESOURCES CREATED\n" + "getmigrationstatustest clusterpair1 Final Successful 0/0 0/0 " + toTimeString(migration.CreationTimestamp.Time) + "\n" cmdArgs := []string{"get", "migrations", "getmigrationstatustest"} testCommon(t, cmdArgs, nil, expected, false) }
explode_data.jsonl/18253
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 382 }
[ 2830, 3393, 1949, 44, 17824, 2354, 2522, 3036, 9496, 1155, 353, 8840, 836, 8, 341, 16867, 7585, 2271, 741, 39263, 20168, 3036, 32627, 1155, 11, 330, 455, 80227, 9878, 590, 477, 497, 330, 2258, 497, 330, 18855, 12670, 16, 497, 3056, 91...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestELEMENTIsRandom(t *testing.T) { for i := 0; i < 1000; i++ { var x, y Element x.SetRandom() y.SetRandom() if x.Equal(&y) { t.Fatal("2 random numbers are unlikely to be equal") } } }
explode_data.jsonl/50431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 91754, 3872, 13999, 1155, 353, 8840, 836, 8, 341, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 15, 15, 26, 600, 1027, 341, 197, 2405, 856, 11, 379, 8543, 198, 197, 10225, 4202, 13999, 741, 197, 14522, 4202, 139...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewDocumentFromReader(t *testing.T) { cases := []struct { src string err bool sel string cnt int }{ 0: { src: ` <html> <head> <title>Test</title> <body> <h1>Hi</h1> </body> </html>`, sel: "h1", cnt: 1, }, 1: { // Actually pretty hard to make html.Parse return an error // based on content... src: `<html><body><aef<eqf>>>qq></body></ht>`, }, } buf := bytes.NewBuffer(nil) for i, c := range cases { buf.Reset() buf.WriteString(c.src) d, e := NewDocumentFromReader(buf) if (e != nil) != c.err { if c.err { t.Errorf("[%d] - expected error, got none", i) } else { t.Errorf("[%d] - expected no error, got %s", i, e) } } if c.sel != "" { s := d.Find(c.sel) if s.Length() != c.cnt { t.Errorf("[%d] - expected %d nodes, found %d", i, c.cnt, s.Length()) } } } }
explode_data.jsonl/56748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 3564, 7524, 3830, 5062, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 41144, 914, 198, 197, 9859, 1807, 198, 197, 1903, 301, 914, 198, 197, 60553, 526, 198, 197, 59403, 197, 197, 15, 25, 341, 298,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestScalarMult(t *testing.T) { u := Vec3{0, 0, 0} v := Vec3{15, -2, 1.5} w := Vec3{-10, 6, 5} assert.Equal(t, u, ScalarMult(u, 29.5)) assert.Equal(t, Vec3{30, -4, 3}, ScalarMult(v, 2)) assert.Equal(t, Vec3{-15, 9, 7.5}, ScalarMult(w, 1.5)) }
explode_data.jsonl/33712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 138 }
[ 2830, 3393, 20639, 40404, 1155, 353, 8840, 836, 8, 341, 10676, 1669, 11312, 18, 90, 15, 11, 220, 15, 11, 220, 15, 532, 5195, 1669, 11312, 18, 90, 16, 20, 11, 481, 17, 11, 220, 16, 13, 20, 532, 6692, 1669, 11312, 18, 19999, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidate_UniqueOperationNames_MultipleOperationsOfDifferentTypes(t *testing.T) { testutil.ExpectPassesRule(t, graphql.UniqueOperationNamesRule, ` query Foo { field } mutation Bar { field } subscription Baz { field } `) }
explode_data.jsonl/23171
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 17926, 62, 22811, 8432, 7980, 1245, 12229, 35120, 2124, 69123, 4173, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 81893, 12187, 288, 11337, 1155, 11, 48865, 87443, 8432, 7980, 11337, 11, 22074, 414, 3239, 33428, 341, 286, 2070, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestListByIndex(t *testing.T) { ctx := context.Background() env, client := clientEnvForTest(ctx, t, stestonly.PreorderedLogTree) defer env.Close() // Add a few test leaves. leafData := [][]byte{ []byte("A"), []byte("B"), []byte("C"), } if err := addSequencedLeaves(ctx, env, client, leafData); err != nil { t.Fatalf("Failed to add leaves: %v", err) } // Fetch leaves. leaves, err := client.ListByIndex(ctx, 0, 3) if err != nil { t.Errorf("Failed to ListByIndex: %v", err) } for i, l := range leaves { if got, want := l.LeafValue, leafData[i]; !bytes.Equal(got, want) { t.Errorf("ListIndex()[%v] = %v, want %v", i, got, want) } } }
explode_data.jsonl/54600
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 852, 1359, 1552, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 57538, 11, 2943, 1669, 2943, 14359, 2461, 2271, 7502, 11, 259, 11, 357, 477, 3243, 28770, 10544, 2201, 6533, 340, 16867, 6105, 10421, 2822, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetPutTaskStatus(t *testing.T) { s := &etcdTester{} s.setUpTest(t) defer s.tearDownTest(t) ctx := context.Background() info := &model.TaskStatus{ Tables: map[model.TableID]*model.TableReplicaInfo{ 1: {StartTs: 100}, }, } feedID := "feedid" captureID := "captureid" err := s.client.PutTaskStatus(ctx, feedID, captureID, info) require.NoError(t, err) _, getInfo, err := s.client.GetTaskStatus(ctx, feedID, captureID) require.NoError(t, err) require.Equal(t, getInfo, info) err = s.client.ClearAllCDCInfo(context.Background()) require.NoError(t, err) _, _, err = s.client.GetTaskStatus(ctx, feedID, captureID) require.True(t, cerror.ErrTaskStatusNotExists.Equal(err)) }
explode_data.jsonl/70529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 1949, 19103, 6262, 2522, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 609, 295, 4385, 58699, 16094, 1903, 77700, 2271, 1155, 340, 16867, 274, 31853, 59342, 2271, 1155, 340, 20985, 1669, 2266, 19047, 741, 27043, 1669, 609, 2528, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestQueryInvalidStructure(t *testing.T) { req := URL("http://example.com/foo/bah") type s struct{} var sv s req.Query = &query.Builder{ BFilter: &[]filter.Filter{ {"a": map[s]string{sv: "x"}}, }, } if err := req.setupAction("GET"); err == nil { t.Error("Expected Get() to fail due to invalid Query structure") } }
explode_data.jsonl/24766
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 2859, 7928, 22952, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 5548, 445, 1254, 1110, 8687, 905, 60555, 3470, 1466, 1138, 13158, 274, 2036, 16094, 2405, 13559, 274, 271, 24395, 15685, 284, 609, 1631, 15641, 515, 197, 12791, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPingLargeMTU(t *testing.T) { skipIfNumNodesLessThan(t, 2) data, err := setupTest(t) if err != nil { t.Fatalf("Error when setting up test: %v", err) } defer teardownTest(t, data) podNames, deletePods := createPodsOnDifferentNodes(t, data, 2) defer deletePods() podName0 := podNames[0] podName1 := podNames[1] podIPs := waitForPodIPs(t, data, podNames) pingSize := 2000 cmd := fmt.Sprintf("ping -c %d -s %d %s", pingCount, pingSize, podIPs[podName1]) t.Logf("Running ping with size %d between Pods %s and %s", pingSize, podName0, podName1) stdout, stderr, err := data.runCommandFromPod(testNamespace, podName0, busyboxContainerName, strings.Fields(cmd)) if err != nil { t.Errorf("Error when running ping command: %v - stdout: %s - stderr: %s", err, stdout, stderr) } }
explode_data.jsonl/49181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 69883, 34253, 8505, 52, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 2679, 4651, 12288, 27451, 26067, 1155, 11, 220, 17, 340, 8924, 11, 1848, 1669, 6505, 2271, 1155, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 145...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIsValid(t *testing.T) { valid, _ := IsValid("0123455") if !valid { t.Errorf("should be valid") } valid, _ = IsValid("0123456") if valid { t.Errorf("should be invalid") } }
explode_data.jsonl/75584
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 55470, 1155, 353, 8840, 836, 8, 341, 56322, 11, 716, 1669, 70647, 445, 15, 16, 17, 18, 19, 20, 20, 5130, 743, 753, 1891, 341, 197, 3244, 13080, 445, 5445, 387, 2697, 1138, 197, 630, 56322, 11, 716, 284, 70647, 445, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIntegrationHTTPDoGood(t *testing.T) { ctx := context.Background() results := HTTPDo(ctx, HTTPDoConfig{ Accept: "*/*", AcceptLanguage: "en", URL: "http://ooni.io", }) if results.Error != nil { t.Fatal(results.Error) } if results.StatusCode != 200 { t.Fatal("request failed?!") } if len(results.Headers) < 1 { t.Fatal("no headers?!") } if len(results.BodySnap) < 1 { t.Fatal("no body?!") } }
explode_data.jsonl/53533
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 52464, 9230, 5404, 15216, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 55497, 1669, 10130, 5404, 7502, 11, 10130, 5404, 2648, 515, 197, 197, 16646, 25, 260, 15630, 1057, 756, 197, 197, 16646, 13806, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTelemetryBasic(t *testing.T) { RegisterFailHandler(Fail) var specReporters []Reporter junitReporter := reporters.NewJUnitReporter("/testresults/junit_telemetry.xml") specReporters = append(specReporters, junitReporter) RunSpecsWithDefaultAndCustomReporters(t, "Torpedo : Telemetry", specReporters) }
explode_data.jsonl/47824
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 6639, 35958, 15944, 1155, 353, 8840, 836, 8, 341, 79096, 19524, 3050, 7832, 604, 692, 2405, 1398, 10361, 388, 3056, 52766, 198, 12428, 3843, 52766, 1669, 19040, 7121, 56248, 52766, 4283, 1944, 8074, 4437, 3843, 528, 18871, 151...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsGPULauncher(t *testing.T) { f := newFixture(t) startTime := metav1.Now() completionTime := metav1.Now() testCases := map[string]struct { gpu string expected bool }{ "isNvidiaGPU": { gpu: gpuResourceName, expected: true, }, "isExtendedGPU": { gpu: extendedGPUResourceName, expected: true, }, "notGPU": { gpu: "vendor-domain/resourcetype", expected: false, }, } for testName, testCase := range testCases { mpiJob := newMPIJobWithLauncher("test", int32Ptr(64), 1, testCase.gpu, &startTime, &completionTime) f.setUpMPIJob(mpiJob) if result := isGPULauncher(mpiJob); result != testCase.expected { t.Errorf("%s expected: %v, actual: %v, gpu=%v", testName, testCase.expected, result, testCase.gpu) } } }
explode_data.jsonl/29951
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 342 }
[ 2830, 3393, 3872, 24430, 1094, 18423, 261, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 692, 21375, 1462, 1669, 77520, 16, 13244, 741, 32810, 14386, 1462, 1669, 77520, 16, 13244, 2822, 18185, 37302, 1669, 2415, 14032, 60, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStatusHandlerGetNoStatus(t *testing.T) { ms := mockStatusService() ms.StatusFn = func(ctx context.Context, statusID string) (cabby.Status, error) { return cabby.Status{}, nil } h := StatusHandler{StatusService: &ms} status, body := handlerTest(h.Get, "GET", testStatusURL, nil) if status != http.StatusNotFound { t.Error("Got:", status, "Expected:", http.StatusNotFound) } var result cabby.Error err := json.Unmarshal([]byte(body), &result) if err != nil { t.Fatal(err) } expected := tester.ErrorResourceNotFound expected.Description = "No status available for this id" passed := tester.CompareError(result, expected) if !passed { t.Error("Comparison failed") } }
explode_data.jsonl/39130
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 247 }
[ 2830, 3393, 2522, 3050, 1949, 2753, 2522, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 7860, 2522, 1860, 741, 47691, 10538, 24911, 284, 2915, 7502, 2266, 9328, 11, 2639, 915, 914, 8, 320, 54793, 1694, 10538, 11, 1465, 8, 341, 197, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRefMatchingSubdomainPrefix(t *testing.T) { for _, c := range []struct { ref, prefix string expected int }{ // Check for subdomain matches {"docker.io", "*.io", len("docker.io")}, {"docker.io/foo", "*.com", -1}, {"example.com/foo", "*.co", -1}, {"example.com/foo", "*.example.com", -1}, //FIXME: Port Number matching needs to be revisited. // https://github.com/containers/image/pull/1191#pullrequestreview-631869416 //{"example.com:5000", "*.com", len("example.com")}, //{"example.com:5000/foo", "*.com", len("example.com")}, //{"sub.example.com:5000/foo", "*.example.com", len("sub.example.com")}, //{"example.com:5000/foo/bar", "*.com", len("example.com")}, //{"example.com:5000/foo/bar:baz", "*.com", len("example.com")}, //{"example.com:5000/foo/bar/bbq:baz", "*.com", len("example.com")}, //{"example.com:50000/foo", "*.example.com", -1}, {"example.com/foo", "*.com", len("example.com")}, {"example.com/foo:bar", "*.com", len("example.com")}, {"example.com/foo/bar:baz", "*.com", len("example.com")}, {"yet.another.example.com/foo", "**.example.com", -1}, {"yet.another.example.com/foo", "***.another.example.com", -1}, {"yet.another.example.com/foo", "**********.another.example.com", -1}, {"yet.another.example.com/foo/bar", "**********.another.example.com", -1}, {"yet.another.example.com/foo/bar", "*.another.example.com", len("yet.another.example.com")}, {"another.example.com/namespace.com/foo/bar/bbq:baz", "*.example.com", len("another.example.com")}, {"example.net/namespace-ends-in.com/foo/bar/bbq:baz", "*.com", -1}, {"another.example.com/namespace.com/foo/bar/bbq:baz", "*.namespace.com", -1}, {"sub.example.com/foo/bar", "*.com", len("sub.example.com")}, {"sub.example.com/foo/bar", "*.example.com", len("sub.example.com")}, {"another.sub.example.com/foo/bar/bbq:baz", "*.example.com", len("another.sub.example.com")}, {"another.sub.example.com/foo/bar/bbq:baz", "*.sub.example.com", len("another.sub.example.com")}, 
{"yet.another.example.com/foo/bar@sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "*.example.com", len("yet.another.example.com")}, {"yet.another.sub.example.com/foo/bar@sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", "*.sub.example.com", len("yet.another.sub.example.com")}, } { refLen := refMatchingSubdomainPrefix(c.ref, c.prefix) assert.Equal(t, c.expected, refLen, fmt.Sprintf("%s vs. %s", c.ref, c.prefix)) } }
explode_data.jsonl/62225
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 993 }
[ 2830, 3393, 3945, 64430, 3136, 12204, 14335, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 272, 1669, 2088, 3056, 1235, 341, 197, 59504, 11, 9252, 914, 198, 197, 42400, 262, 526, 198, 197, 59403, 197, 197, 322, 4248, 369, 1186, 12204, 907...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAdjust(t *testing.T) { re := require.New(t) registerDefaultSchedulers() RegisterScheduler("random-merge") cfgData := ` name = "" lease = 0 max-request-bytes = 20000000 [pd-server] metric-storage = "http://127.0.0.1:9090" [schedule] max-merge-region-size = 0 enable-one-way-merge = true leader-schedule-limit = 0 ` cfg := NewConfig() meta, err := toml.Decode(cfgData, &cfg) re.NoError(err) err = cfg.Adjust(&meta, false) re.NoError(err) // When invalid, use default values. host, err := os.Hostname() re.NoError(err) re.Equal(fmt.Sprintf("%s-%s", defaultName, host), cfg.Name) re.Equal(defaultLeaderLease, cfg.LeaderLease) re.Equal(uint(20000000), cfg.MaxRequestBytes) // When defined, use values from config file. re.Equal(uint64(0), cfg.Schedule.MaxMergeRegionSize) re.True(cfg.Schedule.EnableOneWayMerge) re.Equal(uint64(0), cfg.Schedule.LeaderScheduleLimit) // When undefined, use default values. re.True(cfg.PreVote) re.Equal("info", cfg.Log.Level) re.Equal(uint64(defaultMaxMergeRegionKeys), cfg.Schedule.MaxMergeRegionKeys) re.Equal("http://127.0.0.1:9090", cfg.PDServerCfg.MetricStorage) re.Equal(DefaultTSOUpdatePhysicalInterval, cfg.TSOUpdatePhysicalInterval.Duration) // Check undefined config fields cfgData = ` type = "pd" name = "" lease = 0 [schedule] type = "random-merge" ` cfg = NewConfig() meta, err = toml.Decode(cfgData, &cfg) re.NoError(err) err = cfg.Adjust(&meta, false) re.NoError(err) re.Contains(cfg.WarningMsgs[0], "Config contains undefined item") // Check misspelled schedulers name cfgData = ` name = "" lease = 0 [[schedule.schedulers]] type = "random-merge-schedulers" ` cfg = NewConfig() meta, err = toml.Decode(cfgData, &cfg) re.NoError(err) err = cfg.Adjust(&meta, false) re.Error(err) // Check correct schedulers name cfgData = ` name = "" lease = 0 [[schedule.schedulers]] type = "random-merge" ` cfg = NewConfig() meta, err = toml.Decode(cfgData, &cfg) re.NoError(err) err = cfg.Adjust(&meta, false) re.NoError(err) cfgData = ` [metric] interval = 
"35s" address = "localhost:9090" ` cfg = NewConfig() meta, err = toml.Decode(cfgData, &cfg) re.NoError(err) err = cfg.Adjust(&meta, false) re.NoError(err) re.Equal(35*time.Second, cfg.Metric.PushInterval.Duration) re.Equal("localhost:9090", cfg.Metric.PushAddress) // Test clamping TSOUpdatePhysicalInterval value cfgData = ` tso-update-physical-interval = "10ms" ` cfg = NewConfig() meta, err = toml.Decode(cfgData, &cfg) re.NoError(err) err = cfg.Adjust(&meta, false) re.NoError(err) re.Equal(minTSOUpdatePhysicalInterval, cfg.TSOUpdatePhysicalInterval.Duration) cfgData = ` tso-update-physical-interval = "15s" ` cfg = NewConfig() meta, err = toml.Decode(cfgData, &cfg) re.NoError(err) err = cfg.Adjust(&meta, false) re.NoError(err) re.Equal(maxTSOUpdatePhysicalInterval, cfg.TSOUpdatePhysicalInterval.Duration) }
explode_data.jsonl/78165
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1180 }
[ 2830, 3393, 38616, 1155, 353, 8840, 836, 8, 341, 17200, 1669, 1373, 7121, 1155, 340, 29422, 3675, 74674, 741, 79096, 38878, 445, 11463, 12, 19052, 1138, 50286, 1043, 1669, 22074, 606, 284, 8389, 1623, 284, 220, 15, 198, 2810, 44808, 143...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRouterMixParamMatchAny(t *testing.T) { e := New() r := e.router // Route r.Add(http.MethodGet, "/users/:id/*", func(c Context) error { return nil }) c := e.NewContext(nil, nil).(*context) r.Find(http.MethodGet, "/users/joe/comments", c) c.handler(c) assert.Equal(t, "joe", c.Param("id")) }
explode_data.jsonl/47123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 9523, 58083, 2001, 8331, 8610, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1532, 741, 7000, 1669, 384, 22125, 271, 197, 322, 9572, 198, 7000, 1904, 19886, 20798, 1949, 11, 3521, 4218, 11315, 307, 1057, 497, 2915, 1337, 9608, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPodIPsValidation(t *testing.T) { testCases := []struct { pod core.Pod expectError bool }{ { expectError: false, pod: makePod("nil-ips", "ns", nil), }, { expectError: false, pod: makePod("empty-podips-list", "ns", []core.PodIP{}), }, { expectError: false, pod: makePod("single-ip-family-6", "ns", []core.PodIP{{IP: "::1"}}), }, { expectError: false, pod: makePod("single-ip-family-4", "ns", []core.PodIP{{IP: "1.1.1.1"}}), }, { expectError: false, pod: makePod("dual-stack-4-6", "ns", []core.PodIP{{IP: "1.1.1.1"}, {IP: "::1"}}), }, { expectError: false, pod: makePod("dual-stack-6-4", "ns", []core.PodIP{{IP: "::1"}, {IP: "1.1.1.1"}}), }, /* failure cases start here */ { expectError: true, pod: makePod("invalid-pod-ip", "ns", []core.PodIP{{IP: "this-is-not-an-ip"}}), }, { expectError: true, pod: makePod("dualstack-same-ip-family-6", "ns", []core.PodIP{{IP: "::1"}, {IP: "::2"}}), }, { expectError: true, pod: makePod("dualstack-same-ip-family-4", "ns", []core.PodIP{{IP: "1.1.1.1"}, {IP: "2.2.2.2"}}), }, { expectError: true, pod: makePod("dualstack-repeated-ip-family-6", "ns", []core.PodIP{{IP: "1.1.1.1"}, {IP: "::1"}, {IP: "::2"}}), }, { expectError: true, pod: makePod("dualstack-repeated-ip-family-4", "ns", []core.PodIP{{IP: "1.1.1.1"}, {IP: "::1"}, {IP: "2.2.2.2"}}), }, { expectError: true, pod: makePod("dualstack-duplicate-ip-family-4", "ns", []core.PodIP{{IP: "1.1.1.1"}, {IP: "1.1.1.1"}, {IP: "::1"}}), }, { expectError: true, pod: makePod("dualstack-duplicate-ip-family-6", "ns", []core.PodIP{{IP: "1.1.1.1"}, {IP: "::1"}, {IP: "::1"}}), }, } for _, testCase := range testCases { errs := ValidatePod(&testCase.pod) if len(errs) == 0 && testCase.expectError { t.Errorf("expected failure for %s, but there were none", testCase.pod.Name) return } if len(errs) != 0 && !testCase.expectError { t.Errorf("expected success for %s, but there were errors: %v", testCase.pod.Name, errs) return } } }
explode_data.jsonl/25703
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1145 }
[ 2830, 3393, 23527, 3298, 82, 13799, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 3223, 347, 260, 6200, 88823, 198, 197, 24952, 1454, 1807, 198, 197, 59403, 197, 197, 515, 298, 24952, 1454, 25, 895, 345, 298,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_AddPodToVolume_Negative_VolumeDoesntExist(t *testing.T) { // Arrange volumePluginMgr, _ := volumetesting.GetTestVolumePluginMgr(t) asw := NewActualStateOfWorld("mynode" /* nodeName */, volumePluginMgr) pod := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "pod1", UID: "pod1uid", }, Spec: v1.PodSpec{ Volumes: []v1.Volume{ { Name: "volume-name", VolumeSource: v1.VolumeSource{ GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{ PDName: "fake-device1", }, }, }, }, }, } volumeSpec := &volume.Spec{Volume: &pod.Spec.Volumes[0]} plugin, err := volumePluginMgr.FindPluginBySpec(volumeSpec) if err != nil { t.Fatalf( "volumePluginMgr.FindPluginBySpec failed to find volume plugin for %#v with: %v", volumeSpec, err) } blockplugin, err := volumePluginMgr.FindMapperPluginBySpec(volumeSpec) if err != nil { t.Fatalf( "volumePluginMgr.FindMapperPluginBySpec failed to find volume plugin for %#v with: %v", volumeSpec, err) } volumeName, err := util.GetUniqueVolumeNameFromSpec( plugin, volumeSpec) require.NoError(t, err) podName := util.GetUniquePodName(pod) mounter, err := plugin.NewMounter(volumeSpec, pod, volume.VolumeOptions{}) if err != nil { t.Fatalf("NewMounter failed. Expected: <no error> Actual: <%v>", err) } mapper, err := blockplugin.NewBlockVolumeMapper(volumeSpec, pod, volume.VolumeOptions{}) if err != nil { t.Fatalf("NewBlockVolumeMapper failed. Expected: <no error> Actual: <%v>", err) } // Act err = asw.AddPodToVolume( podName, pod.UID, volumeName, mounter, mapper, volumeSpec.Name(), "" /* volumeGidValue */, volumeSpec) // Assert if err == nil { t.Fatalf("AddPodToVolume did not fail. Expected: <\"no volume with the name ... 
exists in the list of attached volumes\"> Actual: <no error>") } verifyVolumeExistsAsw(t, volumeName, false /* shouldExist */, asw) verifyVolumeDoesntExistInUnmountedVolumes(t, volumeName, asw) verifyVolumeDoesntExistInGloballyMountedVolumes(t, volumeName, asw) verifyPodDoesntExistInVolumeAsw( t, podName, volumeName, false, /* expectVolumeToExist */ asw) verifyVolumeDoesntExistWithSpecNameInVolumeAsw(t, podName, volumeSpec.Name(), asw) }
explode_data.jsonl/28882
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 885 }
[ 2830, 3393, 21346, 23527, 1249, 18902, 1604, 15060, 2334, 4661, 21468, 406, 25613, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 5195, 4661, 11546, 25567, 11, 716, 1669, 62820, 57824, 287, 2234, 2271, 18902, 11546, 25567, 1155, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestIssue30101(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t1;") tk.MustExec("create table t1(c1 bigint unsigned, c2 bigint unsigned);") tk.MustExec("insert into t1 values(9223372036854775808, 9223372036854775809);") tk.MustQuery("select greatest(c1, c2) from t1;").Sort().Check(testkit.Rows("9223372036854775809")) }
explode_data.jsonl/65619
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 42006, 18, 15, 16, 15, 16, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCustomNewTemplates(t *testing.T) { var buf bytes.Buffer err := templates.AddFile("newtemplate", customNewTemplate) assert.Nil(t, err) err = templates.AddFile("existingUsesNew", customExistingUsesNew) assert.Nil(t, err) headerTempl, err := templates.Get("bindprimitiveparam") assert.Nil(t, err) err = headerTempl.Execute(&buf, nil) assert.Nil(t, err) assert.Equal(t, "new template", buf.String()) }
explode_data.jsonl/49686
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 10268, 3564, 51195, 1155, 353, 8840, 836, 8, 341, 2405, 6607, 5820, 22622, 271, 9859, 1669, 19911, 1904, 1703, 445, 931, 4214, 497, 2526, 3564, 7275, 340, 6948, 59678, 1155, 11, 1848, 692, 9859, 284, 19911, 1904, 1703, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileSD(t *testing.T) { defer os.Remove("fixtures/_test.yml") defer os.Remove("fixtures/_test.json") testFileSD(t, ".yml") testFileSD(t, ".json") }
explode_data.jsonl/69744
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 1703, 5491, 1155, 353, 8840, 836, 8, 341, 16867, 2643, 13270, 445, 45247, 19632, 1944, 33936, 1138, 16867, 2643, 13270, 445, 45247, 19632, 1944, 4323, 1138, 18185, 1703, 5491, 1155, 11, 5933, 88, 1014, 1138, 18185, 1703, 5491,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestSortingActivePods verifies that sorting with ActivePods orders pods
// from least to most "active": unscheduled, scheduled-but-pending, unknown,
// running-but-not-ready, then running-and-ready. The list is shuffled
// repeatedly and the sort must restore the expected order each time.
func TestSortingActivePods(t *testing.T) {
	numPods := 5
	// This rc is not needed by the test, only the newPodList to give the pods labels/a namespace.
	rc := newReplicationController(0)
	podList := newPodList(nil, numPods, api.PodRunning, rc)
	pods := make([]*api.Pod, len(podList.Items))
	for i := range podList.Items {
		pods[i] = &podList.Items[i]
	}
	// pods[0] is not scheduled yet.
	pods[0].Spec.NodeName = ""
	pods[0].Status.Phase = api.PodPending
	// pods[1] is scheduled but pending.
	pods[1].Spec.NodeName = "bar"
	pods[1].Status.Phase = api.PodPending
	// pods[2] is unknown.
	pods[2].Spec.NodeName = "foo"
	pods[2].Status.Phase = api.PodUnknown
	// pods[3] is running but not ready.
	pods[3].Spec.NodeName = "foo"
	pods[3].Status.Phase = api.PodRunning
	// pods[4] is running and ready.
	pods[4].Spec.NodeName = "foo"
	pods[4].Status.Phase = api.PodRunning
	pods[4].Status.Conditions = []api.PodCondition{{Type: api.PodReady, Status: api.ConditionTrue}}
	// getOrder captures pod names in their current slice order so orderings
	// can be compared with DeepEqual.
	getOrder := func(pods []*api.Pod) []string {
		names := make([]string, len(pods))
		for i := range pods {
			names[i] = pods[i].Name
		}
		return names
	}
	expected := getOrder(pods)
	// 20 random permutations; each must sort back to the canonical order.
	for i := 0; i < 20; i++ {
		idx := rand.Perm(numPods)
		randomizedPods := make([]*api.Pod, numPods)
		for j := 0; j < numPods; j++ {
			randomizedPods[j] = pods[idx[j]]
		}
		sort.Sort(ActivePods(randomizedPods))
		actual := getOrder(randomizedPods)
		if !reflect.DeepEqual(actual, expected) {
			t.Errorf("expected %v, got %v", expected, actual)
		}
	}
}
explode_data.jsonl/36059
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 615 }
[ 2830, 3393, 71681, 5728, 23527, 82, 1155, 353, 8840, 836, 8, 341, 22431, 23527, 82, 1669, 220, 20, 198, 197, 322, 1096, 10192, 374, 537, 4362, 553, 279, 1273, 11, 1172, 279, 501, 23527, 852, 311, 2968, 279, 54587, 9201, 14186, 4473, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestUserWillLogIn_Blocked verifies that a plugin implementing the
// UserWillLogIn hook can reject a login: DoLogin must return an error whose
// Id reports rejection by the plugin.
func TestUserWillLogIn_Blocked(t *testing.T) {
	th := Setup(t).InitBasic()
	defer th.TearDown()
	err := th.App.UpdatePassword(th.BasicUser, "hunter2")
	if err != nil {
		t.Errorf("Error updating user password: %s", err)
	}
	// Compile and activate an in-test plugin whose UserWillLogIn hook
	// returns a non-empty string, which blocks the login.
	tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{
		`
	package main

	import (
		"github.com/blastbao/mattermost-server/plugin"
		"github.com/blastbao/mattermost-server/model"
	)

	type MyPlugin struct {
		plugin.MattermostPlugin
	}

	func (p *MyPlugin) UserWillLogIn(c *plugin.Context, user *model.User) string {
		return "Blocked By Plugin"
	}

	func main() {
		plugin.ClientMain(&MyPlugin{})
	}
`}, th.App, th.App.NewPluginAPI)
	defer tearDown()
	r := &http.Request{}
	w := httptest.NewRecorder()
	_, err = th.App.DoLogin(w, r, th.BasicUser, "")
	if !strings.HasPrefix(err.Id, "Login rejected by plugin") {
		t.Errorf("Expected Login rejected by plugin, got %s", err.Id)
	}
}
explode_data.jsonl/30308
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 3393, 1474, 9945, 2201, 641, 1668, 24633, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 9859, 1669, 270, 5105, 16689, 4876, 24365, 48868, 1474, 11, 330, 98272, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSignalFxExporterConsumeMetadata(t *testing.T) { f := NewFactory() cfg := f.CreateDefaultConfig() rCfg := cfg.(*Config) rCfg.AccessToken = "token" rCfg.Realm = "realm" exp, err := f.CreateMetricsExporter(context.Background(), component.ExporterCreateParams{Logger: zap.NewNop()}, rCfg) require.NoError(t, err) kme, ok := exp.(metadata.MetadataExporter) require.True(t, ok, "SignalFx exporter does not implement metadata.MetadataExporter") require.NotNil(t, kme) }
explode_data.jsonl/61021
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 26810, 81856, 88025, 1109, 31323, 14610, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 4153, 741, 50286, 1669, 282, 7251, 3675, 2648, 741, 7000, 42467, 1669, 13286, 41399, 2648, 340, 7000, 42467, 35645, 3323, 284, 330, 5839,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteGroupSnapshots(t *testing.T) { defer resetTest() name := "test-group-snap-delete" namespace := "default" selectors := map[string]string{"app": "mysql"} createGroupSnapshotAndVerify(t, name, namespace, selectors, "", "", nil, nil, 0) cmdArgs := []string{"delete", "groupsnapshots", name} expected := fmt.Sprintf("GroupVolumeSnapshot %s deleted successfully\n", name) testCommon(t, cmdArgs, nil, expected, false) // delete again. should fail cmdArgs = []string{"delete", "groupsnapshots", name} expected = fmt.Sprintf("Error from server (NotFound): groupvolumesnapshots.stork.libopenstorage.org \"%s\" not found", name) testCommon(t, cmdArgs, nil, expected, true) // delete multiple name1 := "test-group-snap-delete-1" name2 := "test-group-snap-delete-2" createGroupSnapshotAndVerify(t, name1, namespace, selectors, "", "", nil, nil, 0) createGroupSnapshotAndVerify(t, name2, namespace, selectors, "", "", nil, nil, 0) cmdArgs = []string{"delete", "groupsnapshots", name1, name2} expected = fmt.Sprintf("GroupVolumeSnapshot %s deleted successfully\n", name1) expected += fmt.Sprintf("GroupVolumeSnapshot %s deleted successfully\n", name2) testCommon(t, cmdArgs, nil, expected, false) }
explode_data.jsonl/20432
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 6435, 2808, 61871, 27634, 1155, 353, 8840, 836, 8, 341, 16867, 7585, 2271, 2822, 11609, 1669, 330, 1944, 4351, 1331, 6861, 40904, 698, 56623, 1669, 330, 2258, 698, 38010, 1087, 1669, 2415, 14032, 30953, 4913, 676, 788, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBTreeCloneConcurrentOperations stresses btree copy-on-write cloning:
// it builds a tree while recursively cloning it at intervals from multiple
// goroutines, checks every resulting tree holds the full key set, then
// deletes half the items from half the trees concurrently and re-verifies.
func TestBTreeCloneConcurrentOperations(t *testing.T) {
	const cloneTestSize = 1000
	p := perm(cloneTestSize)
	var trees []*btree
	// Clones are funneled through treeC into the trees slice by a single
	// collector goroutine, so appends never race.
	treeC, treeDone := make(chan *btree), make(chan struct{})
	go func() {
		for b := range treeC {
			trees = append(trees, b)
		}
		close(treeDone)
	}()
	var wg sync.WaitGroup
	var populate func(tr *btree, start int)
	// populate inserts p[start:] into tr, spawning a clone (and a goroutine
	// to finish populating it) every cloneTestSize/5 insertions.
	populate = func(tr *btree, start int) {
		t.Logf("Starting new clone at %v", start)
		treeC <- tr
		for i := start; i < cloneTestSize; i++ {
			tr.Set(p[i])
			if i%(cloneTestSize/5) == 0 {
				wg.Add(1)
				c := tr.Clone()
				go populate(&c, i+1)
			}
		}
		wg.Done()
	}
	wg.Add(1)
	var tr btree
	go populate(&tr, 0)
	wg.Wait()
	close(treeC)
	<-treeDone
	t.Logf("Starting equality checks on %d trees", len(trees))
	want := rang(0, cloneTestSize-1)
	for i, tree := range trees {
		if !reflect.DeepEqual(want, all(tree)) {
			t.Errorf("tree %v mismatch", i)
		}
	}
	t.Log("Removing half of items from first half")
	toRemove := want[cloneTestSize/2:]
	for i := 0; i < len(trees)/2; i++ {
		tree := trees[i]
		wg.Add(1)
		go func() {
			for _, item := range toRemove {
				tree.Delete(item)
			}
			wg.Done()
		}()
	}
	wg.Wait()
	t.Log("Checking all values again")
	// Trees that had items removed must hold only the first half; the rest
	// must still hold everything — deletes on clones must not leak across.
	for i, tree := range trees {
		var wantpart []*latch
		if i < len(trees)/2 {
			wantpart = want[:cloneTestSize/2]
		} else {
			wantpart = want
		}
		if got := all(tree); !reflect.DeepEqual(wantpart, got) {
			t.Errorf("tree %v mismatch, want %v got %v", i, len(want), len(got))
		}
	}
}
explode_data.jsonl/24891
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 675 }
[ 2830, 3393, 33, 6533, 37677, 1109, 3231, 35120, 1155, 353, 8840, 836, 8, 341, 4777, 14715, 2271, 1695, 284, 220, 16, 15, 15, 15, 198, 3223, 1669, 13854, 95906, 2271, 1695, 692, 2405, 12408, 29838, 65, 9344, 198, 51968, 34, 11, 4916, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParserStart(t *testing.T) { const want = "unpacking header" var p Parser for i := 0; i <= 1; i++ { _, err := p.Start([]byte{}) if !checkErrorPrefix(err, want) { t.Errorf("got p.Start(nil) = _, %v, want = _, %s", err, want) } } }
explode_data.jsonl/60552
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 6570, 3479, 1155, 353, 8840, 836, 8, 341, 4777, 1366, 284, 330, 80774, 287, 4247, 698, 2405, 281, 21102, 198, 2023, 600, 1669, 220, 15, 26, 600, 2651, 220, 16, 26, 600, 1027, 341, 197, 197, 6878, 1848, 1669, 281, 12101, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidationChainWithFailingValidationAlwaysFails(t *testing.T) { v := NewChain() v.AddDetector(PassingDetection{}) v.AddDetector(FailingDetection{}) results := helpers.NewDetectionResults(talismanrc.HookMode) v.Test(nil, &talismanrc.TalismanRC{}, results) assert.False(t, results.Successful(), "Expected validation chain with a failure to fail.") }
explode_data.jsonl/49104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 13799, 18837, 2354, 37, 14277, 13799, 37095, 37, 6209, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 1532, 18837, 741, 5195, 1904, 31606, 5304, 72832, 54817, 37790, 5195, 1904, 31606, 7832, 14277, 54817, 37790, 55497, 1669, 30187, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSyncAsync creates the guestbook app with async sync enabled, triggers a
// sync, and expects the operation to succeed and the app to reach a Synced
// status.
func TestSyncAsync(t *testing.T) {
	Given(t).
		Path(guestbookPath).
		Async(true).
		When().
		CreateApp().
		Sync().
		Then().
		Expect(Success("")).
		Expect(OperationPhaseIs(OperationSucceeded)).
		Expect(SyncStatusIs(SyncStatusCodeSynced))
}
explode_data.jsonl/35644
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 12154, 6525, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 69640, 3268, 3045, 2190, 1820, 4292, 197, 197, 6525, 3715, 4292, 197, 197, 4498, 25829, 197, 75569, 2164, 25829, 197, 7568, 1721, 25829, 197, 197, 12209...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTextMatch_String(t *testing.T) { v := TextMatch{ ObjectURL: String(""), ObjectType: String(""), Property: String(""), Fragment: String(""), } want := `github.TextMatch{ObjectURL:"", ObjectType:"", Property:"", Fragment:""}` if got := v.String(); got != want { t.Errorf("TextMatch.String = %v, want %v", got, want) } }
explode_data.jsonl/33294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 1178, 8331, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 2918, 8331, 515, 197, 23816, 3144, 25, 220, 923, 445, 4461, 197, 23816, 929, 25, 923, 445, 4461, 197, 197, 3052, 25, 256, 923, 445, 4461, 197, 197, 9488, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCheck_Run exercises Check.run across check configurations: manual and
// skipped checks, unscored checks (warn on failure, pass on success), and
// scored checks (fail on missing/empty/failing tests, pass on success).
func TestCheck_Run(t *testing.T) {
	type TestCase struct {
		name     string
		check    Check
		Expected State
	}
	testCases := []TestCase{
		{name: "Manual check should WARN", check: Check{Type: MANUAL}, Expected: WARN},
		{name: "Skip check should INFO", check: Check{Type: "skip"}, Expected: INFO},
		{name: "Unscored check (with no type) should WARN on failure", check: Check{Scored: false}, Expected: WARN},
		{
			name: "Unscored check that pass should PASS",
			check: Check{
				Scored: false,
				Audit:  "echo hello",
				Tests: &tests{TestItems: []*testItem{{
					Flag: "hello",
					Set:  true,
				}}},
			},
			Expected: PASS,
		},
		{name: "Check with no tests should WARN", check: Check{Scored: true}, Expected: WARN},
		{name: "Scored check with empty tests should FAIL", check: Check{Scored: true, Tests: &tests{}}, Expected: FAIL},
		{
			name: "Scored check that doesn't pass should FAIL",
			check: Check{
				Scored: true,
				Audit:  "echo hello",
				Tests: &tests{TestItems: []*testItem{{
					Flag: "hello",
					Set:  false,
				}}},
			},
			Expected: FAIL,
		},
		{
			name: "Scored checks that pass should PASS",
			check: Check{
				Scored: true,
				Audit:  "echo hello",
				Tests: &tests{TestItems: []*testItem{{
					Flag: "hello",
					Set:  true,
				}}},
			},
			Expected: PASS,
		},
	}
	// Each case runs as a subtest; run() records its verdict in check.State.
	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			testCase.check.run()
			if testCase.check.State != testCase.Expected {
				t.Errorf("expected %s, actual %s", testCase.Expected, testCase.check.State)
			}
		})
	}
}
explode_data.jsonl/69360
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 675 }
[ 2830, 3393, 3973, 84158, 1155, 353, 8840, 836, 8, 341, 13158, 30573, 2036, 341, 197, 11609, 257, 914, 198, 197, 25157, 262, 4248, 198, 197, 197, 18896, 3234, 198, 197, 630, 18185, 37302, 1669, 3056, 16458, 515, 197, 197, 47006, 25, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRegisterCommandAndSendNotification(t *testing.T) { // Create the Facade, register the FacadeTestCommand to // handle 'FacadeTest' notifications var facade = facade.GetInstance(func() interfaces.IFacade { return &facade.Facade{} }) facade.RegisterCommand("FacadeTestNote", func() interfaces.ICommand { return &FacadeTestCommand{} }) // Send notification. The Command associated with the event // (FacadeTestCommand) will be invoked, and will multiply // the vo.input value by 2 and set the result on vo.result var vo = FacadeTestVO{Input: 32} facade.SendNotification("FacadeTestNote", &vo, "") // test assertions if vo.Result != 64 { t.Error("Expecting vo.result == 64") } }
explode_data.jsonl/21991
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 8690, 4062, 3036, 11505, 11196, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 279, 16945, 1021, 11, 4161, 279, 16945, 1021, 2271, 4062, 311, 198, 197, 322, 3705, 364, 55331, 2271, 6, 21969, 198, 2405, 61616, 284, 61616, 53...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestControllerUpdateStatusWithFailure verifies that updateReplicaCount
// retries after a failed status update: with every update forced to fail,
// it must re-GET the ReplicaSet once and attempt the update twice, always
// sending the desired replica count.
func TestControllerUpdateStatusWithFailure(t *testing.T) {
	rs := newReplicaSet(1, map[string]string{"foo": "bar"})
	fakeClient := &fake.Clientset{}
	// GETs return the real ReplicaSet; every other verb returns an error.
	// Reactor order matters: the "get" reactor must be registered first so
	// it shadows the catch-all.
	fakeClient.AddReactor("get", "replicasets", func(action core.Action) (bool, runtime.Object, error) { return true, rs, nil })
	fakeClient.AddReactor("*", "*", func(action core.Action) (bool, runtime.Object, error) {
		return true, &extensions.ReplicaSet{}, fmt.Errorf("Fake error")
	})
	fakeRSClient := fakeClient.Extensions().ReplicaSets("default")
	numReplicas := 10
	updateReplicaCount(fakeRSClient, *rs, numReplicas, 0)
	updates, gets := 0, 0
	for _, a := range fakeClient.Actions() {
		if a.GetResource().Resource != "replicasets" {
			t.Errorf("Unexpected action %+v", a)
			continue
		}
		switch action := a.(type) {
		case core.GetAction:
			gets++
			// Make sure the get is for the right ReplicaSet even though the update failed.
			if action.GetName() != rs.Name {
				t.Errorf("Expected get for ReplicaSet %v, got %+v instead", rs.Name, action.GetName())
			}
		case core.UpdateAction:
			updates++
			// Confirm that the update has the right status.Replicas even though the Get
			// returned a ReplicaSet with replicas=1.
			if c, ok := action.GetObject().(*extensions.ReplicaSet); !ok {
				t.Errorf("Expected a ReplicaSet as the argument to update, got %T", c)
			} else if int(c.Status.Replicas) != numReplicas {
				t.Errorf("Expected update for ReplicaSet to contain replicas %v, got %v instead", numReplicas, c.Status.Replicas)
			}
		default:
			t.Errorf("Unexpected action %+v", a)
			break
		}
	}
	// One retry after the first failed update: 1 get, 2 updates total.
	if gets != 1 || updates != 2 {
		t.Errorf("Expected 1 get and 2 updates, got %d gets %d updates", gets, updates)
	}
}
explode_data.jsonl/10051
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 2051, 4289, 2522, 2354, 17507, 1155, 353, 8840, 836, 8, 341, 41231, 1669, 501, 18327, 15317, 1649, 7, 16, 11, 2415, 14032, 30953, 4913, 7975, 788, 330, 2257, 23625, 1166, 726, 2959, 1669, 609, 30570, 11716, 746, 16094, 1166,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSerializeJobs checks that Fissile.SerializeJobs emits the expected
// JSON for the shared test releases, and that it errors out when no
// releases are loaded.
func TestSerializeJobs(t *testing.T) {
	assert := assert.New(t)
	// Shared fixture is built lazily, exactly once across serialize tests.
	testSerializeInput.once.Do(initTestSerializeInput)
	f := &Fissile{
		Manifest: &model.RoleManifest{
			LoadedReleases: testSerializeInput.releases,
		}}
	result, err := f.SerializeJobs()
	if !assert.NoError(err) {
		return
	}
	// Round-trip through json.Marshal so the comparison is structural
	// (JSONEq), not dependent on map ordering.
	actual, err := json.Marshal(result)
	if !assert.NoError(err) {
		return
	}
	expected := `{
		"job-one": {
			"name": "first job",
			"fingerprint": "job-one",
			"packages": ["abc"],
			"release": "first release",
			"description": "a first job",
			"path": "",
			"properties": [],
			"sha1": "",
			"templates": [{
				"sourcePath": "/dev/urandom",
				"destinationPath": "/dev/null",
				"job": "job-one",
				"content": "hello"
			}],
			"version": ""
		},
		"job-two": {
			"name": "second job",
			"fingerprint": "job-two",
			"packages": ["def"],
			"release": "second release",
			"description": "a second job",
			"path": "",
			"properties": [],
			"sha1": "",
			"templates": [],
			"version": ""
		}
	}`
	assert.JSONEq(expected, string(actual))
	// Without loaded releases, serialization must fail with this message.
	_, err = (&Fissile{}).SerializeJobs()
	assert.EqualError(err, "Releases not loaded")
}
explode_data.jsonl/47763
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 500 }
[ 2830, 3393, 15680, 40667, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 18185, 15680, 2505, 66899, 33596, 38250, 2271, 15680, 2505, 340, 1166, 1669, 609, 37, 1038, 457, 515, 197, 197, 38495, 25, 609, 2528, 35955, 3849...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRenderNodeHookEmpty(t *testing.T) { t.Parallel() tests := []string{ "[foo](gopher://foo.bar)", "", "[foo](mailto://bar/)\n", "", } htmlParams := html.RendererOptions{ RenderNodeHook: renderHookEmpty, } params := TestParams{ RendererOptions: htmlParams, } doTestsParam(t, tests, params) }
explode_data.jsonl/72527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 6750, 1955, 31679, 3522, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 917, 515, 197, 197, 36864, 7975, 9533, 70, 16940, 1110, 7975, 22001, 15752, 197, 197, 1, 25897, 197, 197, 36864, 7975, 9533, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAPIEndpointValidation(t *testing.T) { for _, networkConfig := range goodAPIEndpointConfigs { configBody := apiEndpointMinimalConfigYaml + availabilityZoneConfig + networkConfig if _, err := ClusterFromBytes([]byte(configBody)); err != nil { t.Errorf("Correct config tested invalid: %s\n%s", err, networkConfig) } } for _, networkConfig := range incorrectAPIEndpointConfigs { configBody := apiEndpointMinimalConfigYaml + availabilityZoneConfig + networkConfig if _, err := ClusterFromBytes([]byte(configBody)); err == nil { t.Errorf("Incorrect config tested valid, expected error:\n%s", networkConfig) } } }
explode_data.jsonl/4363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 7082, 27380, 13799, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 3922, 2648, 1669, 2088, 1661, 7082, 27380, 84905, 341, 197, 25873, 5444, 1669, 6330, 27380, 88328, 2648, 56, 9467, 488, 18048, 15363, 2648, 488, 3922, 2648, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestParseFullMethod(t *testing.T) { tests := []struct { fullMethod string name string attr []attribute.KeyValue }{ { fullMethod: "/grpc.test.EchoService/Echo", name: "grpc.test.EchoService/Echo", attr: []attribute.KeyValue{ semconv.RPCServiceKey.String("grpc.test.EchoService"), semconv.RPCMethodKey.String("Echo"), }, }, { fullMethod: "/com.example.ExampleRmiService/exampleMethod", name: "com.example.ExampleRmiService/exampleMethod", attr: []attribute.KeyValue{ semconv.RPCServiceKey.String("com.example.ExampleRmiService"), semconv.RPCMethodKey.String("exampleMethod"), }, }, { fullMethod: "/MyCalcService.Calculator/Add", name: "MyCalcService.Calculator/Add", attr: []attribute.KeyValue{ semconv.RPCServiceKey.String("MyCalcService.Calculator"), semconv.RPCMethodKey.String("Add"), }, }, { fullMethod: "/MyServiceReference.ICalculator/Add", name: "MyServiceReference.ICalculator/Add", attr: []attribute.KeyValue{ semconv.RPCServiceKey.String("MyServiceReference.ICalculator"), semconv.RPCMethodKey.String("Add"), }, }, { fullMethod: "/MyServiceWithNoPackage/theMethod", name: "MyServiceWithNoPackage/theMethod", attr: []attribute.KeyValue{ semconv.RPCServiceKey.String("MyServiceWithNoPackage"), semconv.RPCMethodKey.String("theMethod"), }, }, { fullMethod: "/pkg.srv", name: "pkg.srv", attr: []attribute.KeyValue(nil), }, { fullMethod: "/pkg.srv/", name: "pkg.srv/", attr: []attribute.KeyValue{ semconv.RPCServiceKey.String("pkg.srv"), }, }, } for _, test := range tests { n, a := parseFullMethod(test.fullMethod) assert.Equal(t, test.name, n) assert.Equal(t, test.attr, a) } }
explode_data.jsonl/79678
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 783 }
[ 2830, 3393, 14463, 9432, 3523, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 94042, 3523, 914, 198, 197, 11609, 981, 914, 198, 197, 60943, 981, 3056, 9116, 9610, 1130, 198, 197, 59403, 197, 197, 515, 298, 94042, 352...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCrossShardSubqueryStream verifies streaming execution of a cross-shard
// join written as a subquery: the outer query goes to sbc1, the correlated
// lookup (bound to u1.col) goes to sbc2, and only id1 is returned.
func TestCrossShardSubqueryStream(t *testing.T) {
	executor, sbc1, sbc2, _ := createLegacyExecutorEnv()
	// sbc1 returns one row (id=1, col=3); col feeds the join to sbc2.
	result1 := []*sqltypes.Result{{
		Fields: []*querypb.Field{
			{Name: "id", Type: sqltypes.Int32},
			{Name: "col", Type: sqltypes.Int32},
		},
		RowsAffected: 1,
		InsertID:     0,
		Rows: [][]sqltypes.Value{{
			sqltypes.NewInt32(1),
			sqltypes.NewInt32(3),
		}},
	}}
	sbc1.SetResults(result1)
	result, err := executorStream(executor, "select id1 from (select u1.id id1, u2.id from user u1 join user u2 on u2.id = u1.col where u1.id = 1) as t")
	require.NoError(t, err)
	wantQueries := []*querypb.BoundQuery{{
		Sql:           "select u1.id as id1, u1.col from user as u1 where u1.id = 1",
		BindVariables: map[string]*querypb.BindVariable{},
	}}
	if !reflect.DeepEqual(sbc1.Queries, wantQueries) {
		t.Errorf("sbc1.Queries:\n%+v, want\n%+v\n", sbc1.Queries, wantQueries)
	}
	// We have to use string representation because bindvars type is too complex.
	got := fmt.Sprintf("%+v", sbc2.Queries)
	want := `[sql:"select u2.id from user as u2 where u2.id = :u1_col" bind_variables:<key:"u1_col" value:<type:INT32 value:"3" > > ]`
	if got != want {
		t.Errorf("sbc2.Queries:\n%s, want\n%s\n", got, want)
	}
	// Final streamed result carries only the projected id column.
	wantResult := &sqltypes.Result{
		Fields: []*querypb.Field{
			{Name: "id", Type: sqltypes.Int32},
		},
		Rows: [][]sqltypes.Value{{
			sqltypes.NewInt32(1),
		}},
	}
	if !result.Equal(wantResult) {
		t.Errorf("result: %+v, want %+v", result, wantResult)
	}
}
explode_data.jsonl/67432
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 662 }
[ 2830, 3393, 28501, 2016, 567, 3136, 1631, 3027, 1155, 353, 8840, 836, 8, 341, 67328, 4831, 11, 7898, 66, 16, 11, 7898, 66, 17, 11, 716, 1669, 1855, 77415, 25255, 14359, 741, 9559, 16, 1669, 29838, 3544, 9242, 18456, 90, 515, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSinceSeconds(t *testing.T) { tests := []struct { description string duration time.Duration expected int64 }{ {"0s", 0, 1}, {"1ms", 1 * time.Millisecond, 1}, {"500ms", 500 * time.Millisecond, 1}, {"999ms", 999 * time.Millisecond, 1}, {"1s", 1 * time.Second, 1}, {"1.1s", 1100 * time.Millisecond, 2}, {"1.5s", 1500 * time.Millisecond, 2}, {"1.9s", 1500 * time.Millisecond, 2}, {"2s", 2 * time.Second, 2}, {"10s", 10 * time.Second, 10}, {"60s", 60 * time.Second, 60}, } for _, test := range tests { testutil.Run(t, test.description, func(t *testutil.T) { since := sinceSeconds(test.duration) t.CheckDeepEqual(test.expected, since) }) } }
explode_data.jsonl/33814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 12549, 15343, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 42407, 914, 198, 197, 89300, 262, 882, 33795, 198, 197, 42400, 262, 526, 21, 19, 198, 197, 59403, 197, 197, 4913, 15, 82, 497, 220, 15, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFilteredInt verifies vstreamer row filtering with an integer
// predicate: only rows of t1 with id2 = 200 are streamed, and only the
// (id1, val) columns appear in the events. Updates that move a row into or
// out of the filter must surface as inserts/deletes respectively.
func TestFilteredInt(t *testing.T) {
	if testing.Short() {
		t.Skip()
	}
	engine.se.Reload(context.Background())
	execStatements(t, []string{
		"create table t1(id1 int, id2 int, val varbinary(128), primary key(id1))",
	})
	defer execStatements(t, []string{
		"drop table t1",
	})
	engine.se.Reload(context.Background())
	// Filter selects a column subset and restricts rows by id2.
	filter := &binlogdatapb.Filter{
		Rules: []*binlogdatapb.Rule{{
			Match:  "t1",
			Filter: "select id1, val from t1 where id2 = 200",
		}},
	}
	testcases := []testcase{{
		input: []string{
			"begin",
			"insert into t1 values (1, 100, 'aaa')",
			"insert into t1 values (2, 200, 'bbb')",
			"insert into t1 values (3, 100, 'ccc')",
			"insert into t1 values (4, 200, 'ddd')",
			"insert into t1 values (5, 200, 'eee')",
			"update t1 set val = 'newddd' where id1 = 4",
			"update t1 set id2 = 200 where id1 = 1",
			"update t1 set id2 = 100 where id1 = 2",
			"update t1 set id2 = 100 where id1 = 1",
			"update t1 set id2 = 200 where id1 = 2",
			"commit",
		},
		// Expected stream: rows 2/4/5 pass the filter; the val update on row
		// 4 shows before+after; id2 transitions appear as bare inserts
		// (entering the filter) or bare deletes (leaving it).
		output: [][]string{{
			`begin`,
			`type:FIELD field_event:{table_name:"t1" fields:{name:"id1" type:INT32 table:"t1" org_table:"t1" database:"vttest" org_name:"id1" column_length:11 charset:63 column_type:"int(11)"} fields:{name:"val" type:VARBINARY table:"t1" org_table:"t1" database:"vttest" org_name:"val" column_length:128 charset:63 column_type:"varbinary(128)"}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{after:{lengths:1 lengths:3 values:"2bbb"}}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{after:{lengths:1 lengths:3 values:"4ddd"}}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{after:{lengths:1 lengths:3 values:"5eee"}}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{before:{lengths:1 lengths:3 values:"4ddd"} after:{lengths:1 lengths:6 values:"4newddd"}}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{after:{lengths:1 lengths:3 values:"1aaa"}}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{before:{lengths:1 lengths:3 values:"2bbb"}}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{before:{lengths:1 lengths:3 values:"1aaa"}}}`,
			`type:ROW row_event:{table_name:"t1" row_changes:{after:{lengths:1 lengths:3 values:"2bbb"}}}`,
			`gtid`,
			`commit`,
		}},
	}}
	runCases(t, filter, testcases, "", nil)
}
explode_data.jsonl/10406
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 956 }
[ 2830, 3393, 67310, 1072, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 532, 80118, 4523, 38939, 2731, 5378, 19047, 12367, 67328, 93122, 1155, 11, 3056, 917, 515, 197, 197, 1, 3182, 1965, 259, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNetworkPeersWithEntityMatchers(t *testing.T) { endpointConfig, err := ConfigFromBackend(getMatcherConfig()) if err != nil { t.Fatal("Failed to get endpoint config from backend") } testNetworkPeers(t, endpointConfig) }
explode_data.jsonl/34102
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 12320, 10197, 388, 2354, 3030, 37862, 1155, 353, 8840, 836, 8, 1476, 6246, 2768, 2648, 11, 1848, 1669, 5532, 3830, 29699, 5433, 37554, 2648, 2398, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 445, 9408, 311, 633, 14887, 2193...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestPkgIsCandidate is a table-driven test of pkgIsCandidate. The cases
// cover: exact identifier match, non-match, a matching directory that sits
// too deep in the import path, substring matches ("go-client" for "client"),
// "internal" packages hidden from outside vs. visible to siblings,
// "vendor" packages hidden vs. visible to the vendoring tree, and matching
// that ignores hyphens and letter case (individually and combined).
func TestPkgIsCandidate(t *testing.T) { tests := [...]struct { filename string pkgIdent string pkg *pkg want bool }{ // normal match 0: { filename: "/gopath/src/my/pkg/pkg.go", pkgIdent: "client", pkg: &pkg{ dir: "/gopath/src/client", importPath: "client", importPathShort: "client", }, want: true, }, // not a match 1: { filename: "/gopath/src/my/pkg/pkg.go", pkgIdent: "zzz", pkg: &pkg{ dir: "/gopath/src/client", importPath: "client", importPathShort: "client", }, want: false, }, // would be a match, but "client" appears too deep. 2: { filename: "/gopath/src/my/pkg/pkg.go", pkgIdent: "client", pkg: &pkg{ dir: "/gopath/src/client/foo/foo/foo", importPath: "client/foo/foo", importPathShort: "client/foo/foo", }, want: false, }, // not an exact match, but substring is good enough. 3: { filename: "/gopath/src/my/pkg/pkg.go", pkgIdent: "client", pkg: &pkg{ dir: "/gopath/src/foo/go-client", importPath: "foo/go-client", importPathShort: "foo/go-client", }, want: true, }, // "internal" package, and not visible 4: { filename: "/gopath/src/my/pkg/pkg.go", pkgIdent: "client", pkg: &pkg{ dir: "/gopath/src/foo/internal/client", importPath: "foo/internal/client", importPathShort: "foo/internal/client", }, want: false, }, // "internal" package but visible 5: { filename: "/gopath/src/foo/bar.go", pkgIdent: "client", pkg: &pkg{ dir: "/gopath/src/foo/internal/client", importPath: "foo/internal/client", importPathShort: "foo/internal/client", }, want: true, }, // "vendor" package not visible 6: { filename: "/gopath/src/foo/bar.go", pkgIdent: "client", pkg: &pkg{ dir: "/gopath/src/other/vendor/client", importPath: "other/vendor/client", importPathShort: "client", }, want: false, }, // "vendor" package, visible 7: { filename: "/gopath/src/foo/bar.go", pkgIdent: "client", pkg: &pkg{ dir: "/gopath/src/foo/vendor/client", importPath: "other/foo/client", importPathShort: "client", }, want: true, }, // Ignore hyphens. 
8: { filename: "/gopath/src/foo/bar.go", pkgIdent: "socketio", pkg: &pkg{ dir: "/gopath/src/foo/socket-io", importPath: "foo/socket-io", importPathShort: "foo/socket-io", }, want: true, }, // Ignore case. 9: { filename: "/gopath/src/foo/bar.go", pkgIdent: "fooprod", pkg: &pkg{ dir: "/gopath/src/foo/FooPROD", importPath: "foo/FooPROD", importPathShort: "foo/FooPROD", }, want: true, }, // Ignoring both hyphens and case together. 10: { filename: "/gopath/src/foo/bar.go", pkgIdent: "fooprod", pkg: &pkg{ dir: "/gopath/src/foo/Foo-PROD", importPath: "foo/Foo-PROD", importPathShort: "foo/Foo-PROD", }, want: true, }, } for i, tt := range tests { got := pkgIsCandidate(tt.filename, tt.pkgIdent, tt.pkg) if got != tt.want { t.Errorf("test %d. pkgIsCandidate(%q, %q, %+v) = %v; want %v", i, tt.filename, tt.pkgIdent, *tt.pkg, got, tt.want) } } }
explode_data.jsonl/12440
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1696 }
[ 2830, 3393, 47, 7351, 3872, 63901, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 48179, 1235, 341, 197, 66434, 914, 198, 197, 3223, 7351, 28301, 914, 198, 197, 3223, 7351, 414, 353, 30069, 198, 197, 50780, 257, 1807, 198, 197, 59403, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRouter_MatchingOptions_MatchesByHostReturnsErrorWhenMalformedHost(t *testing.T) { mainRouter := Router{} err := mainRouter.Get("/users", testHandlerFunc, MatchingOptions{"", "app.{subdomain:[a-z]+}{m}.test2.com", []string{}, map[string]string{}, map[string]string{}, nil}) assertNotNil(t, err) }
explode_data.jsonl/31737
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 9523, 1245, 31924, 3798, 1245, 9118, 1359, 9296, 16446, 1454, 4498, 29600, 10155, 9296, 1155, 353, 8840, 836, 8, 341, 36641, 9523, 1669, 10554, 31483, 9859, 1669, 1887, 9523, 2234, 4283, 4218, 497, 1273, 3050, 9626, 11, 70691,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestShipper_SyncBlocksWithMigrating_e2e exercises Shipper.Sync end-to-end
// against a real Prometheus instance (restarted once with compaction
// disabled) and an in-memory bucket. It creates 10 blocks; the fifth is
// marked compacted (Compaction.Level = 2). For each block it checks that:
// Sync does not upload while the block is still a .tmp dir, Sync uploads
// exactly one block after the rename, external labels and file metadata are
// attached to the uploaded meta.json, and Timestamps() reflects the block
// min/max times. The fifth block is deleted from the bucket and must not be
// re-uploaded; the test finishes by comparing every uploaded object's
// contents against the expected bytes.
func TestShipper_SyncBlocksWithMigrating_e2e(t *testing.T) { e2eutil.ForeachPrometheus(t, func(t testing.TB, p *e2eutil.Prometheus) { dir, err := ioutil.TempDir("", "shipper-e2e-test") testutil.Ok(t, err) defer func() { testutil.Ok(t, os.RemoveAll(dir)) }() bkt := objstore.NewInMemBucket() ctx, cancel := context.WithCancel(context.Background()) defer cancel() extLset := labels.FromStrings("prometheus", "prom-1") testutil.Ok(t, p.Start()) upctx, upcancel := context.WithTimeout(ctx, 10*time.Second) defer upcancel() testutil.Ok(t, p.WaitPrometheusUp(upctx)) p.DisableCompaction() testutil.Ok(t, p.Restart()) upctx2, upcancel2 := context.WithTimeout(ctx, 10*time.Second) defer upcancel2() testutil.Ok(t, p.WaitPrometheusUp(upctx2)) shipper := New(log.NewLogfmtLogger(os.Stderr), nil, dir, bkt, func() labels.Labels { return extLset }, metadata.TestSource, true, false) // Create 10 new blocks. 9 of them (non compacted) should be actually uploaded. var ( expBlocks = map[ulid.ULID]struct{}{} expFiles = map[string][]byte{} randr = rand.New(rand.NewSource(0)) now = time.Now() ids = []ulid.ULID{} ) for i := 0; i < 10; i++ { id := ulid.MustNew(uint64(i), randr) bdir := filepath.Join(dir, id.String()) tmp := bdir + ".tmp" testutil.Ok(t, os.Mkdir(tmp, 0777)) meta := metadata.Meta{ BlockMeta: tsdb.BlockMeta{ Version: 1, ULID: id, Stats: tsdb.BlockStats{ NumSamples: 1, }, MinTime: timestamp.FromTime(now.Add(time.Duration(i) * time.Hour)), MaxTime: timestamp.FromTime(now.Add((time.Duration(i) * time.Hour) + 1)), Compaction: tsdb.BlockMetaCompaction{ Level: 1, }, }, Thanos: metadata.Thanos{ Source: metadata.TestSource, }, } // Fifth block is compacted one. if i == 4 { meta.Compaction.Level = 2 } metab, err := json.Marshal(&meta) testutil.Ok(t, err) testutil.Ok(t, ioutil.WriteFile(tmp+"/meta.json", metab, 0666)) testutil.Ok(t, ioutil.WriteFile(tmp+"/index", []byte("indexcontents"), 0666)) // Running shipper while a block is being written to temp dir should not trigger uploads. 
b, err := shipper.Sync(ctx) testutil.Ok(t, err) testutil.Equals(t, 0, b) shipMeta, err := ReadMetaFile(dir) testutil.Ok(t, err) if len(shipMeta.Uploaded) == 0 { shipMeta.Uploaded = []ulid.ULID{} } testutil.Equals(t, &Meta{Version: MetaVersion1, Uploaded: ids}, shipMeta) testutil.Ok(t, os.MkdirAll(tmp+"/chunks", 0777)) testutil.Ok(t, ioutil.WriteFile(tmp+"/chunks/0001", []byte("chunkcontents1"), 0666)) testutil.Ok(t, ioutil.WriteFile(tmp+"/chunks/0002", []byte("chunkcontents2"), 0666)) testutil.Ok(t, os.Rename(tmp, bdir)) // After rename sync should upload the block. b, err = shipper.Sync(ctx) testutil.Ok(t, err) testutil.Equals(t, 1, b) ids = append(ids, id) // The external labels must be attached to the meta file on upload. meta.Thanos.Labels = extLset.Map() meta.Thanos.SegmentFiles = []string{"0001", "0002"} meta.Thanos.Files = []metadata.File{ {RelPath: "chunks/0001", SizeBytes: 14}, {RelPath: "chunks/0002", SizeBytes: 14}, {RelPath: "index", SizeBytes: 13}, {RelPath: "meta.json"}, } buf := bytes.Buffer{} testutil.Ok(t, meta.Write(&buf)) // We will delete the fifth block and do not expect it to be re-uploaded later. if i != 4 { expBlocks[id] = struct{}{} expFiles[id.String()+"/meta.json"] = buf.Bytes() expFiles[id.String()+"/index"] = []byte("indexcontents") expFiles[id.String()+"/chunks/0001"] = []byte("chunkcontents1") expFiles[id.String()+"/chunks/0002"] = []byte("chunkcontents2") } if i == 4 { testutil.Ok(t, block.Delete(ctx, log.NewNopLogger(), bkt, ids[4])) } // The shipper meta file should show all blocks as uploaded except the compacted one. shipMeta, err = ReadMetaFile(dir) testutil.Ok(t, err) testutil.Equals(t, &Meta{Version: MetaVersion1, Uploaded: ids}, shipMeta) // Verify timestamps were updated correctly. 
minTotal, maxSync, err := shipper.Timestamps() testutil.Ok(t, err) testutil.Equals(t, timestamp.FromTime(now), minTotal) testutil.Equals(t, meta.MaxTime, maxSync) } for id := range expBlocks { ok, _ := bkt.Exists(ctx, path.Join(id.String(), block.MetaFilename)) testutil.Assert(t, ok, "block %s was not uploaded", id) } for fn, exp := range expFiles { rc, err := bkt.Get(ctx, fn) testutil.Ok(t, err) act, err := ioutil.ReadAll(rc) testutil.Ok(t, err) testutil.Ok(t, rc.Close()) testutil.Equals(t, string(exp), string(act)) } // Verify the fifth block is still deleted by the end. ok, err := bkt.Exists(ctx, ids[4].String()+"/meta.json") testutil.Ok(t, err) testutil.Assert(t, ok == false, "fifth block was reuploaded") }) }
explode_data.jsonl/15562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2127 }
[ 2830, 3393, 29624, 712, 1098, 1721, 4713, 16056, 44, 5233, 1095, 2204, 17, 68, 1155, 353, 8840, 836, 8, 341, 7727, 17, 68, 1314, 991, 8539, 35186, 39705, 1155, 11, 2915, 1155, 7497, 836, 33, 11, 281, 353, 68, 17, 68, 1314, 1069, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDeleteMulti exercises the S3 DeleteObjects (multi-delete) API against
// the fake server. Each subtest seeds three objects (foo/bar/baz), issues a
// batch delete for a subset, asserts the response reports exactly those keys
// as deleted (sorted), and verifies via a listing that only the remaining
// objects survive.
func TestDeleteMulti(t *testing.T) { deletedKeys := func(rs *s3.DeleteObjectsOutput) []string { deleted := make([]string, len(rs.Deleted)) for idx, del := range rs.Deleted { deleted[idx] = *del.Key } sort.Strings(deleted) return deleted } assertDeletedKeys := func(t *testing.T, rs *s3.DeleteObjectsOutput, expected ...string) { t.Helper() found := deletedKeys(rs) if !reflect.DeepEqual(found, expected) { t.Fatal("multi deletion failed", found, "!=", expected) } } t.Run("one-file", func(t *testing.T) { ts := newTestServer(t) defer ts.Close() svc := ts.s3Client() ts.backendPutString(defaultBucket, "foo", nil, "one") ts.backendPutString(defaultBucket, "bar", nil, "two") ts.backendPutString(defaultBucket, "baz", nil, "three") rs, err := svc.DeleteObjects(&s3.DeleteObjectsInput{ Bucket: aws.String(defaultBucket), Delete: &s3.Delete{ Objects: []*s3.ObjectIdentifier{ {Key: aws.String("foo")}, }, }, }) ts.OK(err) assertDeletedKeys(t, rs, "foo") ts.assertLs(defaultBucket, "", nil, []string{"bar", "baz"}) }) t.Run("multiple-files", func(t *testing.T) { ts := newTestServer(t) defer ts.Close() svc := ts.s3Client() ts.backendPutString(defaultBucket, "foo", nil, "one") ts.backendPutString(defaultBucket, "bar", nil, "two") ts.backendPutString(defaultBucket, "baz", nil, "three") rs, err := svc.DeleteObjects(&s3.DeleteObjectsInput{ Bucket: aws.String(defaultBucket), Delete: &s3.Delete{ Objects: []*s3.ObjectIdentifier{ {Key: aws.String("bar")}, {Key: aws.String("foo")}, }, }, }) ts.OK(err) assertDeletedKeys(t, rs, "bar", "foo") ts.assertLs(defaultBucket, "", nil, []string{"baz"}) }) }
explode_data.jsonl/22260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 756 }
[ 2830, 3393, 6435, 20358, 1155, 353, 8840, 836, 8, 341, 197, 26521, 8850, 1669, 2915, 17027, 353, 82, 18, 18872, 11543, 5097, 8, 3056, 917, 341, 197, 197, 26521, 1669, 1281, 10556, 917, 11, 2422, 17027, 8934, 6873, 1171, 197, 2023, 718...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestNodesWherePreemptionMightHelp is a table-driven test of
// nodesWherePreemptionMightHelp over four nodes (machine1..machine4).
// Each case maps node names to scheduling failure statuses and lists the
// node names where preemption should still be attempted: nodes whose status
// is UnschedulableAndUnresolvable are filtered out, plain Unschedulable
// statuses (and nodes with no recorded status) remain candidates.
func TestNodesWherePreemptionMightHelp(t *testing.T) { // Prepare 4 node names. nodeNames := make([]string, 0, 4) for i := 1; i < 5; i++ { nodeNames = append(nodeNames, fmt.Sprintf("machine%d", i)) } tests := []struct { name string nodesStatuses framework.NodeToStatusMap expected map[string]bool // set of expected node names. Value is ignored. }{ { name: "No node should be attempted", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodeaffinity.ErrReason), "machine2": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodename.ErrReason), "machine3": framework.NewStatus(framework.UnschedulableAndUnresolvable, tainttoleration.ErrReasonNotMatch), "machine4": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodelabel.ErrReasonPresenceViolated), }, expected: map[string]bool{}, }, { name: "ErrReasonAffinityNotMatch should be tried as it indicates that the pod is unschedulable due to inter-pod affinity or anti-affinity", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.Unschedulable, interpodaffinity.ErrReasonAffinityNotMatch), "machine2": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodename.ErrReason), "machine3": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodeunschedulable.ErrReasonUnschedulable), }, expected: map[string]bool{"machine1": true, "machine4": true}, }, { name: "pod with both pod affinity and anti-affinity should be tried", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.Unschedulable, interpodaffinity.ErrReasonAffinityNotMatch), "machine2": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodename.ErrReason), }, expected: map[string]bool{"machine1": true, "machine3": true, "machine4": true}, }, { name: "ErrReasonAffinityRulesNotMatch should not be tried as it indicates that the pod is unschedulable due to inter-pod affinity, but ErrReasonAffinityNotMatch 
should be tried as it indicates that the pod is unschedulable due to inter-pod affinity or anti-affinity", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.UnschedulableAndUnresolvable, interpodaffinity.ErrReasonAffinityRulesNotMatch), "machine2": framework.NewStatus(framework.Unschedulable, interpodaffinity.ErrReasonAffinityNotMatch), }, expected: map[string]bool{"machine2": true, "machine3": true, "machine4": true}, }, { name: "Mix of failed predicates works fine", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.UnschedulableAndUnresolvable, volumerestrictions.ErrReasonDiskConflict), "machine2": framework.NewStatus(framework.Unschedulable, fmt.Sprintf("Insufficient %v", v1.ResourceMemory)), }, expected: map[string]bool{"machine2": true, "machine3": true, "machine4": true}, }, { name: "Node condition errors should be considered unresolvable", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodeunschedulable.ErrReasonUnknownCondition), }, expected: map[string]bool{"machine2": true, "machine3": true, "machine4": true}, }, { name: "ErrVolume... 
errors should not be tried as it indicates that the pod is unschedulable due to no matching volumes for pod on node", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.UnschedulableAndUnresolvable, volumezone.ErrReasonConflict), "machine2": framework.NewStatus(framework.UnschedulableAndUnresolvable, volumebinding.ErrReasonNodeConflict), "machine3": framework.NewStatus(framework.UnschedulableAndUnresolvable, volumebinding.ErrReasonBindConflict), }, expected: map[string]bool{"machine4": true}, }, { name: "ErrTopologySpreadConstraintsNotMatch should be tried as it indicates that the pod is unschedulable due to topology spread constraints", nodesStatuses: framework.NodeToStatusMap{ "machine1": framework.NewStatus(framework.Unschedulable, podtopologyspread.ErrReasonConstraintsNotMatch), "machine2": framework.NewStatus(framework.UnschedulableAndUnresolvable, nodename.ErrReason), "machine3": framework.NewStatus(framework.Unschedulable, podtopologyspread.ErrReasonConstraintsNotMatch), }, expected: map[string]bool{"machine1": true, "machine3": true, "machine4": true}, }, { name: "UnschedulableAndUnresolvable status should be skipped but Unschedulable should be tried", nodesStatuses: framework.NodeToStatusMap{ "machine2": framework.NewStatus(framework.UnschedulableAndUnresolvable, ""), "machine3": framework.NewStatus(framework.Unschedulable, ""), "machine4": framework.NewStatus(framework.UnschedulableAndUnresolvable, ""), }, expected: map[string]bool{"machine1": true, "machine3": true}, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { fitErr := FitError{ FilteredNodesStatuses: test.nodesStatuses, } var nodeInfos []*schedulernodeinfo.NodeInfo for _, n := range makeNodeList(nodeNames) { ni := schedulernodeinfo.NewNodeInfo() ni.SetNode(n) nodeInfos = append(nodeInfos, ni) } nodes := nodesWherePreemptionMightHelp(nodeInfos, &fitErr) if len(test.expected) != len(nodes) { t.Errorf("number of nodes is not the same as 
expected. exptectd: %d, got: %d. Nodes: %v", len(test.expected), len(nodes), nodes) } for _, node := range nodes { name := node.Node().Name if _, found := test.expected[name]; !found { t.Errorf("node %v is not expected.", name) } } }) } }
explode_data.jsonl/2396
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2069 }
[ 2830, 3393, 12288, 9064, 4703, 33106, 44, 491, 12689, 1155, 353, 8840, 836, 8, 341, 197, 322, 31166, 220, 19, 2436, 5036, 624, 20831, 7980, 1669, 1281, 10556, 917, 11, 220, 15, 11, 220, 19, 340, 2023, 600, 1669, 220, 16, 26, 600, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTracingIdXML(t *testing.T) { src := "<trackingID><ns10:berth area=\"EH\">0734</ns10:berth><ns10:incorrectTrainID>1J04</ns10:incorrectTrainID><ns10:correctTrainID>1J23</ns10:correctTrainID></trackingID>" var id TrackingID if err := xml.Unmarshal([]byte(src), &id); err != nil { t.Errorf("Failed to unmarshal pport xml: %v", err) } if id.Berth.Area != "EH" { t.Errorf("area incorrect: %s", id.Berth.Area) } if id.Berth.Berth != "0734" { t.Errorf("berth incorrect: %s", id.Berth.Berth) } if id.IncorrectTrainID != "1J04" { t.Errorf("incorrectTrainID incorrect: %s", id.IncorrectTrainID) } if id.CorrectTrainID != "1J23" { t.Errorf("correctTrainID incorrect: %s", id.CorrectTrainID) } }
explode_data.jsonl/39592
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 1282, 4527, 764, 10609, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 4055, 44553, 915, 1784, 4412, 16, 15, 25, 652, 339, 3082, 4070, 45589, 11403, 15, 22, 18, 19, 522, 4412, 16, 15, 25, 652, 339, 1784, 4412, 16, 15, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestEncoderDecoderOperatorScenarios(t *testing.T) { for _, tt := range encoderDecoderOperatorScenarios { testScenario(t, &tt) } documentOperatorScenarios(t, "encode-decode", encoderDecoderOperatorScenarios) }
explode_data.jsonl/60325
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 83 }
[ 2830, 3393, 19921, 20732, 18461, 3326, 60494, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 23668, 20732, 18461, 3326, 60494, 341, 197, 18185, 54031, 1155, 11, 609, 5566, 340, 197, 532, 17470, 18461, 3326, 60494, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestClientAuthScheme(t *testing.T) { ts := createAuthServer(t) defer ts.Close() c := dc() // Ensure default Bearer c.SetTLSClientConfig(&tls.Config{InsecureSkipVerify: true}). SetAuthToken("004DDB79-6801-4587-B976-F093E6AC44FF"). SetHostURL(ts.URL + "/") resp, err := c.R().Get("/profile") assertError(t, err) assertEqual(t, http.StatusOK, resp.StatusCode()) // Ensure setting the scheme works as well c.SetAuthScheme("Bearer") resp2, err2 := c.R().Get("/profile") assertError(t, err2) assertEqual(t, http.StatusOK, resp2.StatusCode()) }
explode_data.jsonl/39334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 2959, 5087, 28906, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 1855, 5087, 5475, 1155, 340, 16867, 10591, 10421, 2822, 1444, 1669, 19402, 741, 197, 322, 29279, 1638, 425, 20786, 198, 1444, 4202, 45439, 2959, 2648, 2099, 34488, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_getAllDomains builds an Options with HTTP (9000) and gRPC (9001)
// listeners, authenticate/authorize/databroker URLs, and three policy source
// hosts, then checks getAllRouteableDomains (which includes port-qualified
// variants) and getAllTLSDomains (hostnames only) for each listener address.
func Test_getAllDomains(t *testing.T) { options := &config.Options{ Addr: "127.0.0.1:9000", GRPCAddr: "127.0.0.1:9001", Services: "all", AuthenticateURLString: "https://authenticate.example.com", AuthorizeURLString: "https://authorize.example.com:9001", DataBrokerURLString: "https://cache.example.com:9001", Policies: []config.Policy{ {Source: &config.StringURL{URL: mustParseURL(t, "http://a.example.com")}}, {Source: &config.StringURL{URL: mustParseURL(t, "https://b.example.com")}}, {Source: &config.StringURL{URL: mustParseURL(t, "https://c.example.com")}}, }, } t.Run("routable", func(t *testing.T) { t.Run("http", func(t *testing.T) { actual, err := getAllRouteableDomains(options, "127.0.0.1:9000") require.NoError(t, err) expect := []string{ "a.example.com", "a.example.com:80", "authenticate.example.com", "authenticate.example.com:443", "b.example.com", "b.example.com:443", "c.example.com", "c.example.com:443", } assert.Equal(t, expect, actual) }) t.Run("grpc", func(t *testing.T) { actual, err := getAllRouteableDomains(options, "127.0.0.1:9001") require.NoError(t, err) expect := []string{ "authorize.example.com:9001", "cache.example.com:9001", } assert.Equal(t, expect, actual) }) }) t.Run("tls", func(t *testing.T) { t.Run("http", func(t *testing.T) { actual, err := getAllTLSDomains(options, "127.0.0.1:9000") require.NoError(t, err) expect := []string{ "a.example.com", "authenticate.example.com", "b.example.com", "c.example.com", } assert.Equal(t, expect, actual) }) t.Run("grpc", func(t *testing.T) { actual, err := getAllTLSDomains(options, "127.0.0.1:9001") require.NoError(t, err) expect := []string{ "authorize.example.com", "cache.example.com", } assert.Equal(t, expect, actual) }) }) }
explode_data.jsonl/36951
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 930 }
[ 2830, 3393, 3062, 2403, 74713, 1155, 353, 8840, 836, 8, 341, 35500, 1669, 609, 1676, 22179, 515, 197, 197, 13986, 25, 1698, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 24, 15, 15, 15, 756, 197, 197, 8626, 4872, 13986, 25, 1060,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPlotterUpdate reconciles a FybrikApplication backed by a fake client
// and a read module, then drives the generated Plotter through two state
// transitions: first it sets an error on the plotter's observed state and
// expects the next reconcile to surface that error on the application;
// then it clears the error, marks the plotter ready, and expects the
// application to report Ready after another reconcile.
func TestPlotterUpdate(t *testing.T) { t.Parallel() g := gomega.NewGomegaWithT(t) // Set the logger to development mode for verbose logs. logf.SetLogger(zap.New(zap.UseDevMode(true))) namespaced := types.NamespacedName{ Name: "read-test", Namespace: "default", } application := &app.FybrikApplication{} g.Expect(readObjectFromFile("../../testdata/unittests/data-usage.yaml", application)).NotTo(gomega.HaveOccurred()) application.Spec.Data[0] = app.DataContext{ DataSetID: "s3/allow-dataset", Requirements: app.DataRequirements{Interface: app.InterfaceDetails{Protocol: app.ArrowFlight, DataFormat: app.Arrow}}, } application.SetGeneration(1) // Objects to track in the fake client. objs := []runtime.Object{ application, } // Register operator types with the runtime scheme. s := utils.NewScheme(g) // Create a fake client to mock API calls. cl := fake.NewFakeClientWithScheme(s, objs...) // Read module readModule := &app.FybrikModule{} g.Expect(readObjectFromFile("../../testdata/unittests/module-read-parquet.yaml", readModule)).NotTo(gomega.HaveOccurred()) readModule.Namespace = utils.GetControllerNamespace() g.Expect(cl.Create(context.Background(), readModule)).NotTo(gomega.HaveOccurred(), "the read module could not be created") // Create a FybrikApplicationReconciler object with the scheme and fake client. 
r := createTestFybrikApplicationController(cl, s) req := reconcile.Request{ NamespacedName: namespaced, } _, err := r.Reconcile(context.Background(), req) g.Expect(err).To(gomega.BeNil()) err = cl.Get(context.Background(), req.NamespacedName, application) g.Expect(err).To(gomega.BeNil(), "Cannot fetch fybrikapplication") // check plotter creation g.Expect(application.Status.Generated).ToNot(gomega.BeNil()) g.Expect(application.Status.Generated.AppVersion).To(gomega.Equal(application.Generation)) plotterObjectKey := types.NamespacedName{ Namespace: application.Status.Generated.Namespace, Name: application.Status.Generated.Name, } plotter := &app.Plotter{} err = cl.Get(context.Background(), plotterObjectKey, plotter) g.Expect(err).NotTo(gomega.HaveOccurred()) // mark the plotter as in error state errorMsg := "failure to orchestrate modules" plotter.Status.ObservedState.Error = errorMsg g.Expect(cl.Update(context.Background(), plotter)).NotTo(gomega.HaveOccurred()) // the new reconcile should update the application state _, err = r.Reconcile(context.Background(), req) g.Expect(err).To(gomega.BeNil()) err = cl.Get(context.Background(), req.NamespacedName, application) g.Expect(err).To(gomega.BeNil(), "Cannot fetch fybrikapplication") g.Expect(getErrorMessages(application)).To(gomega.ContainSubstring(errorMsg)) // mark the plotter as ready plotter.Status.ObservedState.Error = "" plotter.Status.ObservedState.Ready = true g.Expect(cl.Update(context.Background(), plotter)).NotTo(gomega.HaveOccurred()) // the new reconcile should update the application state _, err = r.Reconcile(context.Background(), req) g.Expect(err).To(gomega.BeNil()) err = cl.Get(context.Background(), req.NamespacedName, application) g.Expect(err).To(gomega.BeNil(), "Cannot fetch fybrikapplication") g.Expect(application.Status.Ready).To(gomega.BeTrue()) }
explode_data.jsonl/45025
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1148 }
[ 2830, 3393, 25605, 465, 4289, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 197, 322, 2573, 279, 5925, 311, 4401, 3856, 369, 13694, 18422, 624, 6725, 69, 4202, 7395...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoop1(t *testing.T) { const SCRIPT = ` function A() { var x = 1; for (var i = 0; i < 1; i++) { var x = 2; } return x; } var rv = A(); ` testScript(SCRIPT, intToValue(2), t) }
explode_data.jsonl/75218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 14620, 16, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 7527, 362, 368, 341, 7782, 2405, 856, 284, 220, 16, 280, 7782, 2023, 320, 947, 600, 284, 220, 15, 26, 600, 366, 220, 16, 26, 600, 2457, 341, 17362, 2405...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1