text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestOnActivate(t *testing.T) { testAPI := &plugintest.API{} p := NewPlugin( config.BuildConfig{ Manifest: manifest, BuildHash: BuildHash, BuildHashShort: BuildHashShort, BuildDate: BuildDate, }, ) p.API = testAPI testAPI.On("GetServerVersion").Return("5.30.1") testAPI.On("KVGet", "mmi_botid").Return([]byte("the_bot_id"), nil) username := "appsbot" displayName := "Mattermost Apps" description := "Mattermost Apps Registry and API proxy." testAPI.On("PatchBot", "the_bot_id", &model.BotPatch{ Username: &username, DisplayName: &displayName, Description: &description, }).Return(nil, nil) testAPI.On("GetBundlePath").Return("../", nil) testAPI.On("SetProfileImage", "the_bot_id", mock.AnythingOfType("[]uint8")).Return(nil) testAPI.On("LoadPluginConfiguration", mock.AnythingOfType("*config.StoredConfig")).Return(nil) listenAddress := "localhost:8065" siteURL := "http://" + listenAddress testAPI.On("GetConfig").Return(&model.Config{ ServiceSettings: model.ServiceSettings{ SiteURL: &siteURL, ListenAddress: &listenAddress, }, }) testAPI.On("GetLicense").Return(&model.License{ Features: &model.Features{}, SkuShortName: "professional", }) expectLog(testAPI, "LogDebug", 9) expectLog(testAPI, "LogInfo", 5) testAPI.On("RegisterCommand", mock.AnythingOfType("*model.Command")).Return(nil) testAPI.On("PublishWebSocketEvent", "plugin_enabled", map[string]interface{}{"version": manifest.Version}, &model.WebsocketBroadcast{}) err := p.OnActivate() require.NoError(t, err) }
explode_data.jsonl/13315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 592 }
[ 2830, 3393, 1925, 31242, 1155, 353, 8840, 836, 8, 341, 18185, 7082, 1669, 609, 47474, 396, 477, 24922, 16094, 3223, 1669, 1532, 11546, 1006, 197, 25873, 25212, 2648, 515, 298, 197, 38495, 25, 981, 14455, 345, 298, 197, 11066, 6370, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIntentResolution(t *testing.T) { defer leaktest.AfterTest(t)() testCases := []struct { keys []string ranges [][2]string exp []string }{ // Note that the first key (or, range, if no keys present) determines // the base key of the Txn. In these examples, it's always the first // range, so "a"-"s" is local. Any examples added must stick to that // convention and write the first key into "a"-"s". { keys: []string{"a", "x", "b", "c", "s"}, ranges: [][2]string{{"d", "e"}}, exp: []string{"s", "x"}, }, { keys: []string{"h", "y", "z"}, ranges: [][2]string{{"g", "z"}}, exp: []string{`"s"-"z\x00"`}, }, { keys: []string{"q", "s"}, ranges: [][2]string{{"a", "w"}, {"b", "x"}, {"t", "u"}}, exp: []string{`"s"-"x"`}, }, { keys: []string{"q", "s", "y", "v"}, ranges: [][2]string{{"a", "s"}, {"r", "t"}, {"u", "w"}}, exp: []string{`"s"-"t"`, `"u"-"w"`, "y"}, }, } splitKey := []byte("s") for i, tc := range testCases { // Use deterministic randomness to randomly put the writes in separate // batches or commit them with EndTransaction. 
rnd, seed := randutil.NewPseudoRand() log.Infof(context.Background(), "%d: using intent test seed %d", i, seed) results := map[string]struct{}{} func() { var storeKnobs storage.StoreTestingKnobs var mu syncutil.Mutex closer := make(chan struct{}, 2) var done bool storeKnobs.EvalKnobs.TestingEvalFilter = func(filterArgs storagebase.FilterArgs) *roachpb.Error { mu.Lock() defer mu.Unlock() header := filterArgs.Req.Header() // Ignore anything outside of the intent key range of "a" - "z" if header.Key.Compare(roachpb.Key("a")) < 0 || header.Key.Compare(roachpb.Key("z")) > 0 { return nil } var entry string switch arg := filterArgs.Req.(type) { case *roachpb.ResolveIntentRequest: if arg.Status == roachpb.COMMITTED { entry = string(header.Key) } case *roachpb.ResolveIntentRangeRequest: if arg.Status == roachpb.COMMITTED { entry = fmt.Sprintf("%s-%s", header.Key, header.EndKey) } } if entry != "" { log.Infof(context.Background(), "got %s", entry) results[entry] = struct{}{} } if len(results) >= len(tc.exp) && !done { done = true close(closer) } return nil } // TODO(benesch): starting a test server for every test case is needlessly // inefficient. s, _, kvDB := serverutils.StartServer(t, base.TestServerArgs{ Knobs: base.TestingKnobs{Store: &storeKnobs}}) defer s.Stopper().Stop(context.TODO()) // Split the Range. This should not have any asynchronous intents. if err := kvDB.AdminSplit(context.TODO(), splitKey, splitKey, true /* manual */); err != nil { t.Fatal(err) } if err := kvDB.Txn(context.TODO(), func(ctx context.Context, txn *client.Txn) error { b := txn.NewBatch() if tc.keys[0] >= string(splitKey) { t.Fatalf("first key %s must be < split key %s", tc.keys[0], splitKey) } for i, key := range tc.keys { // The first write must not go to batch, it anchors the // transaction to the correct range. 
local := i != 0 && rnd.Intn(2) == 0 log.Infof(context.Background(), "%d: %s: local: %t", i, key, local) if local { b.Put(key, "test") } else if err := txn.Put(ctx, key, "test"); err != nil { return err } } for _, kr := range tc.ranges { local := rnd.Intn(2) == 0 log.Infof(context.Background(), "%d: [%s,%s): local: %t", i, kr[0], kr[1], local) if local { b.DelRange(kr[0], kr[1], false) } else if err := txn.DelRange(ctx, kr[0], kr[1]); err != nil { return err } } return txn.CommitInBatch(ctx, b) }); err != nil { t.Fatalf("%d: %s", i, err) } <-closer // wait for async intents // Use Raft to make it likely that any straddling intent // resolutions have come in. Don't touch existing data; that could // generate unexpected intent resolutions. if _, err := kvDB.Scan(context.TODO(), "z\x00", "z\x00\x00", 0); err != nil { t.Fatal(err) } }() // Verification. Note that this runs after the system has stopped, so that // everything asynchronous has already happened. expResult := tc.exp sort.Strings(expResult) var actResult []string for k := range results { actResult = append(actResult, k) } sort.Strings(actResult) if !reflect.DeepEqual(actResult, expResult) { t.Fatalf("%d: unexpected non-local intents, expected %s: %s", i, expResult, actResult) } } }
explode_data.jsonl/50993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2023 }
[ 2830, 3393, 11536, 38106, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 18185, 37302, 1669, 3056, 1235, 341, 197, 80112, 256, 3056, 917, 198, 197, 7000, 5520, 508, 1457, 17, 30953, 198, 197, 48558, 262, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestRewriteMailtoLink(t *testing.T) { description := `<a href="mailto:ryan@qwantz.com?subject=blah%20blah">contact</a>` output := Rewriter("https://www.qwantz.com/", description, ``) expected := `<a href="mailto:ryan@qwantz.com?subject=blah%20blah">contact [blah blah]</a>` if expected != output { t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) } }
explode_data.jsonl/21477
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 58465, 1247, 16702, 983, 3939, 1155, 353, 8840, 836, 8, 341, 42407, 1669, 30586, 64, 1801, 428, 38526, 25, 56758, 31, 80, 52657, 89, 905, 30, 11501, 28, 70614, 4, 17, 15, 70614, 755, 6287, 522, 64, 53722, 21170, 1669, 31...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetImageRepoFromImageTrigger(t *testing.T) { trigger := &model.ImageTrigger{} valuesWithoutPrefix := map[string]interface{}{ "image": map[string]interface{}{"repository": "test-image"}, } valuesWithPrefix := map[string]interface{}{ "image": map[string]interface{}{"prefix": "example.io/", "repository": "test-image"}, } assert.Equal(t, image.DefaultDockerRegistry+"/test-image", GetImageRefFromImageTrigger(trigger, valuesWithoutPrefix).WithoutTag()) assert.Equal(t, "example.io/test-image", GetImageRefFromImageTrigger(trigger, valuesWithPrefix).WithoutTag()) }
explode_data.jsonl/3667
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 1949, 1906, 25243, 3830, 1906, 17939, 1155, 353, 8840, 836, 8, 341, 83228, 1669, 609, 2528, 7528, 17939, 16094, 45939, 26040, 14335, 1669, 2415, 14032, 31344, 67066, 197, 197, 1, 1805, 788, 2415, 14032, 31344, 6257, 4913, 2331...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoRecordedMatchingMethodNameForAReceiver(t *testing.T) { reporter, ctrl := createFixtures(t) subject := new(Subject) ctrl.RecordCall(subject, "FooMethod", "argument") reporter.assertFatal(func() { ctrl.Call(subject, "NotRecordedMethod", "argument") }, "Unexpected call to", "there are no expected calls of the method \"NotRecordedMethod\" for that receiver") reporter.assertFatal(func() { // The expected call wasn't made. ctrl.Finish() }) }
explode_data.jsonl/17268
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 2753, 6471, 291, 64430, 75648, 2461, 32, 25436, 1155, 353, 8840, 836, 8, 341, 69931, 261, 11, 23743, 1669, 1855, 25958, 18513, 1155, 340, 28624, 583, 1669, 501, 7, 13019, 692, 84381, 49959, 7220, 29128, 11, 330, 40923, 3523,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_validateVolumeCapabilities(t *testing.T) { testsBench := []struct { volCaps []*csi.VolumeCapability err error }{ { volCaps: nil, err: errVolumeCapabilitiesIsNil, }, { volCaps: []*csi.VolumeCapability{ { AccessType: &csi.VolumeCapability_Block{}, }, }, err: errAccessModeNotSupported, }, { volCaps: []*csi.VolumeCapability{ { AccessType: &csi.VolumeCapability_Block{}, AccessMode: &csi.VolumeCapability_AccessMode{ Mode: csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER, }, }, }, err: errAccessModeNotSupported, }, { volCaps: []*csi.VolumeCapability{ { AccessType: &csi.VolumeCapability_Block{}, AccessMode: &csi.VolumeCapability_AccessMode{ Mode: csi.VolumeCapability_AccessMode_SINGLE_NODE_WRITER, }, }, }, err: nil, }, { volCaps: []*csi.VolumeCapability{ { AccessType: &csi.VolumeCapability_Mount{}, AccessMode: &csi.VolumeCapability_AccessMode{ Mode: csi.VolumeCapability_AccessMode_SINGLE_NODE_WRITER, }, }, }, err: nil, }, } for _, test := range testsBench { err := validateVolumeCapabilities(test.volCaps) Equals(t, test.err, err) } }
explode_data.jsonl/53193
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 598 }
[ 2830, 3393, 42681, 18902, 55315, 1155, 353, 8840, 836, 8, 341, 78216, 33, 19762, 1669, 3056, 1235, 341, 197, 5195, 337, 60741, 29838, 63229, 79106, 63746, 198, 197, 9859, 257, 1465, 198, 197, 59403, 197, 197, 515, 298, 5195, 337, 60741,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDistinctTypes(t *testing.T) { g := NewGomegaWithT(t) cases := []struct { list FieldList expected TypeSet }{ {FieldList{id}, NewTypeSet(i64)}, {FieldList{id, id, id}, NewTypeSet(i64)}, {FieldList{id, category}, NewTypeSet(i64, cat)}, {FieldList{id, category, name, qual, diff, age, bmi, active, labels, fave, avatar, updated}, NewTypeSet(i64, boo, cat, str, spt, ipt, upt, fpt, bgi, sli, bys, tim)}, } for _, c := range cases { s := c.list.DistinctTypes() g.Expect(NewTypeSet(s...)).To(Equal(c.expected)) } }
explode_data.jsonl/16860
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 72767, 4173, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 692, 1444, 2264, 1669, 3056, 1235, 341, 197, 14440, 257, 8601, 852, 198, 197, 42400, 3990, 1649, 198, 197, 59403, 197, 197, 90, 1877, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNode_LeavingETA(t *testing.T) { n := NewNode(gen.Reference(), insolar.StaticRoleVirtual, nil, "127.0.0.1", "123") assert.Equal(t, insolar.NodeReady, n.GetState()) n.(MutableNode).SetLeavingETA(25) assert.Equal(t, insolar.NodeLeaving, n.GetState()) assert.EqualValues(t, 25, n.LeavingETA()) }
explode_data.jsonl/46251
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 1955, 62, 2304, 2317, 20695, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 1532, 1955, 36884, 58416, 1507, 1640, 7417, 58826, 9030, 33026, 11, 2092, 11, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 497, 330, 16, 17, 18, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNamespaceBootstrapUnfulfilledShards(t *testing.T) { unfulfilledRangeForShards := func(ids ...uint32) result.IndexBootstrapResult { var ( unfulfilledRange = result.NewIndexBootstrapResult() unfulfilledTo = xtime.Now().Truncate(time.Hour) unfulfilledFrom = unfulfilledTo.Add(-time.Hour) ) unfulfilledRange.SetUnfulfilled(result.NewShardTimeRangesFromRange( unfulfilledFrom, unfulfilledTo, ids...)) return unfulfilledRange } shardIDs := []uint32{0, 1} tests := []struct { name string withIndex bool unfulfilledShardIDs []uint32 nsResult bootstrap.NamespaceResult }{ { name: "no index, unfulfilled data", withIndex: false, unfulfilledShardIDs: []uint32{0}, nsResult: bootstrap.NamespaceResult{ DataResult: unfulfilledRangeForShards(0), Shards: shardIDs, }, }, { name: "with index, unfulfilled data", withIndex: true, unfulfilledShardIDs: []uint32{0, 1}, nsResult: bootstrap.NamespaceResult{ DataResult: unfulfilledRangeForShards(0, 1), IndexResult: unfulfilledRangeForShards(), Shards: shardIDs, }, }, { name: "with index, unfulfilled index", withIndex: true, unfulfilledShardIDs: []uint32{1}, nsResult: bootstrap.NamespaceResult{ DataResult: unfulfilledRangeForShards(), IndexResult: unfulfilledRangeForShards(1), Shards: shardIDs, }, }, { name: "with index, unfulfilled data and index", withIndex: true, unfulfilledShardIDs: []uint32{0, 1}, nsResult: bootstrap.NamespaceResult{ DataResult: unfulfilledRangeForShards(0), IndexResult: unfulfilledRangeForShards(1), Shards: shardIDs, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { testNamespaceBootstrapUnfulfilledShards(t, shardIDs, tt.unfulfilledShardIDs, tt.withIndex, tt.nsResult) }) } }
explode_data.jsonl/35353
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 925 }
[ 2830, 3393, 22699, 45511, 1806, 1262, 27511, 2016, 2347, 1155, 353, 8840, 836, 8, 341, 20479, 1262, 27511, 6046, 2461, 2016, 2347, 1669, 2915, 43448, 2503, 2496, 18, 17, 8, 1102, 18338, 45511, 2077, 341, 197, 2405, 2399, 298, 20479, 126...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProtodesc_GetMethodDescFromProtoSet(t *testing.T) { t.Run("invalid path", func(t *testing.T) { md, err := GetMethodDescFromProtoSet("pkg.Call", "invalid.protoset") assert.Error(t, err) assert.Nil(t, md) }) t.Run("invalid call symbol", func(t *testing.T) { md, err := GetMethodDescFromProtoSet("pkg.Call", "../testdata/bundle.protoset") assert.Error(t, err) assert.Nil(t, md) }) t.Run("invalid package", func(t *testing.T) { md, err := GetMethodDescFromProtoSet("helloworld.pkg.SayHello", "../testdata/bundle.protoset") assert.Error(t, err) assert.Nil(t, md) }) t.Run("invalid method", func(t *testing.T) { md, err := GetMethodDescFromProtoSet("helloworld.Greeter.Foo", "../testdata/bundle.protoset") assert.Error(t, err) assert.Nil(t, md) }) t.Run("valid symbol", func(t *testing.T) { md, err := GetMethodDescFromProtoSet("helloworld.Greeter.SayHello", "../testdata/bundle.protoset") assert.NoError(t, err) assert.NotNil(t, md) }) t.Run("valid symbol proto 2", func(t *testing.T) { md, err := GetMethodDescFromProtoSet("cap.Capper.Cap", "../testdata/bundle.protoset") assert.NoError(t, err) assert.NotNil(t, md) }) t.Run("valid symbol slashes", func(t *testing.T) { md, err := GetMethodDescFromProtoSet("helloworld.Greeter/SayHello", "../testdata/bundle.protoset") assert.NoError(t, err) assert.NotNil(t, md) }) }
explode_data.jsonl/10865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 585 }
[ 2830, 3393, 12423, 2539, 66, 13614, 3523, 11065, 3830, 31549, 1649, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 11808, 1815, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 84374, 11, 1848, 1669, 2126, 3523, 11065, 3830, 31549, 1649, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseTime(t *testing.T) { // Assert equals for input and output works for a precision upto three decimal places cases := map[string]struct { formatName, input string expectedOutput time.Time }{ "UnixTest1": { formatName: UnixTimeFormatName, input: "946845296.123", expectedOutput: time.Date(2000, time.January, 2, 20, 34, 56, .123e9, time.UTC), }, "UnixTest2": { formatName: UnixTimeFormatName, input: "946845296.12344", expectedOutput: time.Date(2000, time.January, 2, 20, 34, 56, .123e9, time.UTC), }, "UnixTest3": { formatName: UnixTimeFormatName, input: "946845296.1229999", expectedOutput: time.Date(2000, time.January, 2, 20, 34, 56, .123e9, time.UTC), }, "ISO8601Test1": { formatName: ISO8601TimeFormatName, input: "2000-01-02T20:34:56.123Z", expectedOutput: time.Date(2000, time.January, 2, 20, 34, 56, .123e9, time.UTC), }, "ISO8601Test2": { formatName: ISO8601TimeFormatName, input: "2000-01-02T20:34:56.123456789Z", expectedOutput: time.Date(2000, time.January, 2, 20, 34, 56, .123456789e9, time.UTC), }, "RFC822Test1": { formatName: RFC822TimeFormatName, input: "Sun, 2 Jan 2000 20:34:56 GMT", expectedOutput: time.Date(2000, time.January, 2, 20, 34, 56, 0, time.UTC), }, } for name, c := range cases { t.Run(name, func(t *testing.T) { timeVal, err := ParseTime(c.formatName, c.input) if err != nil { t.Errorf("unable to parse time, %v", err) } if timeVal.UTC() != c.expectedOutput { t.Errorf("input: %v \n and output time %v,\n don't match for %s format ", c.input, c.expectedOutput, c.formatName) } }) } }
explode_data.jsonl/57379
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 783 }
[ 2830, 3393, 14463, 1462, 1155, 353, 8840, 836, 8, 1476, 197, 322, 5319, 16819, 369, 1946, 323, 2550, 4278, 369, 264, 16052, 80126, 2326, 12122, 7482, 198, 1444, 2264, 1669, 2415, 14032, 60, 1235, 341, 197, 59416, 675, 11, 1946, 914, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGet(t *testing.T) { // reset registered locales : Locales = map[string]map[string]string{} // registrer a test locale fr_FR := map[string]string{} fr_FR["test_text"] = "lorem ipsum" Locales["fr_FR"] = fr_FR // ____________________________________________________________ res, warn := Get("test_text", "fr_FR") if warn != nil { switch warn.(type) { case *WarningLocaleNotFound: t.Error("Unexpected warning : the locale should exists.") case *WarningTextNotFoundInLocale: t.Error("Unexpected warning : the text should exists in the locale.") default: t.Errorf("Unexpected warning : unknown warning : %+v", warn) } } if res != fr_FR["test_text"] { t.Errorf("The translated text should be the translated text, <%s>, got <%s>.", fr_FR["test_text"], res) } // ____________________________________________________________ expectedText := "this is not registered" res, warn = Get(expectedText, "fr_FR") if warn == nil { t.Error("Getting translation should returns a warning because it doesn't exists in the requested locale.") } if _, goodType := warn.(WarningTextNotFoundInLocale); !goodType { t.Errorf("Wrong warning type : %+v.", reflect.TypeOf(warn)) } if res != expectedText { t.Errorf("The translated text should be the requested text, <%s>, got <%s>.", expectedText, res) } // ____________________________________________________________ res, warn = Get(expectedText, "unknown") if warn == nil { t.Error("Getting translation should returns a warning because the requested locale doesn't exists.") } if _, goodType := warn.(WarningLocaleNotFound); !goodType { t.Errorf("Wrong warning type : %+v.", reflect.TypeOf(warn)) } if res != expectedText { t.Errorf("The translated text should be the requested text, <%s>, got <%s>.", expectedText, res) } }
explode_data.jsonl/13291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 580 }
[ 2830, 3393, 1949, 1155, 353, 8840, 836, 8, 1476, 197, 322, 7585, 9681, 52297, 6260, 197, 9152, 3831, 284, 2415, 14032, 60, 2186, 14032, 30953, 31483, 197, 322, 22855, 261, 264, 1273, 18565, 198, 97714, 14220, 1669, 2415, 14032, 30953, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestListFollowedStreams(t *testing.T) { setup() defer teardown() mux.HandleFunc("/streams/followed", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testParams(t, r, params{ "limit": "1", "offset": "1", "hls": "true", }) fmt.Fprint(w, `{ "_total": 1, "streams": [ { "_id": 1, "preview": { "medium": "m", "small": "s", "large": "l", "template": "t" }, "game": "g", "channel": { "mature": null, "background": "b", "updated_at": "2013-02-15T15:22:24Z", "_id": 1, "status": "s", "logo": "l", "teams": [], "url": "u", "display_name": "d", "game": "g", "banner": "b", "name": "n", "delay": 0, "video_banner": null, "_links": { "chat": "c", "subscriptions": "s", "features": "f", "commercial": "c", "stream_key": "s", "editors": "e", "videos": "v", "self": "s", "follows": "f" }, "created_at": "2011-12-23T18:03:44Z" }, "viewers": 1, "created_at": "2014-09-12T02:03:17Z", "_links": { "self": "h" } } ], "_links": { "summary": "h", "followed": "h", "next": "https://api.twitch.tv/kraken/streams?channel=zisss%2Cvoyboy&game=Diablo+III&limit=100&offset=100", "featured": "f", "self": "https://api.twitch.tv/kraken/streams?channel=zisss%2Cvoyboy&game=Diablo+III&limit=100&offset=0" } }`) }) opts := &RequestOptions{HLS: true, ListOptions: ListOptions{Limit: 1, Offset: 1}} got, resp, err := client.Users.ListFollowedStreams(opts) if err != nil { t.Errorf("Users.ListFollowedStreams: returned error: %v", err) } testListResponse(t, resp, intPtr(1), intPtr(100), nil) want := []Stream{ Stream{ ID: intPtr(1), Viewers: intPtr(1), CreatedAt: stringPtr("2014-09-12T02:03:17Z"), Preview: assetPtr(), Channel: channelPtr(), Game: stringPtr("g"), }, } if !reflect.DeepEqual(got, want) { t.Errorf("Users.ListFollowedStreams response did not match:\nwant: %+v\ngot: %+v", want, got) } }
explode_data.jsonl/62971
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 814 }
[ 2830, 3393, 852, 12480, 291, 73576, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 60975, 6663, 1544, 291, 497, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 18185, 3523, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVote(t *testing.T) { p := poll.New("test", []string{"option"}) if err := p.Vote("option", "voter"); err != nil { t.Fatalf("unexpected error: %v", err.Error()) } if err := p.Vote("option", "voter"); err != poll.ErrorVoterAlreadyVoted { t.Fatalf("expected error: %v", poll.ErrorVoterAlreadyVoted) } }
explode_data.jsonl/54884
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 41412, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 7085, 7121, 445, 1944, 497, 3056, 917, 4913, 2047, 23625, 743, 1848, 1669, 281, 5058, 1272, 445, 2047, 497, 330, 85, 25392, 5038, 1848, 961, 2092, 341, 197, 3244, 30762, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAssignStrict(t *testing.T) { const SCRIPT = ` 'use strict'; try { eval("eval = 42"); } catch(e) { var rv = e instanceof SyntaxError } ` testScript(SCRIPT, valueTrue, t) }
explode_data.jsonl/75279
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 28933, 41857, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 197, 19176, 7304, 2357, 6799, 341, 197, 93413, 445, 14170, 284, 220, 19, 17, 797, 197, 92, 2287, 2026, 8, 341, 197, 2405, 17570, 284, 384, 8083, 32117, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFloatConvert(t *testing.T) { tests := []struct { name string metrics []telegraf.Metric expected bool }{ { name: "int64 field", metrics: []telegraf.Metric{ testutil.MustMetric( "cpu", map[string]string{}, map[string]interface{}{ "time_idle": int64(42.0), }, time.Now()), }, expected: true, }, { name: "uint64 field", metrics: []telegraf.Metric{ testutil.MustMetric( "cpu", map[string]string{}, map[string]interface{}{ "time_idle": uint64(42.0), }, time.Now()), }, expected: true, }, { name: "float64 field", metrics: []telegraf.Metric{ testutil.MustMetric( "cpu", map[string]string{}, map[string]interface{}{ "time_idle": float64(42.0), }, time.Now()), }, expected: true, }, { name: "bool field true", metrics: []telegraf.Metric{ testutil.MustMetric( "cpu", map[string]string{}, map[string]interface{}{ "time_idle": true, }, time.Now()), }, expected: true, }, { name: "bool field false", metrics: []telegraf.Metric{ testutil.MustMetric( "cpu", map[string]string{}, map[string]interface{}{ "time_idle": false, }, time.Now()), }, expected: false, }, { name: "string field", metrics: []telegraf.Metric{ testutil.MustMetric( "cpu", map[string]string{}, map[string]interface{}{ "time_idle": "42.0", }, time.Now()), }, expected: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { compares := &health.Compares{ Field: "time_idle", GT: addr(0.0), } actual := compares.Check(tt.metrics) require.Equal(t, tt.expected, actual) }) } }
explode_data.jsonl/65115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1047 }
[ 2830, 3393, 5442, 12012, 1155, 353, 8840, 836, 8, 972, 78216, 1669, 3056, 1235, 972, 197, 11609, 257, 914, 319, 197, 2109, 13468, 220, 3056, 15410, 76039, 1321, 16340, 319, 197, 42400, 1807, 319, 197, 92, 1666, 197, 197, 1666, 298, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResourceTokens_TokenExpiry(t *testing.T) { s, _, _, _, broker, err := setupHydraTest(true) if err != nil { t.Fatalf("setupHydraTest() failed: %v", err) } resp := sendResourceTokens(t, s, broker, "invalid", true) if resp.StatusCode != http.StatusUnauthorized { t.Errorf("status = %d, wants %d", resp.StatusCode, http.StatusUnauthorized) } }
explode_data.jsonl/18508
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 4783, 29300, 1139, 1679, 840, 48209, 1155, 353, 8840, 836, 8, 341, 1903, 11, 8358, 8358, 8358, 22316, 11, 1848, 1669, 6505, 30816, 22248, 2271, 3715, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 15188, 30816, 22248...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetAddrTxs(t *testing.T) { q, mem := initEnv(0) defer q.Close() defer mem.Close() // add tx _, err := add4TxHash(mem.client) if err != nil { t.Error("add tx error", err.Error()) return } ad := address.PubKeyToAddress(privKey.PubKey().Bytes()).String() addrs := []string{ad} msg := mem.client.NewMessage("mempool", types.EventGetAddrTxs, &types.ReqAddrs{Addrs: addrs}) mem.client.Send(msg, true) data, err := mem.client.Wait(msg) if err != nil { t.Error(err) return } txsFact := data.GetData().(*types.TransactionDetails).Txs txsExpect := mem.GetAccTxs(&types.ReqAddrs{Addrs: addrs}).Txs if len(txsExpect) != len(txsFact) { t.Error("TestGetAddrTxs failed", "length not match") } same := 0 for _, i := range txsExpect { for _, j := range txsFact { if j.Tx == i.Tx { same++ break } } } if same != len(txsExpect) { t.Error("TestGetAddrTxs failed", same) } }
explode_data.jsonl/16835
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 404 }
[ 2830, 3393, 1949, 13986, 51, 18561, 1155, 353, 8840, 836, 8, 341, 18534, 11, 1833, 1669, 2930, 14359, 7, 15, 340, 16867, 2804, 10421, 741, 16867, 1833, 10421, 2822, 197, 322, 912, 9854, 198, 197, 6878, 1848, 1669, 912, 19, 31584, 6370...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRefreshAliases(t *testing.T) { for _, tt := range []struct { name string configureRequest *plugin.ConfigureRequest err string fakeEntries []fakeKeyEntry expectedEntries []fakeKeyEntry updateAliasErr string }{ { name: "refresh aliases error", configureRequest: configureRequestWithDefaults(t), err: "update failure", updateAliasErr: "update failure", fakeEntries: []fakeKeyEntry{ { AliasName: aws.String("alias/SPIRE_SERVER/test_example_org/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/id_01"), KeyID: aws.String("key_id_01"), KeySpec: types.CustomerMasterKeySpecRsa4096, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, }, }, { name: "refresh aliases succeeds", configureRequest: configureRequestWithDefaults(t), fakeEntries: []fakeKeyEntry{ { AliasName: aws.String("alias/SPIRE_SERVER/test_example_org/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/id_01"), KeyID: aws.String("key_id_01"), KeySpec: types.CustomerMasterKeySpecRsa4096, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/test_example_org/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/id_02"), KeyID: aws.String("key_id_02"), KeySpec: types.CustomerMasterKeySpecRsa2048, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/test_example_org/another_server_id/id_03"), KeyID: aws.String("key_id_03"), KeySpec: types.CustomerMasterKeySpecEccNistP384, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/another_td/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/id_04"), KeyID: aws.String("key_id_04"), KeySpec: types.CustomerMasterKeySpecRsa4096, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, { AliasName: 
aws.String("alias/SPIRE_SERVER/another_td/another_server_id/id_05"), KeyID: aws.String("key_id_05"), KeySpec: types.CustomerMasterKeySpecEccNistP384, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/unrelated"), KeyID: aws.String("key_id_06"), KeySpec: types.CustomerMasterKeySpecEccNistP384, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/unrelated/unrelated/id_07"), KeyID: aws.String("key_id_07"), KeySpec: types.CustomerMasterKeySpecEccNistP384, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, { AliasName: nil, KeyID: aws.String("key_id_08"), KeySpec: types.CustomerMasterKeySpecRsa4096, Enabled: true, PublicKey: []byte("foo"), CreationDate: &unixEpoch, AliasLastUpdatedDate: &unixEpoch, }, }, expectedEntries: []fakeKeyEntry{ { AliasName: aws.String("alias/SPIRE_SERVER/test_example_org/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/id_01"), KeyID: aws.String("key_id_01"), AliasLastUpdatedDate: &refreshedDate, }, { AliasName: aws.String("alias/SPIRE_SERVER/test_example_org/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/id_02"), KeyID: aws.String("key_id_02"), AliasLastUpdatedDate: &refreshedDate, }, { AliasName: aws.String("alias/SPIRE_SERVER/test_example_org/another_server_id/id_03"), KeyID: aws.String("key_id_03"), AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/another_td/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/id_04"), KeyID: aws.String("key_id_04"), AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/another_td/another_server_id/id_05"), KeyID: aws.String("key_id_05"), AliasLastUpdatedDate: &unixEpoch, }, { AliasName: aws.String("alias/SPIRE_SERVER/unrelated"), KeyID: aws.String("key_id_06"), AliasLastUpdatedDate: &unixEpoch, }, { AliasName: 
aws.String("alias/SPIRE_SERVER/unrelated/unrelated/id_07"), KeyID: aws.String("key_id_07"), AliasLastUpdatedDate: &unixEpoch, }, { AliasName: nil, KeyID: aws.String("key_id_08"), AliasLastUpdatedDate: &unixEpoch, }, }, }, } { tt := tt t.Run(tt.name, func(t *testing.T) { // setup ts := setupTest(t) ts.fakeClient.setEntries(tt.fakeEntries) ts.fakeClient.setUpdateAliasErr(tt.updateAliasErr) refreshAliasesSignal := make(chan error) ts.plugin.hooks.refreshAliasesSignal = refreshAliasesSignal // exercise _, err := ts.plugin.Configure(ctx, tt.configureRequest) require.NoError(t, err) // wait for refresh alias task to be initialized _ = waitForSignal(t, refreshAliasesSignal) // move the clock forward so the task is run ts.clockHook.Add(6 * time.Hour) // wait for refresh aliases to be run err = waitForSignal(t, refreshAliasesSignal) // assert if tt.updateAliasErr != "" { require.NotNil(t, err) require.Equal(t, tt.err, err.Error()) return } require.NoError(t, err) storedAliases := ts.fakeClient.store.aliases require.Len(t, storedAliases, 7) storedKeys := ts.fakeClient.store.keyEntries require.Len(t, storedKeys, len(tt.expectedEntries)) for _, expected := range tt.expectedEntries { if expected.AliasName == nil { continue } // check aliases alias, ok := storedAliases[*expected.AliasName] require.True(t, ok, "Expected alias was not present on end result: %q", *expected.AliasName) require.EqualValues(t, expected.AliasLastUpdatedDate.String(), alias.KeyEntry.AliasLastUpdatedDate.String(), *expected.AliasName) // check keys key, ok := storedKeys[*expected.KeyID] require.True(t, ok, "Expected alias was not present on end result: %q", *expected.KeyID) require.EqualValues(t, expected.AliasLastUpdatedDate.String(), key.AliasLastUpdatedDate.String(), *expected.KeyID) } }) } }
explode_data.jsonl/69040
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3817 }
[ 2830, 3393, 14567, 95209, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 1797, 914, 198, 197, 197, 21002, 1900, 353, 9138, 78281, 1900, 198, 197, 9859, 1060, 914, 198, 197, 1166, 726, 24533, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFIPS(t *testing.T) { tests := []struct { inConfigString string inFIPSMode bool outError bool }{ { inConfigString: configWithoutFIPSKex, inFIPSMode: true, outError: true, }, { inConfigString: configWithoutFIPSKex, inFIPSMode: false, outError: false, }, { inConfigString: configWithFIPSKex, inFIPSMode: true, outError: false, }, { inConfigString: configWithFIPSKex, inFIPSMode: false, outError: false, }, } for i, tt := range tests { comment := fmt.Sprintf("Test %v", i) clf := CommandLineFlags{ ConfigString: base64.StdEncoding.EncodeToString([]byte(tt.inConfigString)), FIPS: tt.inFIPSMode, } cfg := service.MakeDefaultConfig() service.ApplyDefaults(cfg) service.ApplyFIPSDefaults(cfg) err := Configure(&clf, cfg) if tt.outError { require.Error(t, err, comment) } else { require.NoError(t, err, comment) } } }
explode_data.jsonl/47173
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 464 }
[ 2830, 3393, 37, 25944, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 2648, 703, 914, 198, 197, 17430, 37, 25944, 3636, 257, 1807, 198, 197, 13967, 1454, 981, 1807, 198, 197, 59403, 197, 197, 515, 298, 17430, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_CrawlerBadHead(t *testing.T) { t.Parallel() ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) { w.WriteHeader(http.StatusNotFound) })) defer ts.Close() c := New("", 1, 1, time.Millisecond*50, false, false, RobotsIgnore) if err := c.Run(ts.URL, nil); err != nil { t.Error("run - error") } }
explode_data.jsonl/2967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 920, 33369, 17082, 12346, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 1758, 37508, 11, 716, 353, 1254, 9659, 8, 341, 197, 6692, 69794, 19886, 10538, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAccAlksIamTrustRole_Basic(t *testing.T) { var resp alks.IamRoleResponse resource.Test(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAlksIamRoleDestroy(&resp), Steps: []resource.TestStep{ resource.TestStep{ Config: testAccCheckAlksIamTrustRoleConfigBasic, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr( "alks_iamtrustrole.bar", "name", "bar"), resource.TestCheckResourceAttr( "alks_iamtrustrole.bar", "type", "Inner Account"), ), }, }, }) }
explode_data.jsonl/32399
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 14603, 2101, 2787, 40, 309, 45548, 9030, 1668, 5971, 1155, 353, 8840, 836, 8, 341, 2405, 9039, 452, 2787, 2447, 309, 9030, 2582, 271, 50346, 8787, 1155, 11, 5101, 31363, 515, 197, 197, 4703, 3973, 25, 257, 2915, 368, 314, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetVolumeCapabilities(t *testing.T) { blockVolumeMode := v1.PersistentVolumeMode(v1.PersistentVolumeBlock) filesystemVolumeMode := v1.PersistentVolumeMode(v1.PersistentVolumeFilesystem) tests := []struct { name string volumeMode *v1.PersistentVolumeMode fsType string modes []v1.PersistentVolumeAccessMode mountOptions []string supportsSingleNodeMultiWriter bool expectedCapability *csi.VolumeCapability expectError bool }{ { name: "RWX", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteMany}, expectedCapability: createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER, nil), expectError: false, }, { name: "Block RWX", volumeMode: &blockVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteMany}, expectedCapability: createBlockCapability(csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER), expectError: false, }, { name: "RWX + specified fsType", fsType: "ext3", modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteMany}, expectedCapability: createMountCapability("ext3", csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER, nil), expectError: false, }, { name: "RWO", modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOnce}, expectedCapability: createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_SINGLE_NODE_WRITER, nil), expectError: false, }, { name: "Block RWO", volumeMode: &blockVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOnce}, expectedCapability: createBlockCapability(csi.VolumeCapability_AccessMode_SINGLE_NODE_WRITER), expectError: false, }, { name: "ROX", modes: []v1.PersistentVolumeAccessMode{v1.ReadOnlyMany}, expectedCapability: createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_MULTI_NODE_READER_ONLY, nil), expectError: false, }, { name: "RWX + anything", modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteMany, v1.ReadOnlyMany, v1.ReadWriteOnce}, expectedCapability: createMountCapability(defaultFSType, 
csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER, nil), expectError: false, }, { name: "mount options", modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteMany}, expectedCapability: createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER, []string{"first", "second"}), mountOptions: []string{"first", "second"}, expectError: false, }, { name: "ROX+RWO", modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOnce, v1.ReadOnlyMany}, expectedCapability: nil, expectError: true, // not possible in CSI }, { name: "nothing", modes: []v1.PersistentVolumeAccessMode{}, expectedCapability: nil, expectError: true, }, { name: "RWX with SINGLE_NODE_MULTI_WRITER capable driver", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteMany}, supportsSingleNodeMultiWriter: true, expectedCapability: createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_MULTI_NODE_MULTI_WRITER, nil), expectError: false, }, { name: "ROX + RWO with SINGLE_NODE_MULTI_WRITER capable driver", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadOnlyMany, v1.ReadWriteOnce}, supportsSingleNodeMultiWriter: true, expectedCapability: nil, expectError: true, }, { name: "ROX + RWOP with SINGLE_NODE_MULTI_WRITER capable driver", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadOnlyMany, v1.ReadWriteOncePod}, supportsSingleNodeMultiWriter: true, expectedCapability: nil, expectError: true, }, { name: "RWO + RWOP with SINGLE_NODE_MULTI_WRITER capable driver", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOnce, v1.ReadWriteOncePod}, supportsSingleNodeMultiWriter: true, expectedCapability: nil, expectError: true, }, { name: "ROX with SINGLE_NODE_MULTI_WRITER capable driver", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadOnlyMany}, supportsSingleNodeMultiWriter: true, expectedCapability: 
createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_MULTI_NODE_READER_ONLY, nil), expectError: false, }, { name: "RWO with SINGLE_NODE_MULTI_WRITER capable driver", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOnce}, supportsSingleNodeMultiWriter: true, expectedCapability: createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_SINGLE_NODE_MULTI_WRITER, nil), expectError: false, }, { name: "RWOP with SINGLE_NODE_MULTI_WRITER capable driver", volumeMode: &filesystemVolumeMode, modes: []v1.PersistentVolumeAccessMode{v1.ReadWriteOncePod}, supportsSingleNodeMultiWriter: true, expectedCapability: createMountCapability(defaultFSType, csi.VolumeCapability_AccessMode_SINGLE_NODE_SINGLE_WRITER, nil), expectError: false, }, { name: "nothing with SINGLE_NODE_MULTI_WRITER capable driver", modes: []v1.PersistentVolumeAccessMode{}, supportsSingleNodeMultiWriter: true, expectedCapability: nil, expectError: true, }, } for _, test := range tests { pv := &v1.PersistentVolume{ Spec: v1.PersistentVolumeSpec{ VolumeMode: test.volumeMode, AccessModes: test.modes, MountOptions: test.mountOptions, PersistentVolumeSource: v1.PersistentVolumeSource{ CSI: &v1.CSIPersistentVolumeSource{ FSType: test.fsType, }, }, }, } cap, err := GetVolumeCapabilities(&pv.Spec, test.supportsSingleNodeMultiWriter) if err == nil && test.expectError { t.Errorf("test %s: expected error, got none", test.name) } if err != nil && !test.expectError { t.Errorf("test %s: got error: %s", test.name, err) } if !test.expectError && !reflect.DeepEqual(cap, test.expectedCapability) { t.Errorf("test %s: unexpected VolumeCapability: %+v", test.name, cap) } } }
explode_data.jsonl/42154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3721 }
[ 2830, 3393, 1949, 18902, 55315, 1155, 353, 8840, 836, 8, 341, 47996, 18902, 3636, 1669, 348, 16, 61655, 18902, 3636, 3747, 16, 61655, 18902, 4713, 340, 74075, 612, 18902, 3636, 1669, 348, 16, 61655, 18902, 3636, 3747, 16, 61655, 18902, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRleplus(t *testing.T) { t.Run("Encode", func(t *testing.T) { // Encode an intset ints := []uint64{ // run of 1 0, // gap of 1 // run of 1 2, // gap of 1 // run of 3 4, 5, 6, // gap of 4 // run of 17 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, } expectedBits := []byte{ 0, 0, // version 1, // first bit 1, // run of 1 1, // gap of 1 1, // run of 1 1, // gap of 1 0, 1, 1, 1, 0, 0, // run of 3 0, 1, 0, 0, 1, 0, // gap of 4 // run of 17 < 0 0 (varint) > 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, } v := bitvector.BitVector{} for _, bit := range expectedBits { v.Push(bit) } actualBytes, _, err := rleplus.Encode(ints) assert.NilError(t, err) assert.Equal(t, len(v.Buf), len(actualBytes)) for idx, expected := range v.Buf { assert.Equal( t, fmt.Sprintf("%08b", expected), fmt.Sprintf("%08b", actualBytes[idx]), ) } }) t.Run("Encode allows all runs sizes possible uint64", func(t *testing.T) { // create a run of math.MaxUint64 ints := []uint64{math.MaxUint64} _, _, err := rleplus.Encode(ints) assert.NilError(t, err) }) t.Run("Decode", func(t *testing.T) { testCases := [][]uint64{ {}, {1}, {0}, {0, 1, 2, 3}, { // run of 1 0, // gap of 1 // run of 1 2, // gap of 1 // run of 3 4, 5, 6, // gap of 4 // run of 17 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, }, } for _, tc := range testCases { encoded, _, err := rleplus.Encode(tc) assert.NilError(t, err) result, err := rleplus.Decode(encoded) assert.NilError(t, err) sort.Slice(tc, func(i, j int) bool { return tc[i] < tc[j] }) sort.Slice(result, func(i, j int) bool { return result[i] < result[j] }) assert.Equal(t, len(tc), len(result)) for idx, expected := range tc { assert.Equal(t, expected, result[idx]) } } }) t.Run("Decode version check", func(t *testing.T) { _, err := rleplus.Decode([]byte{0xff}) assert.Error(t, err, "invalid RLE+ version") }) t.Run("Decode returns an error with a bad encoding", func(t *testing.T) { // create an encoding with a buffer with a run which is too long _, err := 
rleplus.Decode([]byte{0xe0, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff}) assert.Error(t, err, "invalid encoding for RLE+ version 0") }) t.Run("outputs same as reference implementation", func(t *testing.T) { // Encoding bitvec![LittleEndian; 1, 0, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1] // in the Rust reference implementation gives an encoding of [223, 145, 136, 0] (without version field) // The bit vector is equivalent to the integer set { 0, 2, 4, 5, 6, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27 } // This is the above reference output with a version header "00" manually added referenceEncoding := []byte{124, 71, 34, 2} expectedNumbers := []uint64{0, 2, 4, 5, 6, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27} encoded, _, err := rleplus.Encode(expectedNumbers) assert.NilError(t, err) // Our encoded bytes are the same as the ref bytes assert.Equal(t, len(referenceEncoding), len(encoded)) for idx, expected := range referenceEncoding { assert.Equal(t, expected, encoded[idx]) } decoded, err := rleplus.Decode(referenceEncoding) assert.NilError(t, err) // Our decoded integers are the same as expected sort.Slice(decoded, func(i, j int) bool { return decoded[i] < decoded[j] }) assert.Equal(t, len(expectedNumbers), len(decoded)) for idx, expected := range expectedNumbers { assert.Equal(t, expected, decoded[idx]) } }) t.Run("RunLengths", func(t *testing.T) { testCases := []struct { ints []uint64 first byte runs []uint64 }{ // empty {}, // leading with ones {[]uint64{0}, 1, []uint64{1}}, {[]uint64{0, 1}, 1, []uint64{2}}, {[]uint64{0, 0xffffffff, 0xffffffff + 1}, 1, []uint64{1, 0xffffffff - 1, 2}}, // leading with zeroes {[]uint64{1}, 0, []uint64{1, 1}}, {[]uint64{2}, 0, []uint64{2, 1}}, {[]uint64{10, 11, 13, 20}, 0, []uint64{10, 2, 1, 1, 6, 1}}, {[]uint64{10, 11, 11, 13, 20, 10, 11, 13, 20}, 0, []uint64{10, 2, 1, 1, 6, 1}}, } for _, testCase := range testCases { first, runs 
:= rleplus.RunLengths(testCase.ints) assert.Equal(t, testCase.first, first) assert.Equal(t, len(testCase.runs), len(runs)) for idx, runLength := range testCase.runs { assert.Equal(t, runLength, runs[idx]) } } }) }
explode_data.jsonl/9025
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2191 }
[ 2830, 3393, 49, 273, 7138, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 32535, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 56562, 458, 526, 746, 198, 197, 2084, 82, 1669, 3056, 2496, 21, 19, 515, 298, 197, 322, 159...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRemoveTable(t *testing.T) { t.Parallel() ctx := cdcContext.NewBackendContext4Test(false) communicator := NewMockScheduleDispatcherCommunicator() dispatcher := NewBaseScheduleDispatcher("cf-1", communicator, 1000) dispatcher.captureStatus = map[model.CaptureID]*captureStatus{ "capture-1": { SyncStatus: captureSyncFinished, CheckpointTs: 1500, ResolvedTs: 1500, }, "capture-2": { SyncStatus: captureSyncFinished, CheckpointTs: 1500, ResolvedTs: 1500, }, } dispatcher.tables.AddTableRecord(&util.TableRecord{ TableID: 1, CaptureID: "capture-1", Status: util.RunningTable, }) dispatcher.tables.AddTableRecord(&util.TableRecord{ TableID: 2, CaptureID: "capture-2", Status: util.RunningTable, }) dispatcher.tables.AddTableRecord(&util.TableRecord{ TableID: 3, CaptureID: "capture-1", Status: util.RunningTable, }) checkpointTs, resolvedTs, err := dispatcher.Tick(ctx, 1500, []model.TableID{1, 2, 3}, defaultMockCaptureInfos) require.NoError(t, err) require.Equal(t, model.Ts(1500), checkpointTs) require.Equal(t, model.Ts(1500), resolvedTs) // Inject a dispatch table failure communicator.On("DispatchTable", mock.Anything, "cf-1", model.TableID(3), "capture-1", true). Return(false, nil) checkpointTs, resolvedTs, err = dispatcher.Tick(ctx, 1500, []model.TableID{1, 2}, defaultMockCaptureInfos) require.NoError(t, err) require.Equal(t, CheckpointCannotProceed, checkpointTs) require.Equal(t, CheckpointCannotProceed, resolvedTs) communicator.AssertExpectations(t) communicator.Reset() communicator.On("DispatchTable", mock.Anything, "cf-1", model.TableID(3), "capture-1", true). 
Return(true, nil) checkpointTs, resolvedTs, err = dispatcher.Tick(ctx, 1500, []model.TableID{1, 2}, defaultMockCaptureInfos) require.NoError(t, err) require.Equal(t, CheckpointCannotProceed, checkpointTs) require.Equal(t, CheckpointCannotProceed, resolvedTs) communicator.AssertExpectations(t) dispatcher.OnAgentFinishedTableOperation("capture-1", 3) communicator.Reset() checkpointTs, resolvedTs, err = dispatcher.Tick(ctx, 1500, []model.TableID{1, 2}, defaultMockCaptureInfos) require.NoError(t, err) require.Equal(t, model.Ts(1500), checkpointTs) require.Equal(t, model.Ts(1500), resolvedTs) }
explode_data.jsonl/28502
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 869 }
[ 2830, 3393, 13021, 2556, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 1669, 272, 7628, 1972, 7121, 29699, 1972, 19, 2271, 3576, 340, 197, 25579, 850, 1669, 1532, 11571, 32210, 21839, 80923, 850, 741, 60072, 261, 1669, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoginBadCredentials(t *testing.T) { EnsureCleanState(t) closer, sessionClient := ArgoCDClientset.NewSessionClientOrDie() defer io.Close(closer) requests := []session.SessionCreateRequest{{ Username: "user-does-not-exist", Password: "some-password", }, { Username: "admin", Password: "bad-password", }} for _, r := range requests { _, err := sessionClient.Create(context.Background(), &r) if !assert.Error(t, err) { return } errStatus, ok := status.FromError(err) if !assert.True(t, ok) { return } assert.Equal(t, codes.Unauthenticated, errStatus.Code()) assert.Equal(t, "Invalid username or password", errStatus.Message()) } }
explode_data.jsonl/20989
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 6231, 17082, 27025, 1155, 353, 8840, 836, 8, 341, 197, 64439, 27529, 1397, 1155, 692, 1444, 69215, 11, 3797, 2959, 1669, 1644, 3346, 6484, 2959, 746, 7121, 5283, 2959, 2195, 18175, 741, 16867, 6399, 10421, 1337, 69215, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestModuleSearchDuplicateGlobs(t *testing.T) { moduleSystem := NewModuleSystem( map[string][]string{"client": {"clients/*", "clients/*"}}, map[string][]string{}, false, ) var err error err = moduleSystem.RegisterClass(ModuleClass{ Name: "client", NamePlural: "clients", ClassType: MultiModule, }) assert.NoError(t, err) err = moduleSystem.RegisterClassType( "client", "http", &TestHTTPClientGenerator{}, ) assert.NoError(t, err) currentDir := getTestDirName() testServiceDir := path.Join(currentDir, "test-service") instances, err := moduleSystem.GenerateBuild( "github.com/uber/zanzibar/codegen/test-service", testServiceDir, path.Join(testServiceDir, "build"), false, ) assert.NoError(t, err) var instance *ModuleInstance for _, v := range instances["client"] { if v.InstanceName == "example" { instance = v } } assert.Equal(t, []string{"client"}, instance.DependencyOrder) }
explode_data.jsonl/34599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 357 }
[ 2830, 3393, 3332, 5890, 53979, 38, 68164, 1155, 353, 8840, 836, 8, 341, 54020, 2320, 1669, 1532, 3332, 2320, 1006, 197, 19567, 14032, 45725, 917, 4913, 2972, 788, 5212, 31869, 1057, 497, 330, 31869, 1057, 48085, 197, 19567, 14032, 45725, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEntry_CursorColumn_Jump(t *testing.T) { entry := widget.NewMultiLineEntry() entry.SetText("a\nbc") // go to end of text right := &fyne.KeyEvent{Name: fyne.KeyRight} entry.TypedKey(right) entry.TypedKey(right) entry.TypedKey(right) entry.TypedKey(right) assert.Equal(t, 1, entry.CursorRow) assert.Equal(t, 2, entry.CursorColumn) // go up, to a shorter line up := &fyne.KeyEvent{Name: fyne.KeyUp} entry.TypedKey(up) assert.Equal(t, 0, entry.CursorRow) assert.Equal(t, 1, entry.CursorColumn) }
explode_data.jsonl/12315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 216 }
[ 2830, 3393, 5874, 920, 3823, 2933, 10598, 1510, 1155, 353, 8840, 836, 8, 341, 48344, 1669, 9086, 7121, 20358, 2460, 5874, 741, 48344, 92259, 445, 64, 1699, 8904, 5130, 197, 322, 728, 311, 835, 315, 1467, 198, 47921, 1669, 609, 30595, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSortRef_DepthFirstSort feeds a deliberately scrambled set of JSON
// pointer refs to DepthFirst and asserts the exact resulting order. Judging
// from the expected slice: shared parameters/responses and path items come
// before definitions, deeper refs precede their parents, and ties break
// alphabetically.
func TestSortRef_DepthFirstSort(t *testing.T) {
	// Input refs in scrambled order.
	values := []string{
		"#/definitions/datedTag/allOf/0",
		"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism",
		"#/definitions/namedThing",
		"#/definitions/datedTag/properties/id",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema",
		"#/definitions/tags/additionalProperties/properties/id",
		"#/parameters/someParam/schema",
		"#/definitions/records/items/0/properties/createdAt",
		"#/definitions/datedTaggedRecords",
		"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/createdAt",
		"#/definitions/namedAgain",
		"#/definitions/tags",
		"#/paths/~1some~1where~1{id}/get/responses/404/schema",
		"#/definitions/datedRecords/items/1",
		"#/definitions/records/items/0",
		"#/definitions/datedTaggedRecords/items/0",
		"#/definitions/datedTag/allOf/1",
		"#/definitions/otherRecords/items/properties/createdAt",
		"#/responses/someResponse/schema/properties/createdAt",
		"#/definitions/namedAgain/properties/id",
		"#/definitions/datedTag",
		"#/paths/~1some~1where~1{id}/parameters/1/schema",
		"#/parameters/someParam/schema/properties/createdAt",
		"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/createdAt",
		"#/definitions/otherRecords",
		"#/definitions/datedTaggedRecords/items/1",
		"#/definitions/datedTaggedRecords/items/1/properties/createdAt",
		"#/definitions/otherRecords/items",
		"#/definitions/datedRecords/items/0",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/id",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/value",
		"#/definitions/records",
		"#/definitions/namedThing/properties/name/properties/id",
		"#/definitions/datedTaggedRecords/additionalItems/properties/id",
		"#/definitions/datedTaggedRecords/additionalItems/properties/value",
		"#/definitions/datedRecords",
		"#/definitions/datedTag/properties/value",
		"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism/properties/floccinaucinihilipilificationCreatedAt",
		"#/definitions/datedRecords/items/1/properties/createdAt",
		"#/definitions/tags/additionalProperties",
		"#/paths/~1some~1where~1{id}/parameters/1/schema/properties/createdAt",
		"#/definitions/namedThing/properties/name",
		"#/paths/~1some~1where~1{id}/get/responses/default/schema",
		"#/definitions/tags/additionalProperties/properties/value",
		"#/responses/someResponse/schema",
		"#/definitions/datedTaggedRecords/additionalItems",
		"#/paths/~1some~1where~1{id}/get/parameters/2/schema",
	}

	// DepthFirst takes a set, so index the refs as map keys.
	valuesMap := make(map[string]struct{}, len(values))
	for _, v := range values {
		valuesMap[v] = struct{}{}
	}

	expected := []string{
		// Added shared parameters and responses
		"#/parameters/someParam/schema/properties/createdAt",
		"#/parameters/someParam/schema",
		"#/responses/someResponse/schema/properties/createdAt",
		"#/responses/someResponse/schema",
		"#/paths/~1some~1where~1{id}/parameters/1/schema/properties/createdAt",
		"#/paths/~1some~1where~1{id}/parameters/1/schema",
		"#/paths/~1some~1where~1{id}/get/parameters/2/schema/properties/createdAt",
		"#/paths/~1some~1where~1{id}/get/parameters/2/schema",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/id",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema/properties/value",
		"#/paths/~1some~1where~1{id}/get/responses/200/schema",
		"#/paths/~1some~1where~1{id}/get/responses/404/schema",
		"#/paths/~1some~1where~1{id}/get/responses/default/schema/properties/createdAt",
		"#/paths/~1some~1where~1{id}/get/responses/default/schema",
		"#/definitions/datedRecords/items/1/properties/createdAt",
		"#/definitions/datedTaggedRecords/items/1/properties/createdAt",
		"#/definitions/namedThing/properties/name/properties/id",
		"#/definitions/records/items/0/properties/createdAt",
		"#/definitions/datedTaggedRecords/additionalItems/properties/id",
		"#/definitions/datedTaggedRecords/additionalItems/properties/value",
		"#/definitions/otherRecords/items/properties/createdAt",
		"#/definitions/tags/additionalProperties/properties/id",
		"#/definitions/tags/additionalProperties/properties/value",
		"#/definitions/datedRecords/items/0",
		"#/definitions/datedRecords/items/1",
		"#/definitions/datedTag/allOf/0",
		"#/definitions/datedTag/allOf/1",
		"#/definitions/datedTag/properties/id",
		"#/definitions/datedTag/properties/value",
		"#/definitions/datedTaggedRecords/items/0",
		"#/definitions/datedTaggedRecords/items/1",
		"#/definitions/namedAgain/properties/id",
		"#/definitions/namedThing/properties/name",
		"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism/properties/" + "floccinaucinihilipilificationCreatedAt",
		"#/definitions/records/items/0",
		"#/definitions/datedTaggedRecords/additionalItems",
		"#/definitions/otherRecords/items",
		"#/definitions/tags/additionalProperties",
		"#/definitions/datedRecords",
		"#/definitions/datedTag",
		"#/definitions/datedTaggedRecords",
		"#/definitions/namedAgain",
		"#/definitions/namedThing",
		"#/definitions/otherRecords",
		"#/definitions/pneumonoultramicroscopicsilicovolcanoconiosisAntidisestablishmentarianism",
		"#/definitions/records",
		"#/definitions/tags",
	}

	assert.Equal(t, expected, DepthFirst(valuesMap))
}
explode_data.jsonl/71601
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2124 }
[ 2830, 3393, 10231, 3945, 90680, 339, 5338, 10231, 1155, 353, 8840, 836, 8, 341, 45939, 1669, 3056, 917, 515, 197, 197, 57676, 14, 48563, 3446, 657, 5668, 31406, 2124, 14, 15, 756, 197, 197, 57676, 14, 48563, 4322, 811, 372, 263, 10965...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIterator(t *testing.T) { testStr := `{ "_source": { "docid": 123, "content": "hello", "arr": [ {"obj": 1, "name": "abbot"}, {"obj": 2, "name": "costello"} ] }, "hits": 1 }` jn := FromBytes([]byte(testStr)) it := jn.Select("_source", "arr").Iterator() count := 0 for it.Next() { count++ } if count != 2 { t.Errorf("Wrong number of elements: %d", count) } }
explode_data.jsonl/10436
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 11951, 1155, 353, 8840, 836, 8, 341, 18185, 2580, 1669, 1565, 515, 197, 197, 35089, 2427, 788, 341, 298, 197, 1, 5236, 307, 788, 220, 16, 17, 18, 345, 298, 197, 1, 1796, 788, 330, 14990, 756, 298, 197, 1, 1118, 788, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRPM(t *testing.T) { rpms := getFiles(t, regexp.MustCompile(`\.rpm$`)) for _, rpm := range rpms { checkRPM(t, rpm) } }
explode_data.jsonl/2317
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 49, 8795, 1155, 353, 8840, 836, 8, 341, 7000, 79, 1011, 1669, 633, 10809, 1155, 11, 41877, 98626, 5809, 18831, 73782, 3, 63, 1171, 2023, 8358, 49925, 1669, 2088, 33109, 1011, 341, 197, 25157, 49, 8795, 1155, 11, 49925, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestResize fills a map created with a tiny initial size far beyond that
// size, waits for the background resize to finish, and verifies the element
// count, fill rate, and that every inserted item is still retrievable.
func TestResize(t *testing.T) {
	m := New(2)
	itemCount := 50

	// Insert many more items than the initial capacity to force a resize.
	for i := 0; i < itemCount; i++ {
		m.Set(uintptr(i), &Animal{strconv.Itoa(i)})
	}

	if m.Len() != itemCount {
		t.Error("Expected element count did not match.")
	}

	for { // make sure to wait for resize operation to finish
		// m.resizing is an atomically-updated flag; zero means no resize
		// is in flight.
		if atomic.LoadUintptr(&m.resizing) == 0 {
			break
		}
		time.Sleep(time.Microsecond * 50)
	}

	// NOTE(review): 34 presumably is the expected fill rate (percent) for
	// 50 items after the resize — confirm against the map implementation.
	if m.Fillrate() != 34 {
		t.Error("Expecting 34 percent fillrate.")
	}

	// No item may be lost across the resize.
	for i := 0; i < itemCount; i++ {
		_, ok := m.Get(uintptr(i))
		if !ok {
			t.Error("Getting inserted item failed.")
		}
	}
}
explode_data.jsonl/24428
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 30561, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 7, 17, 340, 22339, 2507, 1669, 220, 20, 15, 271, 2023, 600, 1669, 220, 15, 26, 600, 366, 55893, 26, 600, 1027, 341, 197, 2109, 4202, 8488, 3505, 1956, 701, 609, 41...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestPodDeletionEnqueuesRecreateDeployment(t *testing.T) { f := newFixture(t) foo := newDeployment("foo", 1, nil, nil, nil, map[string]string{"foo": "bar"}) foo.Spec.Strategy.Type = apps.RecreateDeploymentStrategyType rs := newReplicaSet(foo, "foo-1", 1) pod := generatePodFromRS(rs) f.dLister = append(f.dLister, foo) f.rsLister = append(f.rsLister, rs) f.objects = append(f.objects, foo, rs) c, _, err := f.newController() if err != nil { t.Fatalf("error creating Deployment controller: %v", err) } enqueued := false c.enqueueDeployment = func(d *apps.Deployment) { if d.Name == "foo" { enqueued = true } } c.deletePod(pod) if !enqueued { t.Errorf("expected deployment %q to be queued after pod deletion", foo.Name) } }
explode_data.jsonl/66040
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 23527, 1912, 52625, 1702, 76089, 693, 3182, 75286, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 692, 197, 7975, 1669, 501, 75286, 445, 7975, 497, 220, 16, 11, 2092, 11, 2092, 11, 2092, 11, 2415, 14032, 30953...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeployAndInvoke(t *testing.T) { chainID := util.GetTestChainID() var ctxt = context.Background() url := "github.com/hyperledger/fabric/examples/chaincode/go/chaincode_example01" chaincodeID := &pb.ChaincodeID{Path: url, Name: "ex01", Version: "0"} defer deleteChaincodeOnDisk("ex01.0") args := []string{"10"} f := "init" argsDeploy := util.ToChaincodeArgs(f, "a", "100", "b", "200") spec := &pb.ChaincodeSpec{Type: 1, ChaincodeId: chaincodeID, Input: &pb.ChaincodeInput{Args: argsDeploy}} cccid := ccprovider.NewCCContext(chainID, "ex01", "0", "", false, nil, nil) resp, prop, err := deploy(endorserServer, chainID, spec, nil) chaincodeID1 := spec.ChaincodeId.Name if err != nil { t.Fail() t.Logf("Error deploying <%s>: %s", chaincodeID1, err) chaincode.GetChain().Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: chaincodeID}}) return } var nextBlockNumber uint64 = 1 // first block needs to be block number = 1. Genesis block is block 0 err = endorserServer.(*Endorser).commitTxSimulation(prop, chainID, signer, resp, nextBlockNumber) if err != nil { t.Fail() t.Logf("Error committing deploy <%s>: %s", chaincodeID1, err) chaincode.GetChain().Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: chaincodeID}}) return } f = "invoke" invokeArgs := append([]string{f}, args...) 
spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: chaincodeID, Input: &pb.ChaincodeInput{Args: util.ToChaincodeArgs(invokeArgs...)}} prop, resp, txid, nonce, err := invoke(chainID, spec) if err != nil { t.Fail() t.Logf("Error invoking transaction: %s", err) chaincode.GetChain().Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: chaincodeID}}) return } // Commit invoke nextBlockNumber++ err = endorserServer.(*Endorser).commitTxSimulation(prop, chainID, signer, resp, nextBlockNumber) if err != nil { t.Fail() t.Logf("Error committing first invoke <%s>: %s", chaincodeID1, err) chaincode.GetChain().Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: chaincodeID}}) return } // Now test for an invalid TxID f = "invoke" invokeArgs = append([]string{f}, args...) spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: chaincodeID, Input: &pb.ChaincodeInput{Args: util.ToChaincodeArgs(invokeArgs...)}} _, err = invokeWithOverride("invalid_tx_id", chainID, spec, nonce) if err == nil { t.Fail() t.Log("Replay attack protection faild. Transaction with invalid txid passed") chaincode.GetChain().Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: chaincodeID}}) return } // Now test for duplicated TxID f = "invoke" invokeArgs = append([]string{f}, args...) spec = &pb.ChaincodeSpec{Type: 1, ChaincodeId: chaincodeID, Input: &pb.ChaincodeInput{Args: util.ToChaincodeArgs(invokeArgs...)}} _, err = invokeWithOverride(txid, chainID, spec, nonce) if err == nil { t.Fail() t.Log("Replay attack protection faild. Transaction with duplicaged txid passed") chaincode.GetChain().Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: chaincodeID}}) return } fmt.Printf("Invoke test passed\n") t.Logf("Invoke test passed") chaincode.GetChain().Stop(ctxt, cccid, &pb.ChaincodeDeploymentSpec{ChaincodeSpec: &pb.ChaincodeSpec{ChaincodeId: chaincodeID}}) }
explode_data.jsonl/27803
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1263 }
[ 2830, 3393, 69464, 3036, 17604, 1155, 353, 8840, 836, 8, 341, 197, 8819, 915, 1669, 4094, 2234, 2271, 18837, 915, 741, 2405, 59162, 284, 2266, 19047, 2822, 19320, 1669, 330, 5204, 905, 7530, 39252, 50704, 6663, 28897, 67020, 14, 8819, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestBasicRW(t *testing.T) { env := NewTestVDBEnv(t) env.Cleanup("testbasicrw_") env.Cleanup("testbasicrw_ns") env.Cleanup("testbasicrw_ns1") env.Cleanup("testbasicrw_ns2") defer env.Cleanup("testbasicrw_") defer env.Cleanup("testbasicrw_ns") defer env.Cleanup("testbasicrw_ns1") defer env.Cleanup("testbasicrw_ns2") commontests.TestBasicRW(t, env.DBProvider) }
explode_data.jsonl/590
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 15944, 56368, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 1532, 2271, 53, 3506, 14359, 1155, 340, 57538, 727, 60639, 445, 1944, 22342, 31768, 62, 1138, 57538, 727, 60639, 445, 1944, 22342, 31768, 34728, 1138, 57538, 727, 60639,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_setRun(t *testing.T) { type args struct { n int runlen int b []byte run int } tests := []struct { name string args args want []byte }{ // set an arbitrary chunk of 11 bits to 1s and 0s. {"a", args{1, 11, []byte{0, 0, 0, 0}, 0x55}, []byte{0, 0x01, 0x54, 0}}, // set 11 1s in a row {"b", args{1, 11, []byte{0, 0, 0, 0}, 0x7FF}, []byte{0, 0x1F, 0xFC, 0x00}}, // set 11 zeros in a row {"c", args{1, 11, []byte{0xFF, 0xFF, 0xFF, 0xFF}, 0}, []byte{0xFF, 0xE0, 0x03, 0xFF}}, // set at the left {"d", args{0, 11, []byte{0, 0, 0, 0}, 0x7FF}, []byte{0xFF, 0xE0, 0, 0}}, // set at the right {"e", args{7, 11, []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, 0x7FF}, []byte{0, 0, 0, 0, 0, 0, 0, 0, 0, 0x07, 0xFF}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := setRun(tt.args.n, tt.args.runlen, tt.args.b, tt.args.run); !reflect.DeepEqual(got, tt.want) { t.Errorf("setRun() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/32828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 533 }
[ 2830, 3393, 2602, 6727, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 9038, 414, 526, 198, 197, 56742, 2892, 526, 198, 197, 2233, 414, 3056, 3782, 198, 197, 56742, 262, 526, 198, 197, 532, 78216, 1669, 3056, 1235, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDefaultArguments(t *testing.T) { cases := []struct { name string t buildv1alpha1.TemplateInstantiationSpec env map[string]string expected buildv1alpha1.TemplateInstantiationSpec }{ { name: "nothing set works", }, { name: "add env", env: map[string]string{ "hello": "world", }, expected: buildv1alpha1.TemplateInstantiationSpec{ Arguments: []buildv1alpha1.ArgumentSpec{{Name: "hello", Value: "world"}}, }, }, { name: "do not override env", t: buildv1alpha1.TemplateInstantiationSpec{ Arguments: []buildv1alpha1.ArgumentSpec{ {Name: "ignore", Value: "this"}, {Name: "keep", Value: "original value"}, }, }, env: map[string]string{ "hello": "world", "keep": "should not see this", }, expected: buildv1alpha1.TemplateInstantiationSpec{ Arguments: []buildv1alpha1.ArgumentSpec{ {Name: "ignore", Value: "this"}, {Name: "keep", Value: "original value"}, {Name: "hello", Value: "world"}, }, }, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { templ := tc.t defaultArguments(&templ, tc.env) if !equality.Semantic.DeepEqual(templ, tc.expected) { t.Errorf("builds do not match:\n%s", diff.ObjectReflectDiff(&tc.expected, templ)) } }) } }
explode_data.jsonl/78930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 571 }
[ 2830, 3393, 3675, 19139, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 3244, 286, 1936, 85, 16, 7141, 16, 52530, 30340, 7101, 8327, 198, 197, 57538, 414, 2415, 14032, 30953, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRateLimiter_ReachLimit_SameTS(t *testing.T) { limiter := New(10, time.Second) // a limit of 10 tokens should be allowed on a second for i := 0; i < 10; i++ { mockClock(limiter, "2000-01-01T00:00:00.100Z") assert.True(t, limiter.Allow("pable")) } // the 11th token withing the same second, musn't be allowed. mockClock(limiter, "2000-01-01T00:00:00.100Z") // user is throttled assert.False(t, limiter.Allow("pable")) }
explode_data.jsonl/74376
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 11564, 43, 17700, 62, 48368, 16527, 1098, 373, 9951, 1155, 353, 8840, 836, 8, 341, 197, 4659, 2015, 1669, 1532, 7, 16, 15, 11, 882, 32435, 692, 197, 322, 264, 3930, 315, 220, 16, 15, 11211, 1265, 387, 5420, 389, 264, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTask_Validate exercises Task validation: one fully valid task, plus
// one failing case per field (missing description, out-of-range priority,
// start date after due date). Failures must surface as *internal.Error.
func TestTask_Validate(t *testing.T) {
	t.Parallel()

	tests := []struct {
		name    string
		input   internal.Task
		withErr bool
	}{
		{
			"OK",
			internal.Task{
				Description: "complete this microservice",
				Priority:    internal.PriorityHigh,
				Dates: internal.Dates{
					Start: time.Now(),
					Due:   time.Now().Add(time.Hour),
				},
			},
			false,
		},
		{
			// Description is omitted.
			"ERR: Description",
			internal.Task{
				Priority: internal.PriorityHigh,
				Dates: internal.Dates{
					Start: time.Now(),
					Due:   time.Now().Add(time.Hour),
				},
			},
			true,
		},
		{
			// Negative priority is invalid.
			"ERR: Priority",
			internal.Task{
				Description: "complete this microservice",
				Priority:    internal.Priority(-1),
				Dates: internal.Dates{
					Start: time.Now(),
					Due:   time.Now().Add(time.Hour),
				},
			},
			true,
		},
		{
			// Start after Due is an invalid date range.
			"ERR: Dates",
			internal.Task{
				Description: "complete this microservice",
				Priority:    internal.PriorityHigh,
				Dates: internal.Dates{
					Start: time.Now().Add(time.Hour),
					Due:   time.Now(),
				},
			},
			true,
		},
	}

	for _, tt := range tests {
		tt := tt // capture range variable for the parallel subtest

		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()

			actualErr := tt.input.Validate()
			if (actualErr != nil) != tt.withErr {
				t.Fatalf("expected error %t, got %s", tt.withErr, actualErr)
			}

			// Any validation failure must unwrap to *internal.Error.
			var ierr *internal.Error
			if tt.withErr && !errors.As(actualErr, &ierr) {
				t.Fatalf("expected %T error, got %T", ierr, actualErr)
			}
		})
	}
}
explode_data.jsonl/74769
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 709 }
[ 2830, 3393, 6262, 62, 17926, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 22427, 256, 5306, 28258, 198, 197, 46948, 7747, 1807, 198, 197, 59403, 197, 197, 515, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestDefaultLogger checks that New falls back to the default logger
// implementation when Initialize has not been called, and that the default
// logger's output passes the modlog verification helper.
func TestDefaultLogger(t *testing.T) {
	// Reset the provider guard so later tests can initialize a provider.
	defer func() { loggerProviderOnce = sync.Once{} }()
	const module = "sample-module"
	//get new logger since Initialize is not called, default logger implementation will be used
	logger := New(module)
	//force logger instance loading to switch output of logger to buffer for testing
	logger.Infof("sample output")
	modlog.SwitchLogOutputToBuffer(logger.instance)
	//verify default logger
	modlog.VerifyDefaultLogging(t, logger, module, metadata.SetLevel)
}
explode_data.jsonl/79294
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 3675, 7395, 1155, 353, 8840, 836, 8, 1476, 16867, 2915, 368, 314, 5925, 5179, 12522, 284, 12811, 77946, 6257, 50746, 4777, 4688, 284, 330, 13611, 46718, 1837, 197, 322, 455, 501, 5925, 2474, 9008, 374, 537, 2598, 11, 1638, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMetricsForwarder_setTags verifies that updateTags derives the
// forwarder's tag list from a DatadogAgent's labels and cluster name:
// nil/empty agents produce no tags, labels become "key:value" tags, and the
// cluster name becomes a "cluster_name:<name>" tag.
func TestMetricsForwarder_setTags(t *testing.T) {
	tests := []struct {
		name string
		dda  *datadoghqv1alpha1.DatadogAgent
		want []string
	}{
		{
			name: "nil dad",
			dda:  nil,
			want: []string{},
		},
		{
			name: "empty labels",
			dda:  test.NewDefaultedDatadogAgent("foo", "bar", &test.NewDatadogAgentOptions{}),
			want: []string{},
		},
		{
			name: "with labels",
			dda: test.NewDefaultedDatadogAgent("foo", "bar", &test.NewDatadogAgentOptions{
				Labels: map[string]string{
					"firstKey":  "firstValue",
					"secondKey": "secondValue",
				},
			}),
			want: []string{
				"firstKey:firstValue",
				"secondKey:secondValue",
			},
		},
		{
			name: "with clustername",
			dda: test.NewDefaultedDatadogAgent("foo", "bar", &test.NewDatadogAgentOptions{
				ClusterName: datadoghqv1alpha1.NewStringPointer("testcluster"),
			}),
			want: []string{
				"cluster_name:testcluster",
			},
		},
		{
			name: "with clustername and labels",
			dda: test.NewDefaultedDatadogAgent("foo", "bar", &test.NewDatadogAgentOptions{
				ClusterName: datadoghqv1alpha1.NewStringPointer("testcluster"),
				Labels: map[string]string{
					"firstKey":  "firstValue",
					"secondKey": "secondValue",
				},
			}),
			want: []string{
				"cluster_name:testcluster",
				"firstKey:firstValue",
				"secondKey:secondValue",
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			dd := &metricsForwarder{}
			dd.updateTags(tt.dda)
			// Sort both sides: tag order is not part of the contract.
			sort.Strings(dd.tags)
			sort.Strings(tt.want)
			if !reflect.DeepEqual(dd.tags, tt.want) {
				t.Errorf("metricsForwarder.setTags() dd.tags = %v, want %v", dd.tags, tt.want)
			}
		})
	}
}
explode_data.jsonl/8872
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 793 }
[ 2830, 3393, 27328, 25925, 261, 2602, 15930, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 197, 71487, 220, 353, 5911, 329, 63936, 69578, 16, 7141, 16, 909, 266, 329, 538, 16810, 198, 197, 50780...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestShouldReturnNormalizedValueForMatchSpecificationOfApplicationConfigWhenStateFuncIsCalledAndValueCanBeNormalized(t *testing.T) { resourceHandle := NewApplicationConfigResourceHandle() schema := resourceHandle.MetaData().Schema expectedValue := expressionEntityTypeDestEqValue newValue := validMatchSpecification require.Equal(t, expectedValue, schema[ApplicationConfigFieldMatchSpecification].StateFunc(newValue)) }
explode_data.jsonl/64921
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 14996, 5598, 79082, 1130, 2461, 8331, 56139, 2124, 4988, 2648, 4498, 1397, 9626, 3872, 20960, 3036, 1130, 69585, 79082, 1155, 353, 8840, 836, 8, 341, 50346, 6999, 1669, 1532, 4988, 2648, 4783, 6999, 741, 1903, 3416, 1669, 5101...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestErrorWrapper(t *testing.T) { assert := New(t) mockAssert := New(new(testing.T)) // start with a nil error var err error = nil assert.False(mockAssert.Error(err), "Error should return False for nil arg") // now set an error err = errors.New("Some error") assert.True(mockAssert.Error(err), "Error with error should return True") }
explode_data.jsonl/54979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 1454, 11542, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 1532, 1155, 340, 77333, 8534, 1669, 1532, 1755, 8623, 287, 836, 4390, 197, 322, 1191, 448, 264, 2092, 1465, 198, 2405, 1848, 1465, 284, 2092, 271, 6948, 50757, 30389, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBuildCodec builds a codec for a struct whose fields cover every schema
// kind declared below (boolean, int, long, float, double, bytes, string,
// nested record, array, map, fixed) and asserts that construction succeeds.
func TestBuildCodec(t *testing.T) {
	t.Parallel()

	// Nested record type backing the "H" field.
	type some struct {
		I []int32
	}

	// One Go field per schema field declared in allSchema.
	type all struct {
		A bool
		B int32
		C int64
		D float32
		E float64
		F []byte
		G string
		H some
		J map[string]int
		K [4]byte
		L int16
	}

	allSchema := Schema{
		Type: "record",
		Object: &SchemaObject{
			Fields: []SchemaRecordField{
				{
					Name: "A",
					Type: Schema{Type: "boolean"},
				},
				{
					Name: "B",
					Type: Schema{Type: "int"},
				},
				{
					Name: "C",
					Type: Schema{Type: "long"},
				},
				{
					Name: "D",
					Type: Schema{Type: "float"},
				},
				{
					Name: "E",
					Type: Schema{Type: "double"},
				},
				{
					Name: "F",
					Type: Schema{Type: "bytes"},
				},
				{
					Name: "G",
					Type: Schema{Type: "string"},
				},
				{
					// Nested record: "some" with an array-of-int field.
					Name: "H",
					Type: Schema{
						Type: "record",
						Object: &SchemaObject{
							Name: "some",
							Fields: []SchemaRecordField{
								{
									Name: "I",
									Type: Schema{
										Type: "array",
										Object: &SchemaObject{
											Items: Schema{Type: "int"},
										},
									},
								},
							},
						},
					},
				},
				{
					Name: "J",
					Type: Schema{
						Type: "map",
						Object: &SchemaObject{
							Values: Schema{Type: "long"},
						},
					},
				},
				{
					Name: "K",
					Type: Schema{
						Type: "fixed",
						Object: &SchemaObject{
							Size: 4,
						},
					},
				},
				{
					// NOTE(review): field L is int16 but is mapped to the
					// "long" schema type — presumably buildCodec accepts
					// narrower integer kinds; confirm against the codec.
					Name: "L",
					Type: Schema{
						Type: "long",
					},
				},
			},
		},
	}

	c, err := buildCodec(allSchema, reflect.TypeOf(all{}))
	if err != nil {
		t.Fatal(err)
	}
	// Only successful construction is asserted; the codec is not exercised.
	_ = c
}
explode_data.jsonl/39602
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 965 }
[ 2830, 3393, 11066, 36913, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 13158, 1045, 2036, 341, 197, 24486, 3056, 396, 18, 17, 198, 197, 630, 13158, 678, 2036, 341, 197, 22985, 1807, 198, 197, 12791, 526, 18, 17, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMapProxy_RemoveEntryListenerToKeyWithInvalidRegistrationId(t *testing.T) { var wg *sync.WaitGroup = new(sync.WaitGroup) entryAdded := &AddEntry{wg: wg} registrationId, err := mp.AddEntryListenerToKey(entryAdded, "key1", true) AssertEqual(t, err, nil, nil) invalidRegistrationId := "invalid" removed, _ := mp.RemoveEntryListener(&invalidRegistrationId) if removed { t.Fatal("remove entry listener to key with invalid registration id failed") } mp.RemoveEntryListener(registrationId) mp.Clear() }
explode_data.jsonl/57036
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 2227, 16219, 66843, 5874, 2743, 1249, 1592, 2354, 7928, 23365, 764, 1155, 353, 8840, 836, 8, 341, 2405, 63581, 353, 12996, 28384, 2808, 284, 501, 97233, 28384, 2808, 340, 48344, 19337, 1669, 609, 2212, 5874, 90, 45540, 25, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestTLSCodec_8 covers TLS failure paths: the server presents demoCert,
// while the client first trusts a different CA (testCert) — so the read is
// asserted to fail — and then supplies a garbage CA, so codec
// initialization itself must fail.
func TestTLSCodec_8(t *testing.T) {
	t.Parallel()

	l, p, _ := RandomListen("")
	l0 := NewEventListener(l)
	l0.AddCodecFactory(func(ctx Context) Codec {
		c0 := NewTLSServerCodec()
		c0.RequireClientAuth(false)
		// server certificates
		c0.AddCertificate(demoCert, demoKEY)
		return c0
	})
	l0.OnAccept(func(ctx Context, c net.Conn) {
		// Writing on the accepted connection is asserted to fail.
		_, err := c.Write([]byte("hello"))
		assert.Error(t, err)
	})
	l0.OnAcceptError(func(ctx Context, err error) {
		t.Log(err)
	})
	l0.Start()
	time.Sleep(time.Second) // give the listener time to come up

	conn, _ := net.Dial("tcp", "127.0.0.1:"+p)
	conn0 := NewConn(conn)

	// Client trusts testCert while the server presents demoCert:
	// the read must fail.
	c1 := NewTLSClientCodec()
	c1.SkipVerify(false)
	c1.AddServerCa(testCert)
	conn0.AddCodec(c1)
	_, err := Read(conn0, 5)
	time.Sleep(time.Second)
	assert.Error(t, err)

	// A malformed CA certificate must make codec initialization fail.
	c2 := NewTLSClientCodec()
	c2.SkipVerify(false)
	c2.AddServerCa([]byte("aaa"))
	conn0.AddCodec(c2)
	assert.Error(t, conn0.initialize())
}
explode_data.jsonl/34696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 397 }
[ 2830, 3393, 45439, 36913, 62, 23, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 8810, 11, 281, 11, 716, 1669, 10612, 38714, 31764, 8810, 15, 1669, 1532, 7765, 2333, 340, 8810, 15, 1904, 36913, 4153, 18552, 7502, 9608, 8, 67077...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetGCSBrowserPrefix(t *testing.T) { testCases := []struct { id string config Spyglass expected string }{ { id: "only default", config: Spyglass{ GCSBrowserPrefixesByRepo: map[string]string{ "*": "https://default.com/gcs/", }, }, expected: "https://default.com/gcs/", }, { id: "org exists", config: Spyglass{ GCSBrowserPrefixesByRepo: map[string]string{ "*": "https://default.com/gcs/", "org": "https://org.com/gcs/", }, }, expected: "https://org.com/gcs/", }, { id: "repo exists", config: Spyglass{ GCSBrowserPrefixesByRepo: map[string]string{ "*": "https://default.com/gcs/", "org": "https://org.com/gcs/", "org/repo": "https://repo.com/gcs/", }, }, expected: "https://repo.com/gcs/", }, { id: "repo overrides bucket", config: Spyglass{ GCSBrowserPrefixesByRepo: map[string]string{ "*": "https://default.com/gcs/", "org": "https://org.com/gcs/", "org/repo": "https://repo.com/gcs/", }, GCSBrowserPrefixesByBucket: map[string]string{ "*": "https://default.com/gcs/", "bucket": "https://bucket.com/gcs/", }, }, expected: "https://repo.com/gcs/", }, { id: "bucket exists", config: Spyglass{ GCSBrowserPrefixesByRepo: map[string]string{ "*": "https://default.com/gcs/", }, GCSBrowserPrefixesByBucket: map[string]string{ "*": "https://default.com/gcs/", "bucket": "https://bucket.com/gcs/", }, }, expected: "https://bucket.com/gcs/", }, } for _, tc := range testCases { actual := tc.config.GetGCSBrowserPrefix("org", "repo", "bucket") if !reflect.DeepEqual(actual, tc.expected) { t.Fatalf("%s", cmp.Diff(tc.expected, actual)) } } }
explode_data.jsonl/41006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 913 }
[ 2830, 3393, 1949, 38, 6412, 17878, 14335, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 15710, 981, 914, 198, 197, 25873, 256, 46314, 58436, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 515, 298, 15710, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCommissionValidate(t *testing.T) { testCases := []struct { input Commission expectErr bool }{ // invalid commission; max rate < 0% {NewCommission(sdk.ZeroDec(), sdk.MustNewDecFromStr("-1.00"), sdk.ZeroDec()), true}, // invalid commission; max rate > 100% {NewCommission(sdk.ZeroDec(), sdk.MustNewDecFromStr("2.00"), sdk.ZeroDec()), true}, // invalid commission; rate < 0% {NewCommission(sdk.MustNewDecFromStr("-1.00"), sdk.ZeroDec(), sdk.ZeroDec()), true}, // invalid commission; rate > max rate {NewCommission(sdk.MustNewDecFromStr("0.75"), sdk.MustNewDecFromStr("0.50"), sdk.ZeroDec()), true}, // invalid commission; max change rate < 0% {NewCommission(sdk.OneDec(), sdk.OneDec(), sdk.MustNewDecFromStr("-1.00")), true}, // invalid commission; max change rate > max rate {NewCommission(sdk.OneDec(), sdk.MustNewDecFromStr("0.75"), sdk.MustNewDecFromStr("0.90")), true}, // valid commission {NewCommission(sdk.MustNewDecFromStr("0.20"), sdk.OneDec(), sdk.MustNewDecFromStr("0.10")), false}, } for i, tc := range testCases { err := tc.input.Validate() require.Equal(t, tc.expectErr, err != nil, "unexpected result; tc #%d, input: %v", i, tc.input) } }
explode_data.jsonl/82248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 463 }
[ 2830, 3393, 73750, 17926, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 22427, 257, 9652, 198, 197, 24952, 7747, 1807, 198, 197, 59403, 197, 197, 322, 8318, 12123, 26, 1932, 4379, 366, 220, 15, 13744, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIngressRouteHTTPS(t *testing.T) { rh, cc, done := setup(t, func(reh *contour.ResourceEventHandler) { reh.IngressRouteRootNamespaces = []string{} }) defer done() // assert that there is only a static listener assertEqual(t, &v2.DiscoveryResponse{ VersionInfo: "0", Resources: []types.Any{ any(t, staticListener()), }, TypeUrl: listenerType, Nonce: "0", }, streamLDS(t, cc)) // s1 is a tls secret s1 := &v1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: "secret", Namespace: "default", }, Type: "kubernetes.io/tls", Data: map[string][]byte{ v1.TLSCertKey: []byte("certificate"), v1.TLSPrivateKeyKey: []byte("key"), }, } // ir1 is an ingressroute that has TLS ir1 := &ingressroutev1.IngressRoute{ ObjectMeta: metav1.ObjectMeta{ Name: "simple", Namespace: "default", }, Spec: ingressroutev1.IngressRouteSpec{ VirtualHost: &ingressroutev1.VirtualHost{ Fqdn: "example.com", TLS: &ingressroutev1.TLS{ SecretName: "secret", }, }, Routes: []ingressroutev1.Route{{ Match: "/", Services: []ingressroutev1.Service{{ Name: "kuard", Port: 8080, }}, }}, }, } svc1 := &v1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "kuard", Namespace: "default", }, Spec: v1.ServiceSpec{ Ports: []v1.ServicePort{{ Name: "http", Protocol: "TCP", Port: 8080, }}, }, } // add secret rh.OnAdd(s1) // add service rh.OnAdd(svc1) // add ingressroute rh.OnAdd(ir1) ingressHTTP := &v2.Listener{ Name: "ingress_http", Address: *envoy.SocketAddress("0.0.0.0", 8080), FilterChains: envoy.FilterChains(envoy.HTTPConnectionManager("ingress_http", "/dev/stdout")), } ingressHTTPS := &v2.Listener{ Name: "ingress_https", Address: *envoy.SocketAddress("0.0.0.0", 8443), ListenerFilters: []listener.ListenerFilter{ envoy.TLSInspector(), }, FilterChains: filterchaintls("example.com", s1, envoy.HTTPConnectionManager("ingress_https", "/dev/stdout"), "h2", "http/1.1"), } assertEqual(t, &v2.DiscoveryResponse{ VersionInfo: "3", Resources: []types.Any{ any(t, ingressHTTP), any(t, ingressHTTPS), any(t, staticListener()), }, TypeUrl: 
listenerType, Nonce: "3", }, streamLDS(t, cc)) }
explode_data.jsonl/22829
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1062 }
[ 2830, 3393, 641, 2483, 4899, 82354, 1155, 353, 8840, 836, 8, 341, 7000, 71, 11, 12527, 11, 2814, 1669, 6505, 1155, 11, 2915, 5801, 71, 353, 772, 413, 20766, 17945, 8, 341, 197, 197, 11063, 5337, 2483, 4899, 8439, 7980, 27338, 284, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckErrorCondition(t *testing.T) { t.Parallel() // tx with almost empty scripts. tx := &wire.MsgTx{ Version: 1, TxIn: []*wire.TxIn{ { PreviousOutPoint: wire.OutPoint{ Hash: wire.ShaHash([32]byte{ 0xc9, 0x97, 0xa5, 0xe5, 0x6e, 0x10, 0x41, 0x02, 0xfa, 0x20, 0x9c, 0x6a, 0x85, 0x2d, 0xd9, 0x06, 0x60, 0xa2, 0x0b, 0x2d, 0x9c, 0x35, 0x24, 0x23, 0xed, 0xce, 0x25, 0x85, 0x7f, 0xcd, 0x37, 0x04, }), Index: 0, }, SignatureScript: []uint8{}, Sequence: 4294967295, }, }, TxOut: []*wire.TxOut{ { Value: 1000000000, PkScript: nil, }, }, LockTime: 0, } pkScript := []byte{ txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_NOP, txscript.OP_TRUE, } vm, err := txscript.NewEngine(pkScript, tx, 0, 0, nil) if err != nil { t.Errorf("failed to create script: %v", err) } for i := 0; i < len(pkScript)-1; i++ { done, err := vm.Step() if err != nil { t.Errorf("failed to step %dth time: %v", i, err) return } if done { t.Errorf("finshed early on %dth time", i) return } err = vm.CheckErrorCondition(false) if err != txscript.ErrStackScriptUnfinished { t.Errorf("got unexepected error %v on %dth iteration", err, i) return } } done, err := vm.Step() if err != nil { t.Errorf("final step failed %v", err) return } if !done { t.Errorf("final step isn't done!") return } err = vm.CheckErrorCondition(false) if err != nil { t.Errorf("unexpected error %v on final check", err) } }
explode_data.jsonl/24199
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 893 }
[ 2830, 3393, 3973, 1454, 10547, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 9854, 448, 4558, 4287, 19502, 624, 46237, 1669, 609, 35531, 30365, 31584, 515, 197, 77847, 25, 220, 16, 345, 197, 10261, 87, 641, 25, 2983...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestHttpNameField(t *testing.T) { tests := []struct { testName string URI string MethodName string problems testutils.Problems }{ {"Valid", "/v1/{big_book.name=publishers/*/books/*}", "UpdateBigBook", nil}, {"InvalidNoUnderscore", "/v1/{bigbook.name=publishers/*/books/*}", "UpdateBigBook", testutils.Problems{{Message: "`big_book.name` field"}}}, {"InvalidVarNameBook", "/v1/{big_book=publishers/*/books/*}", "UpdateBigBook", testutils.Problems{{Message: "`big_book.name` field"}}}, {"InvalidVarNameName", "/v1/{name=publishers/*/books/*}", "UpdateBigBook", testutils.Problems{{Message: "`big_book.name` field"}}}, {"InvalidVarNameReversed", "/v1/{name.big_book=publishers/*/books/*}", "UpdateBigBook", testutils.Problems{{Message: "`big_book.name` field"}}}, {"NoVarName", "/v1/publishers/*/books/*", "UpdateBigBook", testutils.Problems{{Message: "`big_book.name` field"}}}, {"Irrelevant", "/v1/{book=publishers/*/books/*}", "AcquireBigBook", nil}, } for _, test := range tests { t.Run(test.testName, func(t *testing.T) { f := testutils.ParseProto3Tmpl(t, ` import "google/api/annotations.proto"; service Library { rpc {{.MethodName}}({{.MethodName}}Request) returns (BigBook) { option (google.api.http) = { patch: "{{.URI}}" }; } } message BigBook {} message {{.MethodName}}Request {} `, test) method := f.GetServices()[0].GetMethods()[0] if diff := test.problems.SetDescriptor(method).Diff(httpNameField.Lint(f)); diff != "" { t.Errorf(diff) } }) } }
explode_data.jsonl/75056
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 680 }
[ 2830, 3393, 2905, 675, 1877, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 18185, 675, 256, 914, 198, 197, 197, 10301, 286, 914, 198, 197, 197, 75648, 914, 198, 197, 197, 95340, 256, 1273, 6031, 7763, 21699, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDistSQLDeadHosts(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) skip.WithIssue(t, 49843, "test is too slow; we need to tweak timeouts so connections die faster (see #14376)") const n = 100 const numNodes = 5 tc := serverutils.StartTestCluster(t, numNodes, base.TestClusterArgs{ ReplicationMode: base.ReplicationManual, ServerArgs: base.TestServerArgs{UseDatabase: "test"}, }) defer tc.Stopper().Stop(context.Background()) db := tc.ServerConn(0) db.SetMaxOpenConns(1) r := sqlutils.MakeSQLRunner(db) r.Exec(t, "CREATE DATABASE test") r.Exec(t, "CREATE TABLE t (x INT PRIMARY KEY, xsquared INT)") for i := 0; i < numNodes; i++ { r.Exec(t, fmt.Sprintf("ALTER TABLE t SPLIT AT VALUES (%d)", n*i/5)) } for i := 0; i < numNodes; i++ { r.Exec(t, fmt.Sprintf( "ALTER TABLE t EXPERIMENTAL_RELOCATE VALUES (ARRAY[%d,%d,%d], %d)", i+1, (i+1)%5+1, (i+2)%5+1, n*i/5, )) } r.Exec(t, "SHOW RANGES FROM TABLE t") r.Exec(t, fmt.Sprintf("INSERT INTO t SELECT i, i*i FROM generate_series(1, %d) AS g(i)", n)) r.Exec(t, "SET DISTSQL = ON") // Run a query that uses the entire table and is easy to verify. runQuery := func() error { log.Infof(context.Background(), "running test query") var res int if err := db.QueryRow("SELECT sum(xsquared) FROM t").Scan(&res); err != nil { return err } if exp := (n * (n + 1) * (2*n + 1)) / 6; res != exp { t.Fatalf("incorrect result %d, expected %d", res, exp) } log.Infof(context.Background(), "test query OK") return nil } if err := runQuery(); err != nil { t.Error(err) } // Verify the plan (should include all 5 nodes). 
r.CheckQueryResults(t, "SELECT url FROM [EXPLAIN (DISTSQL) SELECT sum(xsquared) FROM t]", [][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJy8k09LwzAYxu9-CnlOCu9h7bo5e5rHHXQy9SQ91OalFLamJCkoo99d1iDaIskgo8f8-T2_PG1yRC0FP-UH1kjfEYEQgzAHIQFhgYzQKFmw1lKdtlhgIz6RzghV3bTmNJ0RCqkY6RGmMntGitf8Y887zgUrEASbvNr3kkZVh1x9rQ0I29ak1-sYWUeQrflJ6-h8z0NZKi5zI0eal7fHm3V0e3b0b2JbSyVYsRgEZt2F5dFE38_jCakQT1TB4wmpMJ-ogscTUiGZqILHc6mH-E_0jnUja82jBznMywgsSrZvWctWFfysZNGH2-G2391PCNbGrkZ2sKnt0ulYf-HICccDOBrDsdvsUc-ddOKGk5BzL5zw0m1ehpjvnPDKbV6FmO_d_2rmuSbuSzZ2Z93VdwAAAP__XTV6BQ=="}}, ) // Stop node 5. tc.StopServer(4) testutils.SucceedsSoon(t, runQuery) r.CheckQueryResults(t, "SELECT url FROM [EXPLAIN (DISTSQL) SELECT sum(xsquared) FROM t]", [][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJy8k8FK7DAYhff3KS5npZCF6dRx7KouZ6Ejo64ki9j8lEKnKUkKytB3lzaItkg60qHL5M93vpySHlFpRQ_yQBbJKzgYIjCswBBDMNRGZ2StNt3YH96qdyRXDEVVN67bFgyZNoTkCFe4kpDgWb6VtCepyIBBkZNF2QtqUxyk-UgdGHaNS_6nEUTLoBv3lday0z13eW4ol06PNE8v9xcpvzw5-juxqbRRZEgNAkV7Zjlf6PtNeOZUiBaqMOGZU2G1UIUJz7le8S_Re7K1riyNXvMwTzCQysn_CFY3JqNHo7M-3C93_el-Q5F1fsr9Ylv5UXetnzAPwtEA5mM4CsK3YfMqCMdhOJ5z7esgvA6b13PMN0F4EzZv_mQW7b_PAAAA__-DuA-E"}}, ) // Stop node 2; note that no range had replicas on both 2 and 5. tc.StopServer(1) testutils.SucceedsSoon(t, runQuery) r.CheckQueryResults(t, "SELECT url FROM [EXPLAIN (DISTSQL) SELECT sum(xsquared) FROM t]", [][]string{{"https://cockroachdb.github.io/distsqlplan/decode.html#eJy8kkFLwzAUx-9-CvmfFHIwXZ3QUz3uoJOpJ8khNo9S6JrykoIy-t2lDaItkk02dkxe_r_fe-Ht0FhDj3pLDtkbJAQWEEihBFq2BTlneSiFhyvzgexGoGrazg_XSqCwTMh28JWvCRle9HtNG9KGGAKGvK7qEd5ytdX8mXsIrDufXeYJVC9gO_9N68XhnvuyZCq1tzPN8-vDVS6vD0b_ELvGsiEmMwGq_sRyeab_2-M5ZoTkTCPs8ZxqBf5Ab8i1tnE0W4UpTwmQKSlskbMdF_TEthjh4bgeX48XhpwPVRkOqyaUhrZ-h2U0nEzCch5OouG7uHkRDafxcHpM27fR8DJuXv7LrPqLrwAAAP__vMyldA=="}}, ) }
explode_data.jsonl/43682
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1904 }
[ 2830, 3393, 23356, 6688, 28320, 9296, 82, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 692, 1903, 13389, 26124, 42006, 1155, 11, 220, 19, 24, 23, 19, 18, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReconciler_JobStopped(t *testing.T) { job := mock.Job() job.Stop = true cases := []struct { name string job *structs.Job jobID, taskGroup string }{ { name: "stopped job", job: job, jobID: job.ID, taskGroup: job.TaskGroups[0].Name, }, { name: "nil job", job: nil, jobID: "foo", taskGroup: "bar", }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { // Create 10 allocations var allocs []*structs.Allocation for i := 0; i < 10; i++ { alloc := mock.Alloc() alloc.Job = c.job alloc.JobID = c.jobID alloc.NodeID = uuid.Generate() alloc.Name = structs.AllocName(c.jobID, c.taskGroup, uint(i)) alloc.TaskGroup = c.taskGroup allocs = append(allocs, alloc) } reconciler := NewAllocReconciler(testLogger(), allocUpdateFnIgnore, false, c.jobID, c.job, nil, allocs, nil) r := reconciler.Compute() // Assert the correct results assertResults(t, r, &resultExpectation{ createDeployment: nil, deploymentUpdates: nil, place: 0, inplace: 0, stop: 10, desiredTGUpdates: map[string]*structs.DesiredUpdates{ c.taskGroup: { Stop: 10, }, }, }) assertNamesHaveIndexes(t, intRange(0, 9), stopResultsToNames(r.stop)) }) } }
explode_data.jsonl/67247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 686 }
[ 2830, 3393, 693, 40446, 5769, 10598, 674, 59803, 1155, 353, 8840, 836, 8, 341, 68577, 1669, 7860, 45293, 741, 68577, 30213, 284, 830, 271, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 1797, 914, 198, 197, 68577, 1060, 353, 1235, 82, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSymmetricGrant(t *testing.T) { ref := []*reference.Ref{{ Address: []byte("adddressss"), SecretKey: []byte("other secret"), }} grt, err := SymmetricGrant(ref, nil) assert.Error(t, err) assert.Nil(t, grt) secret := []byte("sshh") grt, err = SymmetricGrant(ref, secret) assert.Errorf(t, err, "SymmetricGrant cannot encrypt with a secret of size < %d", encryption.KeySize) assert.Nil(t, grt) secret = deriveSecret(t, []byte("sssshhhh")) grt, err = SymmetricGrant(ref, secret) assert.NoError(t, err) assert.NotNil(t, grt) refOut, err := SymmetricReference(grt, secret, versions.LatestGrantVersion) assert.NoError(t, err) assertRefsEqual(t, ref, refOut) }
explode_data.jsonl/50134
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 27912, 15903, 67971, 1155, 353, 8840, 836, 8, 341, 59504, 1669, 29838, 16291, 18369, 90, 515, 197, 98090, 25, 256, 3056, 3782, 445, 329, 631, 673, 778, 4461, 197, 7568, 50856, 1592, 25, 3056, 3782, 445, 1575, 6234, 4461, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLedgerProvider(t *testing.T) { env := newTestEnv(t) defer env.cleanup() numLedgers := 10 provider := testutilNewProvider(t) existingLedgerIDs, err := provider.List() assert.NoError(t, err) assert.Len(t, existingLedgerIDs, 0) genesisBlocks := make([]*common.Block, numLedgers) for i := 0; i < numLedgers; i++ { genesisBlock, _ := configtxtest.MakeGenesisBlock(constructTestLedgerID(i)) genesisBlocks[i] = genesisBlock provider.Create(genesisBlock) } existingLedgerIDs, err = provider.List() assert.NoError(t, err) assert.Len(t, existingLedgerIDs, numLedgers) provider.Close() provider = testutilNewProvider(t) defer provider.Close() ledgerIds, _ := provider.List() assert.Len(t, ledgerIds, numLedgers) t.Logf("ledgerIDs=%#v", ledgerIds) for i := 0; i < numLedgers; i++ { assert.Equal(t, constructTestLedgerID(i), ledgerIds[i]) } for i := 0; i < numLedgers; i++ { ledgerid := constructTestLedgerID(i) status, _ := provider.Exists(ledgerid) assert.True(t, status) ledger, err := provider.Open(ledgerid) assert.NoError(t, err) bcInfo, err := ledger.GetBlockchainInfo() ledger.Close() assert.NoError(t, err) assert.Equal(t, uint64(1), bcInfo.Height) // check that the genesis block was persisted in the provider's db s := provider.(*Provider).idStore gbBytesInProviderStore, err := s.db.Get(s.encodeLedgerKey(ledgerid)) assert.NoError(t, err) gb := &common.Block{} assert.NoError(t, proto.Unmarshal(gbBytesInProviderStore, gb)) assert.True(t, proto.Equal(gb, genesisBlocks[i]), "proto messages are not equal") } gb, _ := configtxtest.MakeGenesisBlock(constructTestLedgerID(2)) _, err = provider.Create(gb) assert.Equal(t, ErrLedgerIDExists, err) status, err := provider.Exists(constructTestLedgerID(numLedgers)) assert.NoError(t, err, "Failed to check for ledger existence") assert.Equal(t, status, false) _, err = provider.Open(constructTestLedgerID(numLedgers)) assert.Equal(t, ErrNonExistingLedgerID, err) }
explode_data.jsonl/2819
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 764 }
[ 2830, 3393, 60850, 1389, 5179, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 501, 2271, 14359, 1155, 340, 16867, 6105, 87689, 741, 22431, 60850, 10637, 1669, 220, 16, 15, 198, 197, 19979, 1669, 1273, 1314, 3564, 5179, 1155, 340, 8122, 1108...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNotificationsToMap(t *testing.T) { got := notificationsToMap(nil) want := map[string]*Notification{} if !testutil.Equal(got, want) { t.Errorf("got %+v, want %+v", got, want) } in := []*raw.Notification{ {Id: "a", Topic: "//pubsub.googleapis.com/projects/P1/topics/T1"}, {Id: "b", Topic: "//pubsub.googleapis.com/projects/P2/topics/T2"}, {Id: "c", Topic: "//pubsub.googleapis.com/projects/P3/topics/T3"}, } got = notificationsToMap(in) want = map[string]*Notification{ "a": &Notification{ID: "a", TopicProjectID: "P1", TopicID: "T1"}, "b": &Notification{ID: "b", TopicProjectID: "P2", TopicID: "T2"}, "c": &Notification{ID: "c", TopicProjectID: "P3", TopicID: "T3"}, } if diff := testutil.Diff(got, want); diff != "" { t.Errorf("got=-, want=+:\n%s", diff) } }
explode_data.jsonl/57371
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 349 }
[ 2830, 3393, 34736, 1249, 2227, 1155, 353, 8840, 836, 8, 341, 3174, 354, 1669, 21969, 1249, 2227, 27907, 340, 50780, 1669, 2415, 14032, 8465, 11196, 16094, 743, 753, 1944, 1314, 12808, 3268, 354, 11, 1366, 8, 341, 197, 3244, 13080, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestVisitFileInfoShouldNotAddFilesToWatcher(t *testing.T) { // mock mockFileInfo := &MockFileInfo{} mockFileInfo.On("IsDir").Return(false) fw := NewFsWatcher() fw.watcher, _ = fsnotify.NewWatcher() invoked := PatchAdd() // act fw.visitFileInfo("path", mockFileInfo, nil) // assert assert.False(t, *invoked) // cleanup monkey.UnpatchAll() }
explode_data.jsonl/75740
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 26218, 45430, 14996, 2623, 2212, 10809, 1249, 47248, 1155, 353, 8840, 836, 8, 341, 197, 322, 7860, 198, 77333, 45430, 1669, 609, 11571, 45430, 16094, 77333, 45430, 8071, 445, 3872, 6184, 1827, 5598, 3576, 340, 1166, 86, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFunction_WriteIdempotency(t *testing.T) { root, rm := Mktemp(t) defer rm() client := fn.New(fn.WithRegistry(TestRegistry)) // Create a function f := fn.Function{ Runtime: TestRuntime, Root: root, } if err := client.Create(f); err != nil { t.Fatal(err) } // Load the function and write it again f1, err := fn.NewFunction(root) if err != nil { t.Fatal(err) } if err := f1.Write(); err != nil { t.Fatal(err) } // Load it again and compare f2, err := fn.NewFunction(root) if err != nil { t.Fatal(err) } if diff := cmp.Diff(f1, f2); diff != "" { t.Error("function differs after reload (-before, +after):", diff) } }
explode_data.jsonl/76495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 273 }
[ 2830, 3393, 5152, 31825, 764, 3262, 354, 2251, 1155, 353, 8840, 836, 8, 341, 33698, 11, 18998, 1669, 52848, 3888, 1155, 340, 16867, 18998, 741, 25291, 1669, 5168, 7121, 24008, 26124, 15603, 31159, 15603, 4390, 197, 322, 4230, 264, 729, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidate_AnonymousOperationMustBeAlone_MultipleAnonOperations(t *testing.T) { testutil.ExpectFailsRule(t, graphql.LoneAnonymousOperationRule, ` { fieldA } { fieldB } `, []gqlerrors.FormattedError{ testutil.RuleError(`This anonymous operation must be the only defined operation.`, 2, 7), testutil.RuleError(`This anonymous operation must be the only defined operation.`, 5, 7), }) }
explode_data.jsonl/78446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 17926, 1566, 6280, 89392, 8432, 31776, 3430, 2101, 603, 1245, 12229, 2082, 263, 35120, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 81893, 37, 6209, 11337, 1155, 11, 48865, 1214, 603, 32684, 8432, 11337, 11, 22074, 414, 341, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestManagerCancel(t *testing.T) { manager := Manager{} benchlist := benchlist.NewNoBenchlist() err := manager.Initialize( &timer.AdaptiveTimeoutConfig{ InitialTimeout: time.Millisecond, MinimumTimeout: time.Millisecond, MaximumTimeout: 10 * time.Second, TimeoutCoefficient: 1.25, TimeoutHalflife: 5 * time.Minute, }, benchlist, "", prometheus.NewRegistry(), ) if err != nil { t.Fatal(err) } go manager.Dispatch() wg := sync.WaitGroup{} wg.Add(1) fired := new(bool) id := ids.GenerateTestID() manager.RegisterRequest(ids.ShortID{}, ids.ID{}, message.PullQuery, id, func() { *fired = true }) manager.RegisterResponse(ids.ShortID{}, ids.ID{}, id, message.Get, 1*time.Second) manager.RegisterRequest(ids.ShortID{}, ids.ID{}, message.PullQuery, ids.GenerateTestID(), wg.Done) wg.Wait() if *fired { t.Fatalf("Should have cancelled the function") } }
explode_data.jsonl/43232
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 2043, 9269, 1155, 353, 8840, 836, 8, 341, 92272, 1669, 10567, 16094, 2233, 19762, 1607, 1669, 13425, 1607, 7121, 2753, 33, 19762, 1607, 741, 9859, 1669, 6645, 45829, 1006, 197, 197, 5, 19278, 17865, 27781, 7636, 2648, 515, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainerNextStateWithPullCredentials(t *testing.T) { testCases := []struct { containerCurrentStatus apicontainerstatus.ContainerStatus containerDesiredStatus apicontainerstatus.ContainerStatus expectedContainerStatus apicontainerstatus.ContainerStatus credentialsID string useExecutionRole bool expectedTransitionActionable bool expectedTransitionReason error }{ // NONE -> RUNNING transition is not allowed when container is waiting for credentials {apicontainerstatus.ContainerStatusNone, apicontainerstatus.ContainerRunning, apicontainerstatus.ContainerStatusNone, "not_existed", true, false, dependencygraph.CredentialsNotResolvedErr}, // NONE -> RUNNING transition is allowed when the required execution credentials existed {apicontainerstatus.ContainerStatusNone, apicontainerstatus.ContainerRunning, apicontainerstatus.ContainerPulled, "existed", true, true, nil}, // PULLED -> RUNNING transition is allowed even the credentials is required {apicontainerstatus.ContainerPulled, apicontainerstatus.ContainerRunning, apicontainerstatus.ContainerCreated, "not_existed", true, true, nil}, // NONE -> STOPPED transition is allowed even the credentials is required {apicontainerstatus.ContainerStatusNone, apicontainerstatus.ContainerStopped, apicontainerstatus.ContainerStopped, "not_existed", true, false, nil}, // NONE -> RUNNING transition is allowed when the container doesn't use execution credentials {apicontainerstatus.ContainerStatusNone, apicontainerstatus.ContainerRunning, apicontainerstatus.ContainerPulled, "not_existed", false, true, nil}, } taskEngine := &DockerTaskEngine{ credentialsManager: credentials.NewManager(), } err := taskEngine.credentialsManager.SetTaskCredentials(&credentials.TaskIAMRoleCredentials{ ARN: "taskarn", IAMRoleCredentials: credentials.IAMRoleCredentials{ CredentialsID: "existed", SessionToken: "token", AccessKeyID: "id", SecretAccessKey: "accesskey", }, }) assert.NoError(t, err, "setting task credentials failed") for _, tc := range 
testCases { t.Run(fmt.Sprintf("%s to %s transition with useExecutionRole %v and credentials %s", tc.containerCurrentStatus.String(), tc.containerDesiredStatus.String(), tc.useExecutionRole, tc.credentialsID), func(t *testing.T) { container := &apicontainer.Container{ DesiredStatusUnsafe: tc.containerDesiredStatus, KnownStatusUnsafe: tc.containerCurrentStatus, RegistryAuthentication: &apicontainer.RegistryAuthenticationData{ Type: "ecr", ECRAuthData: &apicontainer.ECRAuthData{ UseExecutionRole: tc.useExecutionRole, }, }, } task := &managedTask{ Task: &apitask.Task{ Containers: []*apicontainer.Container{ container, }, ExecutionCredentialsID: tc.credentialsID, DesiredStatusUnsafe: apitaskstatus.TaskRunning, }, engine: taskEngine, credentialsManager: taskEngine.credentialsManager, } transition := task.containerNextState(container) assert.Equal(t, tc.expectedContainerStatus, transition.nextState, "Mismatch container status") assert.Equal(t, tc.expectedTransitionReason, transition.reason, "Mismatch transition possible") assert.Equal(t, tc.expectedTransitionActionable, transition.actionRequired, "Mismatch transition actionable") }) } }
explode_data.jsonl/24570
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1156 }
[ 2830, 3393, 4502, 5847, 1397, 2354, 36068, 27025, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 53290, 5405, 2522, 981, 1443, 51160, 1743, 2829, 33672, 2522, 198, 197, 53290, 4896, 2690, 2522, 981, 1443, 51160, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrincipalRoleWrongMSP(t *testing.T) { msp1, err := setup("testdata/idemix/MSP1OU1", "MSP1OU1") assert.NoError(t, err) id1, err := getDefaultSigner(msp1) assert.NoError(t, err) principalBytes, err := proto.Marshal(&msp.MSPRole{Role: msp.MSPRole_MEMBER, MspIdentifier: "OtherMSP"}) assert.NoError(t, err) principal := &msp.MSPPrincipal{ PrincipalClassification: msp.MSPPrincipal_ROLE, Principal: principalBytes} err = id1.SatisfiesPrincipal(principal) assert.Error(t, err, "Role MSP principal should have failed for user of different MSP") assert.Contains(t, err.Error(), "the identity is a member of a different MSP") }
explode_data.jsonl/46050
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 31771, 9030, 29185, 44, 4592, 1155, 353, 8840, 836, 8, 341, 47691, 79, 16, 11, 1848, 1669, 6505, 445, 92425, 38146, 336, 941, 10270, 4592, 16, 11922, 16, 497, 330, 44, 4592, 16, 11922, 16, 1138, 6948, 35699, 1155, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMarkToCheck(t *testing.T) { i := Incident{ Timer: "24h", CreateAt: "2020-05-17T20:21:27Z", ToCheck: "N", } err := i.MarkToCheck() assert.Equal(t, i.ToCheck, "Y") assert.Nil(t, err) i.CreateAt = time.Now().Format("2006-01-02T15:04:05Z") i.ToCheck = "N" err = i.MarkToCheck() assert.Equal(t, i.ToCheck, "N") assert.Nil(t, err) }
explode_data.jsonl/22834
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 8949, 1249, 3973, 1155, 353, 8840, 836, 8, 341, 8230, 1669, 68735, 515, 197, 197, 10105, 25, 262, 330, 17, 19, 71, 756, 197, 75569, 1655, 25, 330, 17, 15, 17, 15, 12, 15, 20, 12, 16, 22, 51, 17, 15, 25, 17, 16, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRelativeAddress(t *testing.T) { parent := "relationships.xml" template := "relatives-and-associates.xml" xpath := "RelativesAndAssociates/Relatives/Relative[1]/Address" // Relative is alive form := newForm(t, "relationships/relationships-relatives.json", ) form = extractPart(t, form, templateContext(t, parent, template)) snippet := applyForm(t, template, form) assertHas1(t, template, xpath, snippet) // Relative is deceased form = newForm(t, "relative-deceased.json", ) form = extractPart(t, form, templateContext(t, parent, template)) snippet = applyForm(t, template, form) assertHasNone(t, template, xpath, snippet) }
explode_data.jsonl/23953
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 28442, 4286, 1155, 353, 8840, 836, 8, 341, 24804, 1669, 330, 85824, 9028, 698, 22832, 1669, 330, 3748, 5859, 9777, 12, 24313, 973, 9028, 698, 10225, 2343, 1669, 330, 6740, 5859, 3036, 29815, 973, 14, 6740, 5859, 14, 28442, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHandshakeReplayAll(t *testing.T) { sim := setupSimulator(t) for _, m := range modes { testHandshakeReplay(t, sim, 0, m, false) } for _, m := range modes { testHandshakeReplay(t, sim, 0, m, true) } }
explode_data.jsonl/6598
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 2314, 29661, 693, 1363, 2403, 1155, 353, 8840, 836, 8, 341, 1903, 318, 1669, 6505, 14027, 10511, 1155, 692, 2023, 8358, 296, 1669, 2088, 19777, 341, 197, 18185, 2314, 29661, 693, 1363, 1155, 11, 1643, 11, 220, 15, 11, 296,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestUpdate drives stack_mgr.StackMeta.Update against a fully mocked
// backend: an SSM mock that returns an existing stack list, two workspace
// mocks whose GetTags calls feed the priority computation, and a workspace
// repo mock wired with gomock.InOrder. It then checks that the
// "happy/meta/imagetags" tag is stored as "{}" when no image tags are given,
// and as the JSON encoding of the supplied map otherwise.
func TestUpdate(t *testing.T) { ctx := context.Background() r := require.New(t) ctrl := gomock.NewController(t) bootstrapConfig := &config.Bootstrap{ HappyConfigPath: testFilePath, DockerComposeConfigPath: testDockerComposePath, Env: "rdev", } config, err := config.NewHappyConfig(bootstrapConfig) r.NoError(err) dataMap := map[string]string{ "app": config.App(), "env": config.GetEnv(), "instance": "test-stack", } tagMap := map[string]string{ "app": "happy/app", "env": "happy/env", "instance": "happy/instance", "owner": "happy/meta/owner", "priority": "happy/meta/priority", "slice": "happy/meta/slice", "imagetag": "happy/meta/imagetag", "imagetags": "happy/meta/imagetags", "configsecret": "happy/meta/configsecret", "created": "happy/meta/created-at", "updated": "happy/meta/updated-at", } paramMap := map[string]string{ "instance": "stack_name", "slice": "slice", "priority": "priority", "imagetag": "image_tag", "imagetags": "image_tags", "configsecret": "happy_config_secret", } stackMeta := &stack_mgr.StackMeta{ StackName: "test-stack", DataMap: dataMap, TagMap: tagMap, ParamMap: paramMap, } // mock the backend ssmMock := interfaces.NewMockSSMAPI(ctrl) retVal := "[\"stack_1\",\"stack_2\"]" ret := &ssm.GetParameterOutput{ Parameter: &ssmtypes.Parameter{Value: &retVal}, } ssmMock.EXPECT().GetParameter(gomock.Any(), gomock.Any()).Return(ret, nil) // mock the workspace GetTags method, used in setPriority() mockWorkspace1 := mocks.NewMockWorkspace(ctrl) mockWorkspace1.EXPECT().GetTags().Return(map[string]string{"tag-1": "testing-1"}, nil) mockWorkspace2 := mocks.NewMockWorkspace(ctrl) mockWorkspace2.EXPECT().GetTags().Return(map[string]string{"tag-2": "testing-2"}, nil) // mock the executor mockWorkspaceRepo := mocks.NewMockWorkspaceRepoIface(ctrl) first := mockWorkspaceRepo.EXPECT().GetWorkspace(gomock.Any(), gomock.Any()).Return(mockWorkspace1, nil) second := mockWorkspaceRepo.EXPECT().GetWorkspace(gomock.Any(), gomock.Any()).Return(mockWorkspace2, nil) 
gomock.InOrder(first, second) backend, err := testbackend.NewBackend(ctx, ctrl, config, backend.WithSSMClient(ssmMock)) r.NoError(err) stackMgr := stack_mgr.NewStackService().WithBackend(backend).WithWorkspaceRepo(mockWorkspaceRepo) err = stackMeta.Update(ctx, "test-tag", make(map[string]string), "", stackMgr) r.NoError(err) r.Equal("{}", stackMeta.GetTags()["happy/meta/imagetags"]) err = stackMeta.Update(ctx, "test-tag", map[string]string{"foo": "bar"}, "", stackMgr) r.NoError(err) r.Equal("{\"foo\":\"bar\"}", stackMeta.GetTags()["happy/meta/imagetags"]) }
explode_data.jsonl/3291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1158 }
[ 2830, 3393, 4289, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 7000, 1669, 1373, 7121, 1155, 340, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 692, 197, 6281, 2648, 1669, 609, 1676, 13, 45511, 515, 197, 13292, 11144, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestServer_Response_Empty_Data_Not_FlowControlled verifies that when a
// handler flushes without writing any body, the server still sends a
// zero-length DATA frame carrying END_STREAM — even though the client has set
// the initial flow-control window to 0 (empty DATA frames consume no window).
func TestServer_Response_Empty_Data_Not_FlowControlled(t *testing.T) { testServerResponse(t, func(w http.ResponseWriter, r *http.Request) error { w.(http.Flusher).Flush() // Nothing; send empty DATA return nil }, func(st *serverTester) { // Handler gets no data quota: if err := st.fr.WriteSettings(Setting{SettingInitialWindowSize, 0}); err != nil { t.Fatal(err) } st.wantSettingsAck() getSlash(st) // make the single request hf := st.wantHeaders() if hf.StreamEnded() { t.Fatal("unexpected END_STREAM flag") } if !hf.HeadersEnded() { t.Fatal("want END_HEADERS flag") } df := st.wantData() if got := len(df.Data()); got != 0 { t.Fatalf("unexpected %d DATA bytes; want 0", got) } if !df.StreamEnded() { t.Fatal("DATA didn't have END_STREAM") } }) }
explode_data.jsonl/71672
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 5475, 65873, 76060, 1595, 17817, 60816, 1400, 10303, 3273, 832, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 2582, 1155, 11, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 1465, 341, 197, 6692, 12832, 1254, 61559, 261, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChinaIPJson parses a "chinaip" routing rule from its JSON form, checks
// the tag round-trips, and verifies the built condition matches four sample
// destinations (the inline comments name the sites the addresses belonged to
// when the test was written) while rejecting 8.8.8.8.
func TestChinaIPJson(t *testing.T) { assert := assert.On(t) rule := ParseRule([]byte(`{ "type": "chinaip", "outboundTag": "x" }`)) assert.String(rule.Tag).Equals("x") cond, err := rule.BuildCondition() assert.Error(err).IsNil() assert.Bool(cond.Apply(&proxy.SessionInfo{ Destination: v2net.TCPDestination(v2net.ParseAddress("121.14.1.189"), 80), })).IsTrue() // sina.com.cn assert.Bool(cond.Apply(&proxy.SessionInfo{ Destination: v2net.TCPDestination(v2net.ParseAddress("101.226.103.106"), 80), })).IsTrue() // qq.com assert.Bool(cond.Apply(&proxy.SessionInfo{ Destination: v2net.TCPDestination(v2net.ParseAddress("115.239.210.36"), 80), })).IsTrue() // image.baidu.com assert.Bool(cond.Apply(&proxy.SessionInfo{ Destination: v2net.TCPDestination(v2net.ParseAddress("120.135.126.1"), 80), })).IsTrue() assert.Bool(cond.Apply(&proxy.SessionInfo{ Destination: v2net.TCPDestination(v2net.ParseAddress("8.8.8.8"), 80), })).IsFalse() }
explode_data.jsonl/30774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 22282, 3298, 5014, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 8071, 1155, 692, 7000, 1111, 1669, 14775, 11337, 10556, 3782, 5809, 515, 262, 330, 1313, 788, 330, 80136, 573, 756, 262, 330, 411, 10891, 5668, 788, 330, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestShouldOverrideHomeEnv table-drives ShouldOverrideHomeEnv against three
// feature-flag ConfigMaps: a missing disable-home-env-overwrite key defaults
// to true, an explicit "false" also yields true, and an explicit "true"
// disables the override (yields false).
func TestShouldOverrideHomeEnv(t *testing.T) { for _, tc := range []struct { description string configMap *corev1.ConfigMap expected bool }{{ description: "Default behaviour: A missing disable-home-env-overwrite flag should result in true", configMap: &corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{Name: config.GetFeatureFlagsConfigName(), Namespace: system.Namespace()}, Data: map[string]string{}, }, expected: true, }, { description: "Setting disable-home-env-overwrite to false should result in true", configMap: &corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{Name: config.GetFeatureFlagsConfigName(), Namespace: system.Namespace()}, Data: map[string]string{ featureFlagDisableHomeEnvKey: "false", }, }, expected: true, }, { description: "Setting disable-home-env-overwrite to true should result in false", configMap: &corev1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{Name: config.GetFeatureFlagsConfigName(), Namespace: system.Namespace()}, Data: map[string]string{ featureFlagDisableHomeEnvKey: "true", }, }, expected: false, }} { t.Run(tc.description, func(t *testing.T) { store := config.NewStore(logtesting.TestLogger(t)) store.OnConfigChanged(tc.configMap) if result := ShouldOverrideHomeEnv(store.ToContext(context.Background())); result != tc.expected { t.Errorf("Expected %t Received %t", tc.expected, result) } }) } }
explode_data.jsonl/82158
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 519 }
[ 2830, 3393, 14996, 2177, 7623, 14359, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 42407, 914, 198, 197, 25873, 2227, 256, 353, 98645, 16, 10753, 2227, 198, 197, 42400, 262, 1807, 198, 197, 15170, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGetOrderStatus(t *testing.T) { t.Parallel() _, err := b.GetOrderStatus(1337) if err == nil { t.Error("Test Failed - GetOrderStatus() error") } }
explode_data.jsonl/79951
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 1949, 4431, 2522, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 6878, 1848, 1669, 293, 2234, 4431, 2522, 7, 16, 18, 18, 22, 340, 743, 1848, 621, 2092, 341, 197, 3244, 6141, 445, 2271, 21379, 481, 2126, 4431...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestAbsCollection_Index(t *testing.T) { intColl := NewIntCollection([]int{1, 2}) foo := intColl.Index(1) i, err := foo.ToInt() if err != nil { t.Fatal("Index 类型错误") } if i != 2 { t.Fatal("Index 值错误") } }
explode_data.jsonl/66429
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 27778, 6482, 50361, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 3518, 197, 7975, 1669, 526, 15265, 18338, 7, 16, 340, 8230, 11, 1848, 1669, 15229, 15071, 741, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestIteratorPrev(t *testing.T) { const n = 100 l := NewSkiplist(arenaSize, new(y.DefaultKeyComparator)) defer l.DecrRef() it := l.NewIterator() defer it.Close() require.False(t, it.Valid()) it.SeekToFirst() require.False(t, it.Valid()) for i := 0; i < n; i++ { l.Put(y.KeyWithTs([]byte(fmt.Sprintf("%05d", i)), 0), y.ValueStruct{Value: newValue(i), Meta: 0, UserMeta: 0}) } it.SeekToLast() for i := n - 1; i >= 0; i-- { require.True(t, it.Valid()) v := it.Value() require.EqualValues(t, newValue(i), v.Value) it.Prev() } require.False(t, it.Valid()) }
explode_data.jsonl/49655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 11951, 33528, 1155, 353, 8840, 836, 8, 341, 4777, 308, 284, 220, 16, 15, 15, 198, 8810, 1669, 1532, 50, 6642, 39934, 7, 30527, 1695, 11, 501, 7021, 13275, 1592, 38658, 1171, 16867, 326, 22442, 81, 3945, 741, 23374, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestConnectionIDs observes frame message IDs through a frame relay: two
// pings over a fresh connection must use IDs 1 and 2 in both directions.
// When no relay is in play, it then pings back over the reused inbound
// connection and expects ID 1, since that connection has not originated any
// outbound requests yet.
func TestConnectionIDs(t *testing.T) { testutils.WithTestServer(t, nil, func(t testing.TB, ts *testutils.TestServer) { var inbound, outbound []uint32 relayFunc := func(outgoing bool, f *Frame) *Frame { if outgoing { outbound = append(outbound, f.Header.ID) } else { inbound = append(inbound, f.Header.ID) } return f } relay, shutdown := testutils.FrameRelay(t, ts.HostPort(), relayFunc) defer shutdown() ctx, cancel := NewContext(time.Second) defer cancel() s2 := ts.NewServer(nil) require.NoError(t, s2.Ping(ctx, relay), "Ping failed") assert.Equal(t, []uint32{1, 2}, outbound, "Unexpected outbound IDs") assert.Equal(t, []uint32{1, 2}, inbound, "Unexpected outbound IDs") // We want to reuse the same connection for the rest of the test which // only makes sense when the relay is not used. if ts.HasRelay() { return } inbound = nil outbound = nil // We will reuse the inbound connection, but since the inbound connection // hasn't originated any outbound requests, we'll use id 1. require.NoError(t, ts.Server().Ping(ctx, s2.PeerInfo().HostPort), "Ping failed") assert.Equal(t, []uint32{1}, outbound, "Unexpected outbound IDs") assert.Equal(t, []uint32{1}, inbound, "Unexpected outbound IDs") }) }
explode_data.jsonl/78201
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 4526, 30466, 1155, 353, 8840, 836, 8, 341, 18185, 6031, 26124, 2271, 5475, 1155, 11, 2092, 11, 2915, 1155, 7497, 836, 33, 11, 10591, 353, 1944, 6031, 8787, 5475, 8, 341, 197, 2405, 64943, 11, 73795, 3056, 2496, 18, 17, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInventoryCommand(t *testing.T) { var s state r := strings.NewReader(exampleStateFile) err := s.read(r) assert.NoError(t, err) // Run the command, capture the output var stdout, stderr bytes.Buffer exitCode := cmdInventory(&stdout, &stderr, &s) assert.Equal(t, 0, exitCode) assert.Equal(t, "", stderr.String()) assert.Equal(t, expectedInventoryOutput, stdout.String()) }
explode_data.jsonl/58809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 146 }
[ 2830, 3393, 22319, 4062, 1155, 353, 8840, 836, 8, 341, 2405, 274, 1584, 198, 7000, 1669, 9069, 68587, 66203, 1397, 1703, 340, 9859, 1669, 274, 4125, 2601, 340, 6948, 35699, 1155, 11, 1848, 692, 197, 322, 6452, 279, 3210, 11, 12322, 27...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestErrors_Error exercises errors.Error's message formatting through four
// hand-built cases: a custom Err, an empty ExpectedTokens list (unexpected
// additional tokens), an unexpected EOF token, and a nominal "expected one
// of …" list — each checked against its exact rendered string.
func TestErrors_Error(t *testing.T) { // Direct testing by manually constructing Error objects. t.Run("custom error", func(t *testing.T) { err := &errors.Error{ErrorToken: mockToken(999, "", 6, 7), Err: fmt.Errorf("source on fire")} if err == nil { t.Fatalf("failed to produce an error") } assertEqual(t, "6:7: error: source on fire", err.Error()) }) t.Run("no tokens", func(t *testing.T) { err := &errors.Error{ErrorToken: mockToken(888, "biscuit", 10, 12), ExpectedTokens: []string{}} assertEqual(t, "10:12: error: unexpected additional tokens; got: \"biscuit\"", err.Error()) }) t.Run("unexpected EOF", func(t *testing.T) { err := &errors.Error{ErrorToken: mockToken(token.EOF, "", 7, 11), ExpectedTokens: []string{"something-else"}} assertEqual(t, "7:11: error: expected something-else; got: end-of-file", err.Error()) }) t.Run("nominal error", func(t *testing.T) { err := &errors.Error{ErrorToken: mockToken(100, "42", 7, 6), ExpectedTokens: []string{"var", "let", "struct"}} assertEqual(t, "7:6: error: expected one of var, let or struct; got: \"42\"", err.Error()) }) }
explode_data.jsonl/29098
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 13877, 28651, 1155, 353, 8840, 836, 8, 341, 197, 322, 7139, 7497, 553, 20083, 49353, 4600, 6171, 624, 3244, 16708, 445, 9163, 1465, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 9859, 1669, 609, 7650, 6141, 90, 1454, 3323,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestProject_String builds a Project with every field set to its zero-ish
// wrapped value and compares Project.String against the exact golden
// representation (including the zero github.Timestamp rendering).
func TestProject_String(t *testing.T) { v := Project{ ID: Int64(0), URL: String(""), HTMLURL: String(""), ColumnsURL: String(""), OwnerURL: String(""), Name: String(""), Body: String(""), Number: Int(0), State: String(""), CreatedAt: &Timestamp{}, UpdatedAt: &Timestamp{}, NodeID: String(""), Creator: &User{}, } want := `github.Project{ID:0, URL:"", HTMLURL:"", ColumnsURL:"", OwnerURL:"", Name:"", Body:"", Number:0, State:"", CreatedAt:github.Timestamp{0001-01-01 00:00:00 +0000 UTC}, UpdatedAt:github.Timestamp{0001-01-01 00:00:00 +0000 UTC}, NodeID:"", Creator:github.User{}}` if got := v.String(); got != want { t.Errorf("Project.String = %v, want %v", got, want) } }
explode_data.jsonl/33268
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 7849, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 5787, 515, 197, 29580, 25, 260, 1333, 21, 19, 7, 15, 1326, 197, 79055, 25, 286, 923, 445, 4461, 197, 197, 5835, 3144, 25, 262, 923, 445, 4461, 197, 197, 13965, 314...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEntry_SelectSnapRight(t *testing.T) { e, window := setupSelection(false) defer teardownImageTest(window) c := window.Canvas() assert.Equal(t, 1, e.CursorRow) assert.Equal(t, 5, e.CursorColumn) test.AssertImageMatches(t, "entry/selection_initial.png", c.Capture()) typeKeys(e, keyShiftLeftUp, fyne.KeyRight) assert.Equal(t, 1, e.CursorRow) assert.Equal(t, 5, e.CursorColumn) test.AssertImageMatches(t, "entry/selection_snap_right.png", c.Capture()) }
explode_data.jsonl/57320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 5874, 58073, 61871, 5979, 1155, 353, 8840, 836, 8, 341, 7727, 11, 3241, 1669, 6505, 11177, 3576, 340, 16867, 49304, 1906, 2271, 15906, 340, 1444, 1669, 3241, 54121, 2822, 6948, 12808, 1155, 11, 220, 16, 11, 384, 29929, 3102,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecoder_EnumInvalidSymbol(t *testing.T) { defer ConfigTeardown() data := []byte{0x04} schema := `{"type":"enum", "name": "test", "symbols": ["foo", "bar"]}` dec, _ := avro.NewDecoder(schema, bytes.NewReader(data)) var got string err := dec.Decode(&got) assert.Error(t, err) }
explode_data.jsonl/22093
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 20732, 62, 10766, 7928, 15090, 1155, 353, 8840, 836, 8, 341, 16867, 5532, 6639, 37496, 2822, 8924, 1669, 3056, 3782, 90, 15, 87, 15, 19, 532, 1903, 3416, 1669, 1565, 4913, 1313, 3252, 9018, 497, 330, 606, 788, 330, 1944, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeriveChainID(t *testing.T) { cases := []struct { name string chainID *big.Int expected *big.Int }{ {"0x1c", hexutil.MustDecodeBig("0x1c"), big.NewInt(0)}, {"0x29", hexutil.MustDecodeBig("0x29"), big.NewInt(3)}, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { assert.EqualValues(t, tc.expected, deriveChainID(tc.chainID)) }) } }
explode_data.jsonl/78291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 22171, 533, 18837, 915, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 197, 8819, 915, 220, 353, 16154, 7371, 198, 197, 42400, 353, 16154, 7371, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGap_requestDrop parses the first 10 bytes of a DNS-over-TCP request,
// then reports a 10-byte gap in the same direction: GapInStream must signal
// drop=true, and after the FIN no transaction may be published on the
// results channel (the receive after close yields nil).
func TestGap_requestDrop(t *testing.T) { var private protos.ProtocolData dns := newDns(testing.Verbose()) q := sophosTxtTcp.request[:10] packet := newPacket(forward, q) tcptuple := testTcpTuple() private = dns.Parse(packet, tcptuple, tcp.TcpDirectionOriginal, private) private, drop := dns.GapInStream(tcptuple, tcp.TcpDirectionOriginal, 10, private) assert.Equal(t, true, drop) dns.ReceivedFin(tcptuple, tcp.TcpDirectionOriginal, private) client := dns.results.(*publish.ChanTransactions) close(client.Channel) mapStr := <-client.Channel assert.Nil(t, mapStr, "No result should have been published.") }
explode_data.jsonl/68709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 12868, 7893, 19871, 1155, 353, 8840, 836, 8, 341, 2405, 869, 1724, 436, 54096, 1043, 198, 2698, 4412, 1669, 501, 35, 4412, 8623, 287, 42505, 8297, 2398, 18534, 1669, 18701, 436, 35629, 77536, 8223, 3447, 16, 15, 921, 68802, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultiErrorMultipleErrors(t *testing.T) { err := NewMultiError() for _, errMsg := range []string{"foo", "bar", "baz"} { err = err.Add(errors.New(errMsg)) } err = err.Add(nil) final := err.FinalError() require.NotNil(t, final) require.Equal(t, final.Error(), "foo\nbar\nbaz") last := err.LastError() require.NotNil(t, last) require.Equal(t, last.Error(), "baz") require.False(t, err.Empty()) require.Equal(t, 3, err.NumErrors()) }
explode_data.jsonl/27154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 20358, 1454, 32089, 13877, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 1532, 20358, 1454, 741, 2023, 8358, 60078, 1669, 2088, 3056, 917, 4913, 7975, 497, 330, 2257, 497, 330, 42573, 9207, 341, 197, 9859, 284, 1848, 1904, 38881...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSaveToFileAndnewDeckFromFile(t *testing.T) { fileName := "_decktesting" os.Remove(fileName) d := newDeck() d.saveToFile(fileName) loadedDeck := newDeckFromFile(fileName) if len(loadedDeck) != 16 { t.Errorf("Expected 16 cards in deck, got %v", len(loadedDeck)) } os.Remove(fileName) }
explode_data.jsonl/479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 8784, 41550, 3036, 931, 39368, 43633, 1155, 353, 8840, 836, 8, 341, 17661, 675, 1669, 9000, 33425, 8840, 698, 25078, 13270, 23014, 692, 2698, 1669, 501, 39368, 741, 2698, 5681, 41550, 23014, 692, 197, 15589, 39368, 1669, 501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGroup(t *testing.T) { test.DropPrivilege(t) defer test.ResetPrivilege(t) var gids []int uid := os.Getuid() _, err := Group("/fake", uid, gids) if err == nil { t.Errorf("should have failed with bad group file") } _, err = Group("/etc/group", uid, gids) if err != nil { t.Errorf("should have passed with correct group file") } // with an empty file f, err := ioutil.TempFile("", "empty-group-") if err != nil { t.Error(err) } emptyGroup := f.Name() defer os.Remove(emptyGroup) f.Close() _, err = Group(emptyGroup, uid, gids) if err != nil { t.Error(err) } }
explode_data.jsonl/74250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 2808, 1155, 353, 8840, 836, 8, 341, 18185, 21688, 32124, 42769, 1155, 340, 16867, 1273, 36660, 32124, 42769, 1155, 692, 2405, 342, 3365, 3056, 396, 198, 197, 2423, 1669, 2643, 2234, 2423, 2822, 197, 6878, 1848, 1669, 5737, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGlobalDriver(t *testing.T) { foo := DriverDef{Name: "foo"} globalRegistry = newRegistry() if err := Register(foo); err != nil { t.Errorf("Register = %v, expected nil", err) } d := Driver("foo") if d.Empty() { t.Errorf("driver.Empty = true, expected false") } d = Driver("bar") if !d.Empty() { t.Errorf("driver.Empty = false, expected true") } }
explode_data.jsonl/15480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 11646, 11349, 1155, 353, 8840, 836, 8, 341, 197, 7975, 1669, 14577, 2620, 63121, 25, 330, 7975, 16707, 18842, 15603, 284, 501, 15603, 2822, 743, 1848, 1669, 8451, 71880, 1215, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 869...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_MapContainsPossibleKey(t *testing.T) { dtest.C(t, func(t *dtest.T) { m := d.Map{ "name": "guo", "NickName": "john", } t.Assert(dutil.MapContainsPossibleKey(m, "name"), true) t.Assert(dutil.MapContainsPossibleKey(m, "NAME"), true) t.Assert(dutil.MapContainsPossibleKey(m, "nickname"), true) t.Assert(dutil.MapContainsPossibleKey(m, "nick name"), true) t.Assert(dutil.MapContainsPossibleKey(m, "nick_name"), true) t.Assert(dutil.MapContainsPossibleKey(m, "nick-name"), true) t.Assert(dutil.MapContainsPossibleKey(m, "nick.name"), true) t.Assert(dutil.MapContainsPossibleKey(m, "none"), false) }) }
explode_data.jsonl/16857
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 56992, 23805, 65222, 1592, 1155, 353, 8840, 836, 8, 341, 2698, 1944, 727, 1155, 11, 2915, 1155, 353, 67, 1944, 836, 8, 341, 197, 2109, 1669, 294, 10104, 515, 298, 197, 31486, 788, 257, 330, 8717, 78, 756, 298, 197, 1, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOpAnd registers the "and" operator on a fresh jsonlogic instance and
// runs the reference cases from jsonlogic.com: "and" returns the first falsy
// operand or the last operand, and calling it with zero parameters is an
// error.
func TestOpAnd(t *testing.T) { assert := assert.New(t) jl := NewEmpty() AddOpAnd(jl) TestCases{ // http://jsonlogic.com/operations.html {Logic: `{"and":[true,true]}`, Data: `null`, Result: true}, {Logic: `{"and":[true,false]}`, Data: `null`, Result: false}, {Logic: `{"and":[true,"a",3]}`, Data: `null`, Result: float64(3)}, {Logic: `{"and":[true,"",3]}`, Data: `null`, Result: ""}, // Zero param. {Logic: `{"and":[]}`, Data: `null`, Err: true}, }.Run(assert, jl) }
explode_data.jsonl/38300
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 7125, 3036, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 12428, 75, 1669, 1532, 3522, 741, 37972, 7125, 3036, 3325, 75, 340, 73866, 37302, 515, 197, 197, 322, 1758, 1110, 2236, 24225, 905, 14, 38163, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAlgorithm_Sanity(t *testing.T) { layerSize := 50 cachedLayers := 100 alg := NewAlgorithm(uint32(layerSize), uint32(cachedLayers)) l := createGenesisLayer() alg.HandleIncomingLayer(l) for i := 0; i < 11-1; i++ { lyr := createFullPointingLayer(l, layerSize) start := time.Now() alg.HandleIncomingLayer(lyr) log.Info("Time to process layer: %v ", time.Since(start)) l = lyr } }
explode_data.jsonl/779
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 27847, 1098, 38270, 1155, 353, 8840, 836, 8, 341, 65986, 1695, 1669, 220, 20, 15, 198, 1444, 3854, 40235, 1669, 220, 16, 15, 15, 271, 197, 23881, 1669, 1532, 27847, 8488, 18, 17, 32585, 1695, 701, 2622, 18, 17, 1337, 385...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateAndBuildNodeBIdKeyPlmnIdValidationFailure(t *testing.T) { nodeType := "ffff" nbId := "aaaa" _, err := ValidateAndBuildNodeBIdKey(nodeType, "", nbId) assert.NotNil(t, err) assert.IsType(t, &ValidationError{}, err) assert.Equal(t, "#utils.ValidateAndBuildNodeBIdKey - an empty plmnId received", err.Error()) }
explode_data.jsonl/33042
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 17926, 3036, 11066, 1955, 33, 764, 1592, 2120, 21775, 764, 13799, 17507, 1155, 353, 8840, 836, 8, 341, 20831, 929, 1669, 330, 7238, 698, 9038, 65, 764, 1669, 330, 28458, 698, 197, 6878, 1848, 1669, 23282, 3036, 11066, 1955, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseValidMessage(t *testing.T) { // a non-json heroku log entry, err := Parse([]byte(`89 <45>1 2016-10-15T08:59:08.723822+00:00 host heroku web.1 - State changed from up to down`)) assert.NoError(t, err) assert.Equal(t, "{\"heroku_app\":\"heroku\",\"heroku_process\":\"web.1\",\"message\":\"State changed from up to down\"}", entry.Message) assert.WithinDuration(t, time.Date(2016, 10, 15, 8, 59, 8, 723822000, time.UTC), entry.Time, time.Microsecond) // a json yonomi log entry2, err2 := Parse([]byte(`89 <45>1 2016-10-15T08:59:08.723822+00:00 host app web.2 - {"name":"yonomi-api-prod_web","hostname":"1c8e812c-2b16-42ad-b08c-c6fb30080412","pid":86,"namespace":"middleware.user_agent","level":20,"req.clientdata":{},"msg":"parseUserAgent","time":"2020-09-17T15:41:38.426Z","v":0}`)) assert.NoError(t, err2) assert.Equal(t, "{\"heroku_app\":\"app\",\"heroku_process\":\"web.2\",\"name\":\"yonomi-api-prod_web\",\"hostname\":\"1c8e812c-2b16-42ad-b08c-c6fb30080412\",\"pid\":86,\"namespace\":\"middleware.user_agent\",\"level\":20,\"req.clientdata\":{},\"msg\":\"parseUserAgent\",\"time\":\"2020-09-17T15:41:38.426Z\",\"v\":0}", entry2.Message) assert.WithinDuration(t, time.Date(2016, 10, 15, 8, 59, 8, 723822000, time.UTC), entry2.Time, time.Microsecond) }
explode_data.jsonl/34024
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 531 }
[ 2830, 3393, 14463, 4088, 2052, 1155, 353, 8840, 836, 8, 341, 197, 322, 264, 2477, 56080, 1059, 16493, 1487, 198, 48344, 11, 1848, 1669, 14775, 10556, 3782, 5809, 23, 24, 366, 19, 20, 29, 16, 220, 17, 15, 16, 21, 12, 16, 15, 12, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_gap_in_headers(t *testing.T) { http := httpModForTests(nil) data1 := []byte("HTTP/1.1 200 OK\r\n" + "Date: Tue, 14 Aug 2012 22:31:45 GMT\r\n" + "Expires: -1\r\n" + "Cache-Control: private, max-age=0\r\n" + "Content-Type: text/html; charset=UTF-8\r\n") st := &stream{data: data1, message: new(message)} ok, complete := testParseStream(http, st, 0) assert.Equal(t, true, ok) assert.Equal(t, false, complete) ok, complete = http.messageGap(st, 5) assert.Equal(t, false, ok) assert.Equal(t, false, complete) }
explode_data.jsonl/16514
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 51790, 1243, 26719, 1155, 353, 8840, 836, 8, 341, 28080, 1669, 1758, 4459, 2461, 18200, 27907, 692, 8924, 16, 1669, 3056, 3782, 445, 9230, 14, 16, 13, 16, 220, 17, 15, 15, 10402, 12016, 1699, 1, 3610, 197, 197, 1, 1916, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFinalizeWorkloadScopes(t *testing.T) { namespace := "ns" errMock := errors.New("mock error") workload := &unstructured.Unstructured{} workload.SetAPIVersion("workload.oam.dev") workload.SetKind("workloadKind") workload.SetNamespace(namespace) workload.SetName("workload-example") workload.SetUID(types.UID("workload-uid")) ctx := context.Background() scope, _ := util.Object2Unstructured(&v1alpha2.HealthScope{ ObjectMeta: metav1.ObjectMeta{ Name: "scope-example", Namespace: namespace, }, TypeMeta: metav1.TypeMeta{ APIVersion: "scope.oam.dev/v1alpha2", Kind: "scopeKind", }, Spec: v1alpha2.HealthScopeSpec{ WorkloadReferences: []v1alpha1.TypedReference{ { APIVersion: workload.GetAPIVersion(), Kind: workload.GetKind(), Name: workload.GetName(), }, }, }, }) scopeDefinition := v1alpha2.ScopeDefinition{ TypeMeta: metav1.TypeMeta{ Kind: "ScopeDefinition", APIVersion: "scopeDef.oam.dev", }, ObjectMeta: metav1.ObjectMeta{ Name: "scope-example.scope.oam.dev", Namespace: namespace, }, Spec: v1alpha2.ScopeDefinitionSpec{ Reference: v1alpha2.DefinitionReference{ Name: "scope-example.scope.oam.dev", }, WorkloadRefsPath: "spec.workloadRefs", }, } ac := v1alpha2.ApplicationConfiguration{ ObjectMeta: metav1.ObjectMeta{ Finalizers: []string{workloadScopeFinalizer}, }, Status: v1alpha2.ApplicationConfigurationStatus{ Workloads: []v1alpha2.WorkloadStatus{ { Reference: v1alpha1.TypedReference{ APIVersion: workload.GetAPIVersion(), Kind: workload.GetKind(), Name: workload.GetName(), }, Scopes: []v1alpha2.WorkloadScope{ { Reference: v1alpha1.TypedReference{ APIVersion: scope.GetAPIVersion(), Kind: scope.GetKind(), Name: scope.GetName(), }, }, }, }, }, }, } cases := []struct { caseName string applicator apply.Applicator rawClient client.Client wantErr error wantFinalizers []string }{ { caseName: "Finalization successes", applicator: ApplyFn(func(_ context.Context, o runtime.Object, _ ...apply.ApplyOption) error { return nil }), rawClient: &test.MockClient{ MockGet: func(ctx 
context.Context, key types.NamespacedName, obj runtime.Object) error { if key.Name == scope.GetName() { scope := obj.(*unstructured.Unstructured) refs := []interface{}{ map[string]interface{}{ "apiVersion": workload.GetAPIVersion(), "kind": workload.GetKind(), "name": workload.GetName(), }, } if err := fieldpath.Pave(scope.UnstructuredContent()).SetValue("spec.workloadRefs", refs); err == nil { return err } return nil } if scopeDef, ok := obj.(*v1alpha2.ScopeDefinition); ok { *scopeDef = scopeDefinition return nil } return nil }, MockUpdate: func(ctx context.Context, obj runtime.Object, opts ...client.UpdateOption) error { return nil }, }, wantErr: nil, wantFinalizers: []string{}, }, { caseName: "Finalization fails for error", applicator: ApplyFn(func(_ context.Context, o runtime.Object, _ ...apply.ApplyOption) error { return nil }), rawClient: &test.MockClient{ MockGet: func(ctx context.Context, key types.NamespacedName, obj runtime.Object) error { return errMock }, MockUpdate: func(ctx context.Context, obj runtime.Object, opts ...client.UpdateOption) error { return nil }, }, wantErr: errors.Wrapf(errMock, errFmtApplyScope, scope.GetAPIVersion(), scope.GetKind(), scope.GetName()), wantFinalizers: []string{workloadScopeFinalizer}, }, } for _, tc := range cases { t.Run(tc.caseName, func(t *testing.T) { acTest := ac mapper := mock.NewMockDiscoveryMapper() w := workloads{applicator: tc.applicator, rawClient: tc.rawClient, dm: mapper} err := w.Finalize(ctx, &acTest) if diff := cmp.Diff(tc.wantErr, err, test.EquateErrors()); diff != "" { t.Errorf("\n%s\nw.Apply(...): -want error, +got error:\n%s", tc.caseName, diff) } if diff := cmp.Diff(tc.wantFinalizers, acTest.ObjectMeta.Finalizers); diff != "" { t.Errorf("\n%s\nw.Apply(...): -want error, +got error:\n%s", tc.caseName, diff) } }) } }
explode_data.jsonl/16873
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1918 }
[ 2830, 3393, 19357, 551, 6776, 1078, 3326, 18523, 1155, 353, 8840, 836, 8, 341, 56623, 1669, 330, 4412, 698, 9859, 11571, 1669, 5975, 7121, 445, 16712, 1465, 1138, 97038, 1078, 1669, 609, 359, 51143, 10616, 51143, 16094, 97038, 1078, 4202,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetSubpathFromConfig(t *testing.T) { testCases := []struct { Description string SiteURL *string ExpectedError bool ExpectedSubpath string }{ { "empty SiteURL", sToP(""), false, "/", }, { "invalid SiteURL", sToP("cache_object:foo/bar"), true, "", }, { "nil SiteURL", nil, false, "/", }, { "no trailing slash", sToP("http://localhost:8065"), false, "/", }, { "trailing slash", sToP("http://localhost:8065/"), false, "/", }, { "subpath, no trailing slash", sToP("http://localhost:8065/subpath"), false, "/subpath", }, { "trailing slash", sToP("http://localhost:8065/subpath/"), false, "/subpath", }, } for _, testCase := range testCases { t.Run(testCase.Description, func(t *testing.T) { config := &model.Config{ ServiceSettings: model.ServiceSettings{ SiteURL: testCase.SiteURL, }, } subpath, err := utils.GetSubpathFromConfig(config) if testCase.ExpectedError { require.Error(t, err) } else { require.NoError(t, err) } require.Equal(t, testCase.ExpectedSubpath, subpath) }) } }
explode_data.jsonl/26125
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 573 }
[ 2830, 3393, 1949, 3136, 2343, 3830, 2648, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 47414, 257, 914, 198, 197, 7568, 632, 3144, 260, 353, 917, 198, 197, 197, 18896, 1454, 256, 1807, 198, 197, 197, 18896, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStackOutputsJSON(t *testing.T) { e := ptesting.NewEnvironment(t) defer func() { if !t.Failed() { e.DeleteEnvironment() } }() e.ImportDirectory(filepath.Join("stack_outputs", "nodejs")) e.RunCommand("yarn", "link", "@pulumi/pulumi") e.RunCommand("pulumi", "login", "--cloud-url", e.LocalURL()) e.RunCommand("pulumi", "stack", "init", "stack-outs") e.RunCommand("pulumi", "up", "--non-interactive", "--skip-preview") stdout, _ := e.RunCommand("pulumi", "stack", "output", "--json") assert.Equal(t, `{ "foo": 42, "xyz": "ABC" } `, stdout) }
explode_data.jsonl/76350
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 241 }
[ 2830, 3393, 4336, 61438, 5370, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 281, 8840, 7121, 12723, 1155, 340, 16867, 2915, 368, 341, 197, 743, 753, 83, 991, 5687, 368, 341, 298, 7727, 18872, 12723, 741, 197, 197, 532, 197, 69826, 7727, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestKubeadmConfigSecretCreatedStatusNotPatched(t *testing.T) { g := NewWithT(t) cluster := newCluster("cluster") cluster.Status.InfrastructureReady = true cluster.Status.ControlPlaneInitialized = true cluster.Spec.ControlPlaneEndpoint = clusterv1.APIEndpoint{Host: "100.105.150.1", Port: 6443} controlPlaneInitMachine := newControlPlaneMachine(cluster, "control-plane-init-machine") initConfig := newControlPlaneInitKubeadmConfig(controlPlaneInitMachine, "control-plane-init-config") workerMachine := newWorkerMachine(cluster) workerJoinConfig := newWorkerJoinKubeadmConfig(workerMachine) objects := []client.Object{ cluster, workerMachine, workerJoinConfig, } objects = append(objects, createSecrets(t, cluster, initConfig)...) myclient := helpers.NewFakeClientWithScheme(setupScheme(), objects...) k := &KubeadmConfigReconciler{ Client: myclient, KubeadmInitLock: &myInitLocker{}, remoteClientGetter: fakeremote.NewClusterClient, } request := ctrl.Request{ NamespacedName: client.ObjectKey{ Namespace: "default", Name: "worker-join-cfg", }, } secret := &corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: workerJoinConfig.Name, Namespace: workerJoinConfig.Namespace, Labels: map[string]string{ clusterv1.ClusterLabelName: cluster.Name, }, OwnerReferences: []metav1.OwnerReference{ { APIVersion: bootstrapv1.GroupVersion.String(), Kind: "KubeadmConfig", Name: workerJoinConfig.Name, UID: workerJoinConfig.UID, Controller: pointer.BoolPtr(true), }, }, }, Data: map[string][]byte{ "value": nil, }, Type: clusterv1.ClusterSecretType, } err := myclient.Create(ctx, secret) g.Expect(err).ToNot(HaveOccurred()) result, err := k.Reconcile(ctx, request) g.Expect(err).NotTo(HaveOccurred()) g.Expect(result.Requeue).To(BeFalse()) g.Expect(result.RequeueAfter).To(Equal(time.Duration(0))) cfg, err := getKubeadmConfig(myclient, "worker-join-cfg") g.Expect(err).NotTo(HaveOccurred()) g.Expect(cfg.Status.Ready).To(BeTrue()) g.Expect(cfg.Status.DataSecretName).NotTo(BeNil()) 
g.Expect(cfg.Status.ObservedGeneration).NotTo(BeNil()) }
explode_data.jsonl/44323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 871 }
[ 2830, 3393, 42, 392, 3149, 76, 2648, 19773, 11694, 2522, 2623, 47, 34244, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 197, 18855, 1669, 501, 28678, 445, 18855, 1138, 197, 18855, 10538, 40435, 19202, 284, 830, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApiTest_AddsContentTypeHeaderToRequest(t *testing.T) { handler := http.NewServeMux() handler.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) { if r.Header["Content-Type"][0] != "application/x-www-form-urlencoded" { w.WriteHeader(http.StatusBadRequest) return } w.WriteHeader(http.StatusOK) }) apitest.New(). Handler(handler). Post("/hello"). ContentType("application/x-www-form-urlencoded"). Body(`name=John`). Expect(t). Status(http.StatusOK). End() }
explode_data.jsonl/54792
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 205 }
[ 2830, 3393, 6563, 2271, 21346, 82, 29504, 4047, 1249, 1900, 1155, 353, 8840, 836, 8, 341, 53326, 1669, 1758, 7121, 60421, 44, 2200, 741, 53326, 63623, 4283, 14990, 497, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLiteral(t *testing.T) { t.Run("NewLiteral", func(t *testing.T) { // nolint:gocritic is := is.New(t) l, err := NewLiteral("") is.True(err != nil) // empty literal is not allowed is.Equal(l, Literal{}) l, err = NewLiteral("some text") is.NoErr(err) is.Equal(l.String(), "\"some text\"") is.Equal(l.RawValue(), "some text") }) t.Run("Literal.Equal", func(t *testing.T) { // nolint:gocritic is := is.New(t) l, err := NewLiteralInferred("123") is.NoErr(err) is.True(l.Equal(Literal{str: "123", DataType: xsdString})) is.True(!l.Equal(Literal{str: "1234", DataType: xsdString})) is.True(!l.Equal(&Literal{str: "123", lang: "nl-NL", DataType: xsdString})) is.True(!l.Equal(NewAnonNode())) // different types can never be equal is.True(!l.Equal(Literal{str: "123", DataType: xsdBoolean})) is.Equal(l.Type(), TermLiteral) }) t.Run("Literal.String", func(t *testing.T) { // nolint:gocritic is := is.New(t) l, err := NewLiteralInferred("123") is.NoErr(err) is.Equal(l.String(), "\"123\"") tests := []struct { value string lang string dt *IRI want string }{ {value: "123", want: "\"123\""}, {value: "123", lang: "nl-NL", want: "\"123\"@nl-NL"}, {value: "true", dt: xsdBoolean, want: "\"true\"^^http://www.w3.org/2001/XMLSchema#boolean"}, } for _, tt := range tests { tt := tt var l Literal var err error switch { case tt.lang != "": l, err = NewLiteralWithLang(tt.value, tt.lang) is.NoErr(err) // case tt.dt != nil: l, err = NewLiteralWithType(tt.value, tt.dt) is.NoErr(err) default: l, err = NewLiteral(tt.value) is.NoErr(err) } is.Equal(l.String(), tt.want) is.Equal(l.RawValue(), tt.value) } }) t.Run("NewLiteralInferred", func(t *testing.T) { tests := []struct { input interface{} dt *IRI errString string }{ {1, xsdInteger, ""}, {int64(1), xsdInteger, ""}, {int32(1), xsdInteger, ""}, {3.14, xsdDouble, ""}, {float32(3.14), xsdDouble, ""}, {float64(3.14), xsdDouble, ""}, {time.Now(), xsdDateTime, ""}, {true, xsdBoolean, ""}, {false, xsdBoolean, ""}, {"a", xsdString, ""}, {[]byte("123"), 
xsdByte, ""}, {struct{ a, b string }{"1", "2"}, &IRI{}, `cannot infer XSD datatype from struct { a string; b string }{a:"1", b:"2"}`}, {"", xsdString, "invalid literal value: cannot be empty"}, } for _, tt := range tests { tt := tt l, err := NewLiteralInferred(tt.input) if err != nil { if tt.errString == "" { t.Errorf("NewLiteral(%#v) failed with %v; want no error", tt.input, err) continue } if tt.errString != err.Error() { t.Errorf("NewLiteral(%#v) failed with %v; want %v", tt.input, err, tt.errString) continue } } if err == nil && tt.errString != "" { t.Errorf("NewLiteral(%#v) => <no error>; want error %v", tt.input, tt.errString) continue } if err == nil && l.DataType != tt.dt { t.Errorf("NewLiteral(%#v).DataType => got %v; want %v", tt.input, l.DataType, tt.dt) } } }) t.Run("NewLiteralWithType", func(t *testing.T) { tests := []struct { dataType *IRI errWant string }{ {nil, "cannot be nil"}, {xsdBoolean, ""}, {&IRI{str: "http://www.w3.org/1999/02/22-rdf-syntax-ns#Unknown"}, "unsupported Literal.DataType IRI"}, } for _, tt := range tests { value := "some text" _, err := NewLiteralWithType(value, tt.dataType) if err != nil { if tt.errWant == "" { t.Errorf("NewLiteralWithType(%s, %#v) failed with %v; want no error", value, tt.dataType, err) continue } if !strings.Contains(err.Error(), tt.errWant) { t.Errorf("NewLiteralWithType(%s, %#v) failed with %v; want %v", value, tt.dataType, err, tt.errWant) continue } } if err == nil && tt.errWant != "" { t.Errorf("NewLiteralWithType(%s, %#v) => <no error>; want error %v", value, tt.dataType, tt.errWant) continue } } }) t.Run("NewWithLanguage", func(t *testing.T) { tests := []struct { value string tag string errWant string }{ {"some text", "", ""}, {"", "en", ""}, {"", "en-GB", ""}, {"", "nb-no2", ""}, {"", "no-no-a", "invalid language tag: only one '-' allowed"}, {"", "1", "invalid language tag: unexpected character: '1'"}, {"", "fr-ø", "invalid language tag: unexpected character: 'ø'"}, {"", "en-", "invalid language tag: 
trailing '-' disallowed"}, {"", "-en", "invalid language tag: must start with a letter"}, } for _, tt := range tests { if tt.value == "" { tt.value = "string" } l, err := NewLiteralWithLang(tt.value, tt.tag) if err != nil { if tt.errWant == "" { t.Errorf("NewLiteralWithLang(%s, %#v) failed with %v; want no error", tt.value, tt.tag, err) continue } if !strings.Contains(err.Error(), tt.errWant) { t.Errorf("NewLiteralWithLang(%s, %#v) failed with %v; want %v", tt.value, tt.tag, err, tt.errWant) continue } } if err == nil && tt.errWant != "" { t.Errorf("NewLiteralWithLang(%s, %#v) => <no error>; want error %v", tt.value, tt.tag, tt.errWant) continue } if err == nil && tt.tag != l.Lang() { t.Errorf("NewLiteralWithLang(%s, %#v) => got %s; want %v", tt.value, tt.tag, l.Lang(), tt.tag) } } }) }
explode_data.jsonl/48552
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2569 }
[ 2830, 3393, 17350, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 3564, 17350, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 308, 337, 396, 70418, 509, 49208, 198, 197, 19907, 1669, 374, 7121, 1155, 692, 197, 8810, 11, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRSAPublicKeyInvalidBytes(t *testing.T) { t.Parallel() rsaKey := &rsaPublicKey{nil} b, err := rsaKey.Bytes() if err == nil { t.Fatal("It must fail in this case") } if len(b) != 0 { t.Fatal("It must be nil") } }
explode_data.jsonl/29281
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 11451, 2537, 475, 1592, 7928, 7078, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 41231, 64, 1592, 1669, 609, 60869, 61822, 90, 8385, 532, 2233, 11, 1848, 1669, 68570, 1592, 36868, 741, 743, 1848, 621, 2092, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMergeProfs(t *testing.T) { tests := []struct { in [][]*cover.Profile want []*cover.Profile }{ // empty {in: nil, want: nil}, // The number of profiles is 1 {in: [][]*cover.Profile{{{FileName: "name1"}}}, want: []*cover.Profile{{FileName: "name1"}}}, // merge profile blocks { in: [][]*cover.Profile{ {}, // skip first empty profiles. { { FileName: "name1", Blocks: []cover.ProfileBlock{ {StartLine: 1, StartCol: 1, Count: 1}, }, }, { FileName: "name2", Blocks: []cover.ProfileBlock{ {StartLine: 1, StartCol: 1, Count: 0}, }, }, }, {}, // skip first empty profiles. { { FileName: "name1", Blocks: []cover.ProfileBlock{ {StartLine: 1, StartCol: 1, Count: 1}, }, }, { FileName: "name2", Blocks: []cover.ProfileBlock{ {StartLine: 1, StartCol: 1, Count: 1}, }, }, }, }, want: []*cover.Profile{ { FileName: "name1", Blocks: []cover.ProfileBlock{ {StartLine: 1, StartCol: 1, Count: 2}, }, }, { FileName: "name2", Blocks: []cover.ProfileBlock{ {StartLine: 1, StartCol: 1, Count: 1}, }, }, }, }, } for _, tt := range tests { if got := mergeProfs(tt.in); !reflect.DeepEqual(got, tt.want) { t.Errorf("mergeProfs(%#v) = %#v, want %#v", tt.in, got, tt.want) } } }
explode_data.jsonl/29822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 739 }
[ 2830, 3393, 52096, 1336, 3848, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 256, 52931, 9, 3688, 59872, 198, 197, 50780, 29838, 3688, 59872, 198, 197, 59403, 197, 197, 322, 4287, 198, 197, 197, 90, 258, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnpack(t *testing.T) { type want struct { output string err error } tests := []struct { name string fs afero.Fs root string want want }{ { // unpack should fail to find the install.yaml file name: "EmptyExtensionDir", fs: func() afero.Fs { fs := afero.NewMemMapFs() fs.MkdirAll("ext-dir", 0755) return fs }(), root: "ext-dir", want: want{output: "", err: &os.PathError{Op: "open", Path: "ext-dir/install.yaml", Err: afero.ErrFileNotFound}}, }, { name: "SimpleExtension", fs: func() afero.Fs { fs := afero.NewMemMapFs() fs.MkdirAll("ext-dir", 0755) afero.WriteFile(fs, "ext-dir/icon.jpg", []byte("mock-icon-data"), 0644) afero.WriteFile(fs, "ext-dir/app.yaml", []byte(simpleAppFile), 0644) afero.WriteFile(fs, "ext-dir/install.yaml", []byte(simpleInstallFile), 0644) afero.WriteFile(fs, "ext-dir/rbac.yaml", []byte(simpleRBACFile), 0644) crdDir := "ext-dir/resources/samples.upbound.io/mytype/v1alpha1" fs.MkdirAll(crdDir, 0755) afero.WriteFile(fs, filepath.Join(crdDir, "mytype.v1alpha1.crd.yaml"), []byte(simpleCRDFile), 0644) return fs }(), root: "ext-dir", want: want{output: expectedSimpleExtensionOutput, err: nil}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := doUnpack(tt.fs, tt.root) if diff := cmp.Diff(err, tt.want.err, test.EquateErrors()); diff != "" { t.Errorf("doUnpack() want error != got error:\n%s", diff) } if diff := cmp.Diff(got, tt.want.output); diff != "" { t.Errorf("doUnpack() got != want:\n%v", diff) } }) } }
explode_data.jsonl/74161
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 758 }
[ 2830, 3393, 1806, 4748, 1155, 353, 8840, 836, 8, 341, 13158, 1366, 2036, 341, 197, 21170, 914, 198, 197, 9859, 262, 1465, 198, 197, 630, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 53584, 256, 264, 802, 78, 991, 82, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestModifyConfig(t *testing.T) { e, err := newSchemaGenerator(createGraph(false)) require.NoError(t, err) e.relaySpec = false cfg, err := e.genModels() require.NoError(t, err) expected := map[string]string{ "Todo": "example.com.Todo", "Group": "example.com.Group", "GroupWithSort": "example.com.GroupWithSort", } require.Equal(t, expected, cfg) }
explode_data.jsonl/63033
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 164 }
[ 2830, 3393, 44427, 2648, 1155, 353, 8840, 836, 8, 341, 7727, 11, 1848, 1669, 501, 8632, 12561, 32602, 11212, 3576, 1171, 17957, 35699, 1155, 11, 1848, 692, 7727, 1327, 6651, 8327, 284, 895, 198, 50286, 11, 1848, 1669, 384, 22822, 16969,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1