text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestBotUA(t *testing.T) { var fail []string for _, b := range bots { r := &http.Request{Header: make(http.Header)} r.Header.Add("User-Agent", b) if IsNot(Bot(r)) { fail = append(fail, b) } } if len(fail) > 0 { t.Errorf("%d failed:\n%s", len(fail), strings.Join(fail, "\n")) } }
explode_data.jsonl/82221
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 23502, 17782, 1155, 353, 8840, 836, 8, 341, 2405, 3690, 3056, 917, 198, 2023, 8358, 293, 1669, 2088, 49529, 341, 197, 7000, 1669, 609, 1254, 9659, 90, 4047, 25, 1281, 19886, 15753, 10569, 197, 7000, 15753, 1904, 445, 1474, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReadArtifact_NoRun_NotFound(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) defer store.Close() manager := NewResourceManager(store) _, err := manager.ReadArtifact("run-1", "node-1", "artifact-1") assert.True(t, util.IsUserErrorCodeMatch(err, codes.NotFound)) }
explode_data.jsonl/28400
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 4418, 85578, 36989, 6727, 60816, 6650, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 52317, 2959, 2043, 2195, 62396, 67811, 7121, 52317, 1462, 2461, 44338, 2398, 16867, 3553, 10421, 741, 92272, 1669, 1532, 32498, 31200, 692, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetActions(t *testing.T) { t.Skip() client := NewClient(&Config{ URL: "https://api-kylin.eosasia.one", Debug: true, }) resp, err := client.GetActions("helloworld54", 0, 1000) if err != nil { t.Error(err) } _ = resp }
explode_data.jsonl/58353
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 1949, 12948, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 741, 25291, 1669, 1532, 2959, 2099, 2648, 515, 197, 79055, 25, 256, 330, 2428, 1110, 2068, 12, 7891, 3732, 1734, 436, 35159, 30973, 756, 197, 34424, 25, 830, 345, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTicker_S14(t *testing.T) { product := "btc_" + common.NativeToken timeMap := GetTimes() // open kline15s1 := prepareKlineMx(product, 15*60, 100.0, 100.0, 100.0, 100.0, []float64{100}, timeMap["-24h"], timeMap["-30m"]-60*15*5) kline15s2 := prepareKlineMx(product, 15*60, 99.0, 99.0, 99.0, 99.0, []float64{100}, timeMap["-24h"], timeMap["-30m"]-60*15*4) kline15s3 := prepareKlineMx(product, 15*60, 220.0, 220.0, 220.0, 220.0, []float64{100}, timeMap["-24h"], timeMap["-30m"]-60*15*3) kline15s4 := prepareKlineMx(product, 15*60, 99.0, 99.0, 99.0, 99.0, []float64{100}, timeMap["-24h"], timeMap["-30m"]-60*15*2) // close kline15s5 := prepareKlineMx(product, 15*60, 98.0, 98.0, 98.0, 98.0, []float64{100}, timeMap["-24h"], timeMap["-30m"]-60*15*1) kline1s := []interface{}{} matches := []*types.MatchResult{} fakeLatestTickers := &map[string]*types.Ticker{} klines15s := []interface{}{} klines15s = append(klines15s, kline15s1...) klines15s = append(klines15s, kline15s2...) klines15s = append(klines15s, kline15s3...) klines15s = append(klines15s, kline15s4...) klines15s = append(klines15s, kline15s5...) err := simpleCaseRunner(t, product, nil, timeMap["-24h"], timeMap["now"]+1, klines15s, kline1s, matches, aTicker(product, 100.0, 98.0, 220.0, 98.0, 98.0, 500.0), fakeLatestTickers) assert.True(t, err == nil) }
explode_data.jsonl/37820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 610 }
[ 2830, 3393, 87278, 1098, 16, 19, 1155, 353, 8840, 836, 8, 1476, 69288, 1669, 330, 92634, 18771, 488, 4185, 68624, 3323, 198, 21957, 2227, 1669, 2126, 18889, 741, 197, 322, 1787, 198, 16463, 1056, 16, 20, 82, 16, 1669, 10549, 42, 1056,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClusterRoleBindingGenerate(t *testing.T) { tests := []struct { name string params map[string]interface{} expected *rbacv1beta1.ClusterRoleBinding expectErr bool }{ { name: "valid case 1", params: map[string]interface{}{ "name": "foo", "clusterrole": "admin", "user": []string{"user"}, "group": []string{"group"}, "serviceaccount": []string{"ns1:name1"}, }, expected: &rbacv1beta1.ClusterRoleBinding{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", }, RoleRef: rbacv1beta1.RoleRef{ APIGroup: rbacv1beta1.GroupName, Kind: "ClusterRole", Name: "admin", }, Subjects: []rbacv1beta1.Subject{ { APIGroup: rbacv1beta1.GroupName, Kind: rbacv1beta1.UserKind, Name: "user", }, { APIGroup: rbacv1beta1.GroupName, Kind: rbacv1beta1.GroupKind, Name: "group", }, { Kind: rbacv1beta1.ServiceAccountKind, APIGroup: "", Namespace: "ns1", Name: "name1", }, }, }, expectErr: false, }, { name: "valid case 2", params: map[string]interface{}{ "name": "foo", "clusterrole": "admin", "user": []string{"user1", "user2"}, "group": []string{"group1", "group2"}, "serviceaccount": []string{"ns1:name1", "ns2:name2"}, }, expected: &rbacv1beta1.ClusterRoleBinding{ ObjectMeta: metav1.ObjectMeta{ Name: "foo", }, RoleRef: rbacv1beta1.RoleRef{ APIGroup: rbacv1beta1.GroupName, Kind: "ClusterRole", Name: "admin", }, Subjects: []rbacv1beta1.Subject{ { APIGroup: rbacv1beta1.GroupName, Kind: rbacv1beta1.UserKind, Name: "user1", }, { APIGroup: rbacv1beta1.GroupName, Kind: rbacv1beta1.UserKind, Name: "user2", }, { APIGroup: rbacv1beta1.GroupName, Kind: rbacv1beta1.GroupKind, Name: "group1", }, { APIGroup: rbacv1beta1.GroupName, Kind: rbacv1beta1.GroupKind, Name: "group2", }, { Kind: rbacv1beta1.ServiceAccountKind, APIGroup: "", Namespace: "ns1", Name: "name1", }, { Kind: rbacv1beta1.ServiceAccountKind, APIGroup: "", Namespace: "ns2", Name: "name2", }, }, }, expectErr: false, }, { name: "valid case 3", params: map[string]interface{}{ "name": "foo", "clusterrole": "admin", }, expected: &rbacv1beta1.ClusterRoleBinding{ 
ObjectMeta: metav1.ObjectMeta{ Name: "foo", }, RoleRef: rbacv1beta1.RoleRef{ APIGroup: rbacv1beta1.GroupName, Kind: "ClusterRole", Name: "admin", }, }, expectErr: false, }, { name: "invalid serviceaccount, expected format: <namespace:name>", params: map[string]interface{}{ "name": "role", "clusterrole": "admin", "user": []string{"user"}, "group": []string{"group"}, "serviceaccount": []string{"ns1-name1"}, }, expectErr: true, }, { name: "name must be specified", params: map[string]interface{}{ "name": "", "clusterrole": "admin", "user": []string{"user"}, "group": []string{"group"}, "serviceaccount": []string{"ns1:name1"}, }, expectErr: true, }, { name: "clusterrole must be specified", params: map[string]interface{}{ "name": "foo", "clusterrole": "", "user": []string{"user"}, "group": []string{"group"}, "serviceaccount": []string{"ns1:name1"}, }, expectErr: true, }, { name: "expected user []string", params: map[string]interface{}{ "name": "role", "clusterrole": "admin", "user": "user", "group": []string{"group"}, "serviceaccount": []string{"ns1:name1"}, }, expectErr: true, }, { name: "expected group []string", params: map[string]interface{}{ "name": "role", "clusterrole": "admin", "user": []string{"user"}, "group": "group", "serviceaccount": []string{"ns1:name1"}, }, expectErr: true, }, { name: "expected serviceaccount []string", params: map[string]interface{}{ "name": "role", "clusterrole": "admin", "user": []string{"user"}, "group": []string{"group"}, "serviceaccount": "ns1", }, expectErr: true, }, } generator := ClusterRoleBindingGeneratorV1{} for i := range tests { obj, err := generator.Generate(tests[i].params) if !tests[i].expectErr && err != nil { t.Errorf("[%d] unexpected error: %v", i, err) } if tests[i].expectErr && err != nil { continue } if tests[i].expectErr && err == nil { t.Errorf("[%s] expect error, got nil", tests[i].name) } if !reflect.DeepEqual(obj.(*rbacv1beta1.ClusterRoleBinding), tests[i].expected) { t.Errorf("\n[%s] want:\n%#v\ngot:\n%#v", 
tests[i].name, tests[i].expected, obj.(*rbacv1beta1.ClusterRoleBinding)) } } }
explode_data.jsonl/47307
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2798 }
[ 2830, 3393, 28678, 9030, 15059, 31115, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 25856, 262, 2415, 14032, 31344, 16094, 197, 42400, 220, 353, 10681, 580, 85, 16, 19127, 16, 72883, 9030, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestSaveImage(t *testing.T) { dbQuery1 := "select image_id from image where original_hash = $1" dbQuery2 := "select register_image($1::bytea, $2::text, $3::bytea)" pngImgData, err := ioutil.ReadFile("testdata/image.png") require.NoError(t, err) sumPngImg := sha256.Sum256(pngImgData) pngImgHash := sumPngImg[:] svgImgData, err := ioutil.ReadFile("testdata/image.svg") require.NoError(t, err) sumSvgImg := sha256.Sum256(svgImgData) svgImgHash := sumSvgImg[:] ctx := context.Background() t.Run("successful png image registration", func(t *testing.T) { db := &tests.DBMock{} db.On("QueryRow", ctx, dbQuery1, pngImgHash).Return(nil, pgx.ErrNoRows) for _, version := range []string{"1x", "2x", "3x", "4x"} { db.On("QueryRow", ctx, dbQuery2, pngImgHash, version, mock.Anything).Return("pngImgID", nil) } s := NewImageStore(db) imageID, err := s.SaveImage(ctx, pngImgData) require.NoError(t, err) assert.Equal(t, "pngImgID", imageID) db.AssertExpectations(t) }) t.Run("successful svg image registration", func(t *testing.T) { db := &tests.DBMock{} db.On("QueryRow", ctx, dbQuery1, svgImgHash).Return(nil, pgx.ErrNoRows) db.On("QueryRow", ctx, dbQuery2, svgImgHash, "svg", mock.Anything).Return("svgImgID", nil) s := NewImageStore(db) imageID, err := s.SaveImage(ctx, svgImgData) require.NoError(t, err) assert.Equal(t, "svgImgID", imageID) db.AssertExpectations(t) }) t.Run("try to register existing png image", func(t *testing.T) { db := &tests.DBMock{} db.On("QueryRow", ctx, dbQuery1, pngImgHash).Return("existingImageID", nil) s := NewImageStore(db) imageID, err := s.SaveImage(ctx, pngImgData) require.NoError(t, err) assert.Equal(t, "existingImageID", imageID) db.AssertExpectations(t) }) t.Run("database error calling get_image_id", func(t *testing.T) { db := &tests.DBMock{} db.On("QueryRow", ctx, dbQuery1, pngImgHash).Return(nil, tests.ErrFakeDatabaseFailure) s := NewImageStore(db) imageID, err := s.SaveImage(ctx, pngImgData) assert.Equal(t, tests.ErrFakeDatabaseFailure, err) 
assert.Empty(t, imageID) db.AssertExpectations(t) }) t.Run("database error calling register_image", func(t *testing.T) { db := &tests.DBMock{} db.On("QueryRow", ctx, dbQuery1, pngImgHash).Return(nil, pgx.ErrNoRows) db.On("QueryRow", ctx, dbQuery2, pngImgHash, "1x", mock.Anything).Return(nil, tests.ErrFakeDatabaseFailure) s := NewImageStore(db) imageID, err := s.SaveImage(ctx, pngImgData) assert.Equal(t, tests.ErrFakeDatabaseFailure, err) assert.Empty(t, imageID) db.AssertExpectations(t) }) }
explode_data.jsonl/53777
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1113 }
[ 2830, 3393, 8784, 1906, 1155, 353, 8840, 836, 8, 341, 20939, 2859, 16, 1669, 330, 1742, 2168, 842, 504, 2168, 1380, 4024, 8950, 284, 400, 16, 698, 20939, 2859, 17, 1669, 330, 1742, 4161, 4954, 699, 16, 486, 3782, 64, 11, 400, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCustomDef(t *testing.T) { callErr, funcErr, assert, callBuffer, funcBuffer := testOpenAPITypeWriter(t, ` package foo import openapi "k8s.io/kube-openapi/pkg/common" type Blah struct { } func (_ Blah) OpenAPIDefinition() openapi.OpenAPIDefinition { return openapi.OpenAPIDefinition{ Schema: spec.Schema{ SchemaProps: spec.SchemaProps{ Type: []string{"string"}, Format: "date-time", }, }, } } `) if callErr != nil { t.Fatal(callErr) } if funcErr != nil { t.Fatal(funcErr) } assert.Equal(`"base/foo.Blah": foo.Blah{}.OpenAPIDefinition(), `, callBuffer.String()) assert.Equal(``, funcBuffer.String()) }
explode_data.jsonl/3354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 277 }
[ 2830, 3393, 10268, 2620, 1155, 353, 8840, 836, 8, 341, 67288, 7747, 11, 2915, 7747, 11, 2060, 11, 1618, 4095, 11, 2915, 4095, 1669, 1273, 5002, 7082, 929, 6492, 1155, 11, 22074, 1722, 15229, 271, 474, 1787, 2068, 330, 74, 23, 82, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidGetInboundMessagesParams(t *testing.T) { t.Run("ValidGetInboundMessagesParams", func(t *testing.T) { test := GetInboundSMSParams{ Limit: 1, } err := test.Validate() require.NoError(t, err) }) }
explode_data.jsonl/13710
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 4088, 1949, 641, 10891, 15820, 4870, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 4088, 1949, 641, 10891, 15820, 4870, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18185, 1669, 2126, 641, 10891, 65565, 4870, 515, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsQualifiedName(t *testing.T) { successCases := []string{ "simple", "now-with-dashes", "1-starts-with-num", "1234", "simple/simple", "now-with-dashes/simple", "now-with-dashes/now-with-dashes", "now.with.dots/simple", "now-with.dashes-and.dots/simple", "1-num.2-num/3-num", "1234/5678", "1.2.3.4/5678", "Uppercase_Is_OK_123", "example.com/Uppercase_Is_OK_123", "requests.storage-foo", strings.Repeat("a", 63), strings.Repeat("a", 253) + "/" + strings.Repeat("b", 63), } for i := range successCases { if errs := IsQualifiedName(successCases[i]); len(errs) != 0 { t.Errorf("case[%d]: %q: expected success: %v", i, successCases[i], errs) } } errorCases := []string{ "nospecialchars%^=@", "cantendwithadash-", "-cantstartwithadash-", "only/one/slash", "Example.com/abc", "example_com/abc", "example.com/", "/simple", strings.Repeat("a", 64), strings.Repeat("a", 254) + "/abc", } for i := range errorCases { if errs := IsQualifiedName(errorCases[i]); len(errs) == 0 { t.Errorf("case[%d]: %q: expected failure", i, errorCases[i]) } } }
explode_data.jsonl/11824
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 524 }
[ 2830, 3393, 3872, 92251, 1155, 353, 8840, 836, 8, 341, 30553, 37302, 1669, 3056, 917, 515, 197, 197, 1, 22944, 756, 197, 197, 1, 3328, 26189, 1737, 14051, 756, 197, 197, 1, 16, 18935, 82, 26189, 31194, 756, 197, 197, 1, 16, 17, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestValidateConfig_ErrorOnInvalidKeys(t *testing.T) { with.Logging(t, func(harness *with.LoggingHarness) { cfg := defaultProductionConfig() cfg.SetNodeAddress(defaultNodeAddress()) cfg.SetNodePrivateKey(wrongPrivateKey()) require.Error(t, ValidateNodeLogic(cfg)) }) }
explode_data.jsonl/17018
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 17926, 2648, 28651, 1925, 7928, 8850, 1155, 353, 8840, 836, 8, 341, 46948, 41945, 1155, 11, 2915, 3203, 23518, 353, 4197, 41945, 74248, 8, 341, 197, 50286, 1669, 1638, 44967, 2648, 741, 197, 50286, 4202, 1955, 4286, 18978, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPoolStress(t *testing.T) { const P = 10 N := int(1e6) if testing.Short() { N /= 100 } var p Pool done := make(chan bool) for i := 0; i < P; i++ { go func() { var v interface{} = 0 for j := 0; j < N; j++ { if v == nil { v = 0 } p.Put(v) v = p.Get() if v != nil && v.(int) != 0 { t.Errorf("expect 0, got %v", v) break } } done <- true }() } for i := 0; i < P; i++ { <-done } }
explode_data.jsonl/51812
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 10551, 623, 673, 1155, 353, 8840, 836, 8, 341, 4777, 393, 284, 220, 16, 15, 198, 18317, 1669, 526, 7, 16, 68, 21, 340, 743, 7497, 55958, 368, 341, 197, 18317, 16455, 220, 16, 15, 15, 198, 197, 532, 2405, 281, 22728, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestExtractInvalidDistributedContextFromHTTPReq(t *testing.T) { props := propagation.New(propagation.WithExtractors(correlation.CorrelationContext{})) tests := []struct { name string header string hasKVs []label.KeyValue }{ { name: "no key values", header: "header1", }, { name: "invalid header with existing context", header: "header2", hasKVs: []label.KeyValue{ label.String("key1", "val1"), label.String("key2", "val2"), }, }, { name: "empty header value", header: "", hasKVs: []label.KeyValue{ label.String("key1", "val1"), label.String("key2", "val2"), }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { req, _ := http.NewRequest("GET", "http://example.com", nil) req.Header.Set("otcorrelations", tt.header) ctx := correlation.NewContext(context.Background(), tt.hasKVs...) wantCorCtx := correlation.MapFromContext(ctx) ctx = propagation.ExtractHTTP(ctx, props, req.Header) gotCorCtx := correlation.MapFromContext(ctx) if gotCorCtx.Len() != wantCorCtx.Len() { t.Errorf( "Got and Want CorCtx are not the same size %d != %d", gotCorCtx.Len(), wantCorCtx.Len(), ) } totalDiff := "" wantCorCtx.Foreach(func(keyValue label.KeyValue) bool { val, _ := gotCorCtx.Value(keyValue.Key) diff := cmp.Diff(keyValue, label.KeyValue{Key: keyValue.Key, Value: val}, cmp.AllowUnexported(label.Value{})) if diff != "" { totalDiff += diff + "\n" } return true }) }) } }
explode_data.jsonl/30215
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 669 }
[ 2830, 3393, 28959, 7928, 35, 25146, 1972, 3830, 9230, 27234, 1155, 353, 8840, 836, 8, 341, 77691, 1669, 53643, 7121, 30638, 27137, 26124, 28959, 1087, 52620, 22221, 63560, 22221, 1972, 6257, 1171, 78216, 1669, 3056, 1235, 341, 197, 11609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConfigFindFoundByEmptyPrefix(t *testing.T) { confFile := configfile(`{"firefox":{"default":"49.0","versions":{"49.0":{}}}}`) defer os.Remove(confFile) conf := config.NewConfig() conf.Load(confFile, logConfPath) _, v, ok := conf.Find("firefox", "") AssertThat(t, ok, Is{true}) AssertThat(t, v, EqualTo{"49.0"}) }
explode_data.jsonl/37923
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 2648, 9885, 6650, 1359, 3522, 14335, 1155, 353, 8840, 836, 8, 341, 67850, 1703, 1669, 2193, 1192, 5809, 4913, 97912, 22317, 2258, 3252, 19, 24, 13, 15, 2198, 28290, 22317, 19, 24, 13, 15, 788, 90, 3417, 3417, 24183, 16867,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExample2(t *testing.T) { assert := assert.New(t) rancher := NewRancherMockClient() rancher.AddEnvironment(client.Project{Name: "prod", Resource: client.Resource{Id: "1a5"}}) rancher.AddEnvironment(client.Project{Name: "dev", Resource: client.Resource{Id: "1a6"}}) prod1 := client.Host{Hostname: "prod01.mysite.com", AccountId: "1a5"} prod2 := client.Host{Hostname: "prod02.mysite.com", AccountId: "1a5"} system := client.Stack{Name: "systemstack", AccountId: "1a5", Resource: client.Resource{Id: "2a1"}, ServiceIds: []string{"3a1"}, System: true} app1 := client.Stack{Name: "app1", AccountId: "1a5", Resource: client.Resource{Id: "2a2"}, ServiceIds: []string{"a11", "a12"}} app2 := client.Stack{Name: "app2", AccountId: "1a5", Resource: client.Resource{Id: "2a3"}, ServiceIds: []string{"a21", "a22"}} sys_serv := client.Service{Name: "system-service", AccountId: "1a5", Resource: client.Resource{Id: "3a1"}, StackId: "2a1", System: true, LaunchConfig: &client.LaunchConfig{Labels: map[string]interface{}{}}} app_serv11 := client.Service{Name: "app1-service1", AccountId: "1a5", Resource: client.Resource{Id: "a11"}, StackId: "2a2", LaunchConfig: &client.LaunchConfig{Labels: map[string]interface{}{}}} app_serv12 := client.Service{Name: "app1-service2", AccountId: "1a5", Resource: client.Resource{Id: "a12"}, StackId: "2a2", LaunchConfig: &client.LaunchConfig{Labels: map[string]interface{}{"monitor": "true"}}} app_serv21 := client.Service{Name: "app2-service1", AccountId: "1a5", Resource: client.Resource{Id: "a21"}, StackId: "2a3", LaunchConfig: &client.LaunchConfig{Labels: map[string]interface{}{}}} app_serv22 := client.Service{Name: "app2-service2", AccountId: "1a5", Resource: client.Resource{Id: "a22"}, StackId: "2a3", LaunchConfig: &client.LaunchConfig{Labels: map[string]interface{}{}}} rancher.AddStack(system) rancher.AddStack(app1) rancher.AddStack(app2) rancher.AddService(sys_serv) rancher.AddService(app_serv11) rancher.AddService(app_serv12) 
rancher.AddService(app_serv21) rancher.AddService(app_serv22) filterHosts := "*" filterStacks := "-*,%SYSTEM,%HAS_SERVICE(monitor=true)" filterServices := "-*,%SYSTEM,monitor=true" assert.True(filterHost(rancher, prod1, filterHosts)) assert.True(filterHost(rancher, prod2, filterHosts)) assert.True(filterStack(rancher, system, filterStacks)) assert.True(filterStack(rancher, app1, filterStacks)) assert.False(filterStack(rancher, app2, filterStacks)) assert.True(filterService(rancher, sys_serv, filterServices)) assert.False(filterService(rancher, app_serv11, filterServices)) assert.True(filterService(rancher, app_serv12, filterServices)) assert.False(filterService(rancher, app_serv21, filterServices)) assert.False(filterService(rancher, app_serv22, filterServices)) }
explode_data.jsonl/48868
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1021 }
[ 2830, 3393, 13314, 17, 1155, 353, 8840, 836, 8, 1476, 6948, 1669, 2060, 7121, 1155, 340, 7000, 3497, 261, 1669, 1532, 49, 3497, 261, 11571, 2959, 2822, 7000, 3497, 261, 1904, 12723, 12805, 30944, 63121, 25, 330, 19748, 497, 11765, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintRuntimeClass(t *testing.T) { tests := []struct { rc nodeapi.RuntimeClass expected []metav1.TableRow }{ { rc: nodeapi.RuntimeClass{ ObjectMeta: metav1.ObjectMeta{ Name: "rc1", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Handler: "h1", }, expected: []metav1.TableRow{{Cells: []interface{}{"rc1", "h1", "0s"}}}, }, { rc: nodeapi.RuntimeClass{ ObjectMeta: metav1.ObjectMeta{ Name: "rc2", CreationTimestamp: metav1.Time{Time: time.Now().Add(-3e11)}, }, Handler: "h2", }, expected: []metav1.TableRow{{Cells: []interface{}{"rc2", "h2", "5m"}}}, }, } for i, test := range tests { rows, err := printRuntimeClass(&test.rc, printers.GenerateOptions{}) if err != nil { t.Fatal(err) } for i := range rows { rows[i].Object.Object = nil } if !reflect.DeepEqual(test.expected, rows) { t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows)) } } }
explode_data.jsonl/21634
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 487 }
[ 2830, 3393, 8994, 15123, 1957, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 30295, 981, 2436, 2068, 16706, 1957, 198, 197, 42400, 3056, 4059, 402, 16, 18257, 3102, 198, 197, 59403, 197, 197, 515, 298, 30295, 25, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestVersion(t *testing.T) { text := ` +---------------------------+ | SVELTE | v3.0.0 | | SVETLANA_VERSION | v0.0.1 | +---------------------------+ ` SetEnvVars(text) tests := []Test{ {got: os.Getenv("SVELTE"), want: "v3.0.0"}, {got: os.Getenv("SVETLANA_VERSION"), want: "v0.0.1"}, } for _, test := range tests { expect.DeepEqual(t, test.got, test.want) } }
explode_data.jsonl/63319
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 5637, 1155, 353, 8840, 836, 8, 341, 15425, 1669, 22074, 10, 771, 18088, 16930, 91, 328, 11810, 2446, 1843, 760, 348, 18, 13, 15, 13, 15, 9248, 91, 17481, 1348, 23408, 32, 10678, 760, 348, 15, 13, 15, 13, 16, 9248, 10, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestClient_CoinsMarkets(t *testing.T) { t.Run("Default", func(t *testing.T) { ms, err := client.CoinsMarkets(CoinsMarketsParams{ VsCurrency: "usd", PerPage: 42, Sparkline: true, PriceChangePercentage: "1h,24h,7d", }) require.NoError(t, err) require.Equal(t, 42, len(ms)) first := ms[0] require.Equal(t, 168, len(first.SparklineIn7D.Price)) require.NotNil(t, first.PriceChangePercentage1HInCurrency) require.NotNil(t, first.PriceChangePercentage24HInCurrency) require.NotNil(t, first.PriceChangePercentage7DInCurrency) for _, v := range ms { require.NotEmpty(t, v.Id) require.NotEmpty(t, v.Symbol) require.NotEmpty(t, v.Name) require.NotEmpty(t, v.Image) require.NotEmpty(t, v.CurrentPrice) require.NotEmpty(t, v.MarketCap) //require.NotEmpty(t, v.TotalVolume) //require.NotEmpty(t, v.CirculatingSupply) //require.NotEmpty(t, v.TotalSupply) //require.NotEmpty(t, v.MaxSupply) require.NotNil(t, v.SparklineIn7D) } }) t.Run("Ids", func(t *testing.T) { ms, err := client.CoinsMarkets(CoinsMarketsParams{VsCurrency: "usd", Ids: []string{"polkadot", "solana"}}) require.NoError(t, err) require.Equal(t, 2, len(ms)) }) }
explode_data.jsonl/37790
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 577 }
[ 2830, 3393, 2959, 920, 68798, 8949, 1415, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 3675, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 47691, 11, 1848, 1669, 2943, 52114, 1330, 8949, 1415, 3025, 68798, 8949, 1415, 4870, 515, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestShutdownRanReconnection(t *testing.T) { _, _, readerMock, writerMock, ranReconnectionManager, _ := initRanLostConnectionTest(t) origNodebInfo := &entities.NodebInfo{RanName: ranName, GlobalNbId: &entities.GlobalNbId{PlmnId: "xxx", NbId: "yyy"}, ConnectionStatus: entities.ConnectionStatus_SHUT_DOWN} var rnibErr error readerMock.On("GetNodeb", ranName).Return(origNodebInfo, rnibErr) err := ranReconnectionManager.ReconnectRan(ranName) assert.Nil(t, err) readerMock.AssertCalled(t, "GetNodeb", ranName) writerMock.AssertNotCalled(t, "UpdateNodebInfo") }
explode_data.jsonl/42898
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 62004, 49, 276, 693, 7742, 1155, 353, 8840, 836, 8, 341, 197, 6878, 8358, 6604, 11571, 11, 6916, 11571, 11, 10613, 693, 7742, 2043, 11, 716, 1669, 2930, 49, 276, 47253, 4526, 2271, 1155, 692, 197, 4670, 1955, 65, 1731, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateServerHooks_InvalidInput(t *testing.T) { var tests = []struct { name string hooksInput map[string][]models.Hook }{ {"One bad hook, one script", map[string][]models.Hook{"NotHook": { { Location: "script-location", Timeout: "10", Runas: "user-name", }, }}, }, {"One hook, multiple scripts, bad timeout", map[string][]models.Hook{"ApplicationStop": { { Location: "script-location", Timeout: "10", Runas: "user-name", }, { Location: "script-location", Timeout: "3600", Runas: "user-name", }, }}, }, {"One hook, multiple scripts, missing location value", map[string][]models.Hook{"ApplicationStop": { { Location: "", Timeout: "10", Runas: "user-name", }, { Location: "script-location", Timeout: "10", Runas: "user-name", }, }}, }, {"One hook, multiple scripts, missing location key", map[string][]models.Hook{"ApplicationStop": { { Timeout: "10", Runas: "user-name", }, { Location: "script-location", Timeout: "10", Runas: "user-name", }, }}, }, {"Multiple hooks, multiple scripts, one bad hook", map[string][]models.Hook{"ApplicationStop": { { Location: "script-location", Timeout: "10", Runas: "user-name", }, { Location: "script-location", Timeout: "10", Runas: "user-name", }, }, "NotHook": { { Location: "script-location", Timeout: "10", Runas: "user-name", }, }}, }, } for _, test := range tests { output := validateServerHooks(test.hooksInput) if output == true { t.Errorf("The validateServerHooks function succeeded but should have failed for: %v", test) } } }
explode_data.jsonl/71211
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 880 }
[ 2830, 3393, 17926, 5475, 67769, 62, 7928, 2505, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 9598, 14685, 2505, 2415, 14032, 45725, 6507, 3839, 1941, 198, 197, 59403, 197, 197, 4913, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRecursiveStructure(t *testing.T) { protest.AllowRecording(t) withTestProcess("testvariables2", t, func(p *proc.Target, fixture protest.Fixture) { assertNoError(p.Continue(), t, "Continue()") v := evalVariable(p, t, "aas") t.Logf("v: %v\n", v) }) }
explode_data.jsonl/56230
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 78542, 22952, 1155, 353, 8840, 836, 8, 341, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 1944, 18616, 17, 497, 259, 11, 2915, 1295, 353, 15782, 35016, 11, 12507, 8665, 991, 12735, 8, 341, 197, 6948, 275...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRedeemWinningTickets_MultipleTicketsFromMultipleSessions verifies that
// a recipient can redeem winning tickets received under two separate ticket
// sessions in a single RedeemWinningTickets call, and that per-session state
// (used tickets, invalidated recipientRands, sender nonces) is updated.
func TestRedeemWinningTickets_MultipleTicketsFromMultipleSessions(t *testing.T) {
	sender, b, v, ts, faceValue, winProb, sig := newRecipientFixtureOrFatal(t)
	secret := [32]byte{3}
	r := NewRecipientWithSecret(RandAddress(), b, v, ts, secret, faceValue, winProb)
	// Config stub validator with valid winning tickets
	v.SetIsWinningTicket(true)
	require := require.New(t)
	// Receive a winning ticket under a first set of ticket params (session 0).
	params0 := ticketParamsOrFatal(t, r, sender)
	ticket0 := newTicket(sender, params0, 1)
	sessionID0, won, err := r.ReceiveTicket(ticket0, sig, params0.Seed)
	require.Nil(err)
	require.True(won)
	// Receive a second winning ticket under fresh params (session 1).
	params1 := ticketParamsOrFatal(t, r, sender)
	ticket1 := newTicket(sender, params1, 1)
	sessionID1, won, err := r.ReceiveTicket(ticket1, sig, params1.Seed)
	require.Nil(err)
	require.True(won)
	require.NotEqual(sessionID0, sessionID1)
	// Redeem tickets from both sessions in one call.
	err = r.RedeemWinningTickets([]string{sessionID0, sessionID1})
	assert := assert.New(t)
	assert.Nil(err)
	// Both tickets should be recorded as used by the broker stub.
	used, err := b.IsUsedTicket(ticket0)
	require.Nil(err)
	assert.True(used)
	used, err = b.IsUsedTicket(ticket1)
	require.Nil(err)
	assert.True(used)
	// Each session's recipientRand must now be invalidated...
	recipientRand0 := genRecipientRand(sender, secret, params0.Seed)
	recipientRand1 := genRecipientRand(sender, secret, params1.Seed)
	_, ok := r.(*recipient).invalidRands.Load(recipientRand0.String())
	assert.True(ok)
	_, ok = r.(*recipient).invalidRands.Load(recipientRand1.String())
	assert.True(ok)
	// ...and the tracked sender nonces for both sessions cleared.
	_, ok = r.(*recipient).senderNonces[recipientRand0.String()]
	assert.False(ok)
	_, ok = r.(*recipient).senderNonces[recipientRand1.String()]
	assert.False(ok)
}
explode_data.jsonl/44768
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 592 }
[ 2830, 3393, 6033, 68, 336, 16970, 1229, 55321, 1245, 12229, 55321, 3830, 32089, 59062, 1155, 353, 8840, 836, 8, 341, 1903, 1659, 11, 293, 11, 348, 11, 10591, 11, 3579, 1130, 11, 3164, 36980, 11, 8366, 1669, 501, 74432, 18930, 2195, 62...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSignVerifyKeyRing exercises the keyring end to end: it creates two
// mnemonic-backed keys, signs several messages with each, checks that
// VerifySignature only succeeds for matching (key, data, signature) triples,
// and finally confirms that a pubkey-only (offline) key cannot sign.
func TestSignVerifyKeyRing(t *testing.T) {
	dir := t.TempDir()
	kb, err := New("keybasename", "test", dir, nil)
	require.NoError(t, err)
	algo := hd.Secp256k1
	n1, n2, n3 := "some dude", "a dudette", "dude-ish"
	// create two users and get their info
	i1, _, err := kb.NewMnemonic(n1, English, sdk.FullFundraiserPath, algo)
	require.Nil(t, err)
	i2, _, err := kb.NewMnemonic(n2, English, sdk.FullFundraiserPath, algo)
	require.Nil(t, err)
	// let's try to sign some messages
	d1 := []byte("my first message")
	d2 := []byte("some other important info!")
	d3 := []byte("feels like I forgot something...")
	// try signing both data with both ..
	s11, pub1, err := kb.Sign(n1, d1)
	require.Nil(t, err)
	require.Equal(t, i1.GetPubKey(), pub1)
	s12, pub1, err := kb.Sign(n1, d2)
	require.Nil(t, err)
	require.Equal(t, i1.GetPubKey(), pub1)
	s21, pub2, err := kb.Sign(n2, d1)
	require.Nil(t, err)
	require.Equal(t, i2.GetPubKey(), pub2)
	s22, pub2, err := kb.Sign(n2, d2)
	require.Nil(t, err)
	require.Equal(t, i2.GetPubKey(), pub2)
	// let's try to validate and make sure it only works when everything is proper
	cases := []struct {
		key   types.PubKey
		data  []byte
		sig   []byte
		valid bool
	}{
		// proper matches
		{i1.GetPubKey(), d1, s11, true},
		// change data, pubkey, or signature leads to fail
		{i1.GetPubKey(), d2, s11, false},
		{i2.GetPubKey(), d1, s11, false},
		{i1.GetPubKey(), d1, s21, false},
		// make sure other successes
		{i1.GetPubKey(), d2, s12, true},
		{i2.GetPubKey(), d1, s21, true},
		{i2.GetPubKey(), d2, s22, true},
	}
	for i, tc := range cases {
		valid := tc.key.VerifySignature(tc.data, tc.sig)
		require.Equal(t, tc.valid, valid, "%d", i)
	}
	// Now try to sign data with a secret-less key
	// Import a public key
	armor, err := kb.ExportPubKeyArmor(n2)
	require.NoError(t, err)
	require.NoError(t, kb.Delete(n2))
	require.NoError(t, kb.ImportPubKey(n3, armor))
	i3, err := kb.Key(n3)
	require.NoError(t, err)
	require.Equal(t, i3.GetName(), n3)
	// Signing with the pubkey-only entry must fail with a clear message.
	_, _, err = kb.Sign(n3, d3)
	require.Error(t, err)
	require.Equal(t, "cannot sign with offline keys", err.Error())
}
explode_data.jsonl/73435
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 913 }
[ 2830, 3393, 7264, 32627, 1592, 43466, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 259, 65009, 6184, 2822, 16463, 65, 11, 1848, 1669, 1532, 445, 792, 42953, 497, 330, 1944, 497, 5419, 11, 2092, 340, 17957, 35699, 1155, 11, 1848, 340, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestKeylevelValidation validates a transaction writing to a key that
// carries key-level validation parameters, once with a succeeding policy
// evaluator and once with a failing one, expecting a
// VSCCEndorsementPolicyError in the failure case.
func TestKeylevelValidation(t *testing.T) {
	t.Parallel()
	// Scenario: we validate a transaction that writes
	// to a key that contains key-level validation params.
	// We simulate policy check success and failure
	vpMetadataKey := pb.MetaDataKeys_VALIDATION_PARAMETER.String()
	mr := &mockState{GetStateMetadataRv: map[string][]byte{vpMetadataKey: []byte("EP")}, GetPrivateDataMetadataByHashRv: map[string][]byte{vpMetadataKey: []byte("EP")}}
	ms := &mockStateFetcher{FetchStateRv: mr}
	pm := &KeyLevelValidationParameterManagerImpl{PolicyTranslator: &mockTranslator{}, StateFetcher: ms}
	pe := &mockPolicyEvaluator{}
	validator := NewKeyLevelValidator(NewV13Evaluator(pe, pm), pm)
	rwsb := rwsetBytes(t, "cc")
	prp := []byte("barf")
	// Block with two txs touching the same key so tx 1 depends on tx 0.
	block := buildBlockWithTxs(buildTXWithRwset(rwsetUpdatingMetadataFor("cc", "key")), buildTXWithRwset(rwsetUpdatingMetadataFor("cc", "key")))
	validator.PreValidate(1, block)
	endorsements := []*pb.Endorsement{
		{
			Signature: []byte("signature"),
			Endorser:  []byte("endorser"),
		},
	}
	// Concurrently mark tx 0 invalid so Validate for tx 1 can unblock.
	go func() {
		validator.PostValidate("cc", 1, 0, fmt.Errorf(""))
	}()
	// Stub evaluator returns nil -> validation succeeds.
	err := validator.Validate("cc", 1, 1, rwsb, prp, []byte("CCEP"), endorsements)
	assert.NoError(t, err)
	// Now make policy evaluation fail -> expect an endorsement policy error.
	pe.EvaluateRV = fmt.Errorf("policy evaluation error")
	err = validator.Validate("cc", 1, 1, rwsb, prp, []byte("CCEP"), endorsements)
	assert.Error(t, err)
	assert.IsType(t, &errors.VSCCEndorsementPolicyError{}, err)
}
explode_data.jsonl/80485
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 541 }
[ 2830, 3393, 6608, 967, 889, 13799, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 58663, 25, 582, 9593, 264, 7745, 429, 13914, 198, 197, 322, 311, 264, 1376, 429, 5610, 1376, 11591, 10519, 3628, 624, 197, 322, 1205, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUploadOrderMultiFailureLeaveParts(t *testing.T) { s, ops, _ := loggingSvc(emptyList) s.Handlers.Send.PushBack(func(r *request.Request) { switch data := r.Data.(type) { case *s3.UploadPartOutput: if *data.ETag == "ETAG2" { r.HTTPResponse.StatusCode = 400 } } }) mgr := s3manager.NewUploaderWithClient(s, func(u *s3manager.Uploader) { u.Concurrency = 1 u.LeavePartsOnError = true }) _, err := mgr.Upload(&s3manager.UploadInput{ Bucket: aws.String("Bucket"), Key: aws.String("Key"), Body: bytes.NewReader(make([]byte, 1024*1024*12)), }) if err == nil { t.Error("Expected error, but receievd nil") } if e, a := []string{"CreateMultipartUpload", "UploadPart", "UploadPart"}, *ops; !reflect.DeepEqual(e, a) { t.Errorf("Expected %v, but received %v", e, a) } }
explode_data.jsonl/55641
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 341 }
[ 2830, 3393, 13844, 4431, 20358, 17507, 21833, 28921, 1155, 353, 8840, 836, 8, 341, 1903, 11, 27132, 11, 716, 1669, 8392, 92766, 24216, 852, 340, 1903, 35308, 9254, 20176, 34981, 3707, 18552, 2601, 353, 2035, 9659, 8, 341, 197, 8961, 821...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFQMNtoOpenAPIName(t *testing.T) { var tests = []struct { input string expected string }{ {"/test", "/test"}, {"/{test}", "/{test}"}, {"/{test=prefix/*}", "/{test}"}, {"/{test=prefix/that/has/multiple/parts/to/it/*}", "/{test}"}, {"/{test1}/{test2}", "/{test1}/{test2}"}, {"/{test1}/{test2}/", "/{test1}/{test2}/"}, } reg := descriptor.NewRegistry() reg.SetUseJSONNamesForFields(false) for _, data := range tests { actual := templateToOpenAPIPath(data.input, reg, generateFieldsForJSONReservedName(), generateMsgsForJSONReservedName()) if data.expected != actual { t.Errorf("Expected templateToOpenAPIPath(%v) = %v, actual: %v", data.input, data.expected, actual) } } reg.SetUseJSONNamesForFields(true) for _, data := range tests { actual := templateToOpenAPIPath(data.input, reg, generateFieldsForJSONReservedName(), generateMsgsForJSONReservedName()) if data.expected != actual { t.Errorf("Expected templateToOpenAPIPath(%v) = %v, actual: %v", data.input, data.expected, actual) } } }
explode_data.jsonl/32802
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 415 }
[ 2830, 3393, 37, 48, 55181, 983, 5002, 7082, 675, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 22427, 262, 914, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 90, 3115, 1944, 497, 3521, 1944, 7115, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestMaterializerNoGoodVindex verifies that Materialize fails with a clear
// error when the target keyspace's only vindex (lookup_unique) cannot be
// used to compute a keyspace id for the materialized table.
func TestMaterializerNoGoodVindex(t *testing.T) {
	ms := &vtctldatapb.MaterializeSettings{
		Workflow:       "workflow",
		SourceKeyspace: "sourceks",
		TargetKeyspace: "targetks",
		TableSettings: []*vtctldatapb.TableMaterializeSettings{{
			TargetTable:      "t1",
			SourceExpression: "select * from t1",
			CreateDdl:        "t1ddl",
		}},
	}
	// Single-shard source ("0"), two-shard target ("-80", "80-").
	env := newTestMaterializerEnv(t, ms, []string{"0"}, []string{"-80", "80-"})
	defer env.close()
	// Sharded vschema whose only vindex is a lookup — unusable for
	// computing keyspace ids during materialization.
	vs := &vschemapb.Keyspace{
		Sharded: true,
		Vindexes: map[string]*vschemapb.Vindex{
			"lookup_unique": {
				Type: "lookup_unique",
			},
		},
		Tables: map[string]*vschemapb.Table{
			"t1": {
				ColumnVindexes: []*vschemapb.ColumnVindex{{
					Column: "c1",
					Name:   "lookup_unique",
				}},
			},
		},
	}
	if err := env.topoServ.SaveVSchema(context.Background(), "targetks", vs); err != nil {
		t.Fatal(err)
	}
	// Expected frozen-workflow checks against both target tablets.
	env.tmc.expectVRQuery(200, mzSelectFrozenQuery, &sqltypes.Result{})
	env.tmc.expectVRQuery(210, mzSelectFrozenQuery, &sqltypes.Result{})
	err := env.wr.Materialize(context.Background(), ms)
	require.EqualError(t, err, "could not find a vindex to compute keyspace id for table t1")
}
explode_data.jsonl/61881
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 509 }
[ 2830, 3393, 13415, 3135, 2753, 15216, 53, 1252, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 609, 9708, 302, 507, 266, 391, 65, 44253, 551, 6086, 515, 197, 197, 62768, 25, 981, 330, 56249, 756, 197, 197, 3608, 8850, 1306, 25, 330, 242...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestExternalStageCreate verifies that creating a stage resource with a URL
// emits the expected CREATE STAGE statement (with URL and COMMENT clauses)
// against a mocked database connection.
func TestExternalStageCreate(t *testing.T) {
	r := require.New(t)
	in := map[string]interface{}{
		"name":     "test_stage",
		"database": "test_db",
		"url":      "s3://com.example.bucket/prefix",
		"schema":   "test_schema",
		"comment":  "great comment",
	}
	d := schema.TestResourceDataRaw(t, resources.Stage().Schema, in)
	r.NotNil(d)
	WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) {
		// Regex anchored on both ends: the statement must match exactly.
		mock.ExpectExec(
			`^CREATE STAGE "test_db"."test_schema"."test_stage" URL = 's3://com.example.bucket/prefix' COMMENT = 'great comment'$`,
		).WillReturnResult(sqlmock.NewResult(1, 1))
		// CreateStage re-reads the stage after creation.
		expectReadStage(mock)
		expectReadStageShow(mock)
		err := resources.CreateStage(d, db)
		r.NoError(err)
	})
}
explode_data.jsonl/59821
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 304 }
[ 2830, 3393, 25913, 19398, 4021, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 692, 17430, 1669, 2415, 14032, 31344, 67066, 197, 197, 31486, 788, 257, 330, 1944, 35238, 756, 197, 197, 1, 12216, 788, 330, 1944, 8685, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIncrementAfterDeleteKeyValueString checks that `+=` on a map entry
// whose key is absent (here, deleted beforehand) appends to the zero-value
// empty string rather than to stale data.
func TestIncrementAfterDeleteKeyValueString(t *testing.T) {
	const (
		key1 = ""
		key2 = "x"
	)
	values := map[string]string{key1: "99"}
	delete(values, key1)
	// key2 was never set; += must operate on the zero value "".
	values[key2] += "1"
	got := values[key2]
	if got != "1" {
		t.Errorf("appended '1' to empty (nil) string, got %s", got)
	}
}
explode_data.jsonl/19932
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 38311, 6025, 6435, 72082, 703, 1155, 353, 8840, 836, 8, 341, 4777, 1376, 16, 284, 8389, 4777, 1376, 17, 284, 330, 87, 1837, 2109, 1669, 1281, 9147, 14032, 30953, 340, 2109, 8157, 16, 60, 284, 330, 24, 24, 698, 15618, 125...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestImageManifestMerge(t *testing.T) { imj := `{"name": "example.com/test"}` im := &ImageManifest{} if im.UnmarshalJSON([]byte(imj)) == nil { t.Fatal("Manifest JSON without acKind and acVersion unmarshalled successfully") } im = BlankImageManifest() err := im.UnmarshalJSON([]byte(imj)) if err != nil { t.Errorf("unexpected error: %v", err) } }
explode_data.jsonl/74172
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 1906, 38495, 52096, 1155, 353, 8840, 836, 8, 341, 54892, 73, 1669, 1565, 4913, 606, 788, 330, 8687, 905, 12697, 9207, 3989, 54892, 1669, 609, 1906, 38495, 31483, 743, 732, 38097, 5370, 10556, 3782, 25107, 73, 593, 621, 2092,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestPromParseErrors feeds malformed Prometheus exposition-format inputs to
// the parser and asserts that iteration ends with exactly the expected error
// message for each case.
func TestPromParseErrors(t *testing.T) {
	cases := []struct {
		input string
		err   string
	}{
		{
			input: "a",
			err:   "expected value after metric, got \"MNAME\"",
		},
		{
			input: "a{b='c'} 1\n",
			err:   "expected label value, got \"INVALID\"",
		},
		{
			input: "a{b=\n",
			err:   "expected label value, got \"INVALID\"",
		},
		{
			// \xff is not valid UTF-8 in a label name.
			input: "a{\xff=\"foo\"} 1\n",
			err:   "expected label name, got \"INVALID\"",
		},
		{
			input: "a{b=\"\xff\"} 1\n",
			err:   "invalid UTF-8 label value",
		},
		{
			input: "a true\n",
			err:   "strconv.ParseFloat: parsing \"true\": invalid syntax",
		},
		{
			input: "something_weird{problem=\"",
			err:   "expected label value, got \"INVALID\"",
		},
		{
			input: "empty_label_name{=\"\"} 0",
			err:   "expected label name, got \"EQUAL\"",
		},
		{
			// Go-style numeric literals are not valid exposition floats.
			input: "foo 1_2\n",
			err:   "unsupported character in float",
		},
		{
			input: "foo 0x1p-3\n",
			err:   "unsupported character in float",
		},
		{
			input: "foo 0x1P-3\n",
			err:   "unsupported character in float",
		},
		{
			input: "foo 0 1_2\n",
			err:   "expected next entry after timestamp, got \"MNAME\"",
		},
		{
			input: `{a="ok"} 1`,
			err:   `"INVALID" is not a valid start token`,
		},
	}
	for i, c := range cases {
		p := NewPromParser([]byte(c.input))
		var err error
		// Drain the parser until it reports an error (or EOF-as-error).
		for err == nil {
			_, err = p.Next()
		}
		require.Error(t, err)
		require.Equal(t, c.err, err.Error(), "test %d", i)
	}
}
explode_data.jsonl/65245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 688 }
[ 2830, 3393, 35186, 14463, 13877, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 22427, 914, 198, 197, 9859, 256, 914, 198, 197, 59403, 197, 197, 515, 298, 22427, 25, 330, 64, 756, 298, 9859, 25, 256, 330, 7325...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestSugaredLogger checks the SugaredLogger accessor: it returns
// ErrNotInitialized while the package-global `sugar` is nil, and returns the
// stored logger once it has been set.
func TestSugaredLogger(t *testing.T) {
	testCases := []struct {
		name    string
		want    *zap.SugaredLogger
		wantErr bool
	}{
		{
			name:    "should return error: not initialized",
			want:    nil,
			wantErr: true,
		},
		{
			name:    "should return the initialized sugared logger",
			want:    zap.NewExample().Sugar(),
			wantErr: false,
		},
	}
	// Index loop + copy avoids closure capture of the loop variable.
	for i := range testCases {
		tc := testCases[i]
		t.Run(tc.name, func(t *testing.T) {
			// sugar is global logger variable in the flog package
			sugar = tc.want
			got, err := SugaredLogger()
			if tc.wantErr {
				assert.Error(t, err)
				assert.IsType(t, ErrNotInitialized, err)
				return
			}
			assert.NotNil(t, got)
			assert.Equal(t, tc.want, got)
		})
	}
}
explode_data.jsonl/73945
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 50, 768, 1605, 7395, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 50780, 262, 353, 92371, 808, 768, 1605, 7395, 198, 197, 50780, 7747, 1807, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestMatchEntities exercises matchEntities against three fixture entities
// and a table of proxy-request entity attribute expressions: string, bool,
// and nested attribute matches, multiple matches, invalid expressions, and
// multiple ANDed attributes.
func TestMatchEntities(t *testing.T) {
	// Proxy entity labeled as a switch.
	entity1 := &corev3.EntityConfig{
		Metadata: &corev2.ObjectMeta{
			Name:      "entity1",
			Namespace: "default",
			Labels:    map[string]string{"proxy_type": "switch"},
		},
		EntityClass: "proxy",
	}
	// Proxy entity labeled as a sensor, flagged for deregistration.
	entity2 := &corev3.EntityConfig{
		Metadata: &corev2.ObjectMeta{
			Name:      "entity2",
			Namespace: "default",
			Labels:    map[string]string{"proxy_type": "sensor"},
		},
		Deregister:  true,
		EntityClass: "proxy",
	}
	// Agent entity with no labels.
	entity3 := &corev3.EntityConfig{
		Metadata: &corev2.ObjectMeta{
			Name:      "entity3",
			Namespace: "default",
		},
		EntityClass: "agent",
	}
	tests := []struct {
		name             string
		entityAttributes []string
		entities         []corev3.Resource
		want             []*corev3.EntityConfig
	}{
		{
			name:             "standard string attribute",
			entityAttributes: []string{`entity.name == "entity1"`},
			entities:         []corev3.Resource{entity1, entity2, entity3},
			want:             []*corev3.EntityConfig{entity1},
		},
		{
			name:             "standard bool attribute",
			entityAttributes: []string{`entity.deregister == true`},
			entities:         []corev3.Resource{entity1, entity2, entity3},
			want:             []*corev3.EntityConfig{entity2},
		},
		{
			name:             "nested standard attribute",
			entityAttributes: []string{`entity.metadata.name == "entity1"`},
			entities:         []corev3.Resource{entity1, entity2, entity3},
			want:             []*corev3.EntityConfig{entity1},
		},
		{
			name:             "multiple matches",
			entityAttributes: []string{`entity.entity_class == "proxy"`},
			entities:         []corev3.Resource{entity1, entity2, entity3},
			want:             []*corev3.EntityConfig{entity1, entity2},
		},
		{
			// Invalid expression matches nothing (want is nil).
			name:             "invalid expression",
			entityAttributes: []string{`foo &&`},
			entities:         []corev3.Resource{entity1, entity2, entity3},
		},
		{
			// All attributes must match (logical AND).
			name: "multiple entity attributes",
			entityAttributes: []string{
				`entity.entity_class == "proxy"`,
				`entity.metadata.labels.proxy_type == "sensor"`,
			},
			entities: []corev3.Resource{entity1, entity2, entity3},
			want:     []*corev3.EntityConfig{entity2},
		},
	}
	for _, tc := range tests {
		t.Run(tc.name, func(t *testing.T) {
			p := &corev2.ProxyRequests{
				EntityAttributes: tc.entityAttributes,
			}
			cacher := cachev2.NewFromResources(tc.entities, true)
			got := matchEntities(cacher.Get("default"), p)
			if len(got) != len(tc.want) {
				t.Errorf("Expected %d entities, got %d", len(tc.want), len(got))
				return
			}
			// Matches must come back in the expected order.
			for i := range tc.want {
				if !reflect.DeepEqual(got[i], tc.want[i]) {
					t.Errorf("MatchEntities() = %v, want %v", got, tc.want)
					return
				}
			}
		})
	}
}
explode_data.jsonl/69786
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1261 }
[ 2830, 3393, 8331, 15828, 1155, 353, 8840, 836, 8, 341, 52987, 16, 1669, 609, 98645, 18, 9899, 2648, 515, 197, 9209, 7603, 25, 609, 98645, 17, 80222, 515, 298, 21297, 25, 414, 330, 2996, 16, 756, 298, 90823, 25, 330, 2258, 756, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStartStop(t *testing.T) { n := addrmgr.New("teststartstop", lookupFunc) n.Start() err := n.Stop() if err != nil { t.Fatalf("Address Manager failed to stop: %v", err) } }
explode_data.jsonl/49087
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 3479, 10674, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 912, 8719, 901, 7121, 445, 1944, 2468, 9495, 497, 18615, 9626, 340, 9038, 12101, 741, 9859, 1669, 308, 30213, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestGracefulClose(t *testing.T) { testutils.WithTestServer(t, nil, func(t testing.TB, ts *testutils.TestServer) { ch2 := ts.NewServer(nil) hp2 := ch2.PeerInfo().HostPort defer ch2.Close() ctx, cancel := NewContext(time.Second) defer cancel() assert.NoError(t, ts.Server().Ping(ctx, hp2), "Ping from ch1 -> ch2 failed") assert.NoError(t, ch2.Ping(ctx, ts.HostPort()), "Ping from ch2 -> ch1 failed") // No stats for pings. ts.AssertRelayStats(relaytest.NewMockStats()) }) }
explode_data.jsonl/78197
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 201 }
[ 2830, 3393, 86543, 1262, 7925, 1155, 353, 8840, 836, 8, 341, 18185, 6031, 26124, 2271, 5475, 1155, 11, 2092, 11, 2915, 1155, 7497, 836, 33, 11, 10591, 353, 1944, 6031, 8787, 5475, 8, 341, 197, 23049, 17, 1669, 10591, 7121, 5475, 27907...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_DefaultIstioPerformer_Version covers DefaultIstioPerformer.Version:
// resolver failure, empty istioctl output, missing target version, a
// cluster without Istio (client version only), and a cluster with Istio
// (client, pilot, and data-plane versions all reported).
func Test_DefaultIstioPerformer_Version(t *testing.T) {
	kubeConfig := "kubeConfig"
	log := logger.NewLogger(false)
	t.Run("should not proceed if the istio version could not be resolved", func(t *testing.T) {
		// given
		factory := &workspacemocks.Factory{}
		factory.On("Get", mock.AnythingOfType("string")).Return(&chart.KymaWorkspace{ResourceDir: "../test_files"}, nil)
		// Resolver fails before any command is run.
		cmdResolver := TestCommanderResolver{err: errors.New("istioctl not found")}
		proxy := proxymocks.IstioProxyReset{}
		provider := clientsetmocks.Provider{}
		wrapper := NewDefaultIstioPerformer(cmdResolver, &proxy, &provider)
		// when
		ver, err := wrapper.Version(factory, "version", "istio-test", kubeConfig, log)
		// then
		require.Empty(t, ver)
		require.Error(t, err)
		require.Equal(t, "istioctl not found", err.Error())
	})
	t.Run("should not proceed if the version command output returns an empty string", func(t *testing.T) {
		// given
		cmder := istioctlmocks.Commander{}
		factory := &workspacemocks.Factory{}
		factory.On("Get", mock.AnythingOfType("string")).Return(&chart.KymaWorkspace{ResourceDir: "../test_files"}, nil)
		// Commander returns no output at all.
		cmder.On("Version", mock.AnythingOfType("string"), mock.AnythingOfType("*zap.SugaredLogger")).Return([]byte(""), nil)
		cmdResolver := TestCommanderResolver{cmder: &cmder}
		proxy := proxymocks.IstioProxyReset{}
		provider := clientsetmocks.Provider{}
		wrapper := NewDefaultIstioPerformer(cmdResolver, &proxy, &provider)
		// when
		ver, err := wrapper.Version(factory, "version", "istio-test", kubeConfig, log)
		// then
		require.Empty(t, ver)
		require.Error(t, err)
		require.Contains(t, err.Error(), "command is empty")
	})
	t.Run("should not proceed if the targetVersion is not found", func(t *testing.T) {
		// given
		factory := &workspacemocks.Factory{}
		// Workspace without ResourceDir -> target version cannot be read.
		factory.On("Get", mock.AnythingOfType("string")).Return(&chart.KymaWorkspace{}, nil)
		cmder := istioctlmocks.Commander{}
		cmder.On("Version", mock.AnythingOfType("string"), mock.AnythingOfType("*zap.SugaredLogger")).Return([]byte(""), nil)
		cmdResolver := TestCommanderResolver{cmder: &cmder}
		proxy := proxymocks.IstioProxyReset{}
		provider := clientsetmocks.Provider{}
		wrapper := NewDefaultIstioPerformer(cmdResolver, &proxy, &provider)
		// when
		ver, err := wrapper.Version(factory, "version", "istio-test", kubeConfig, log)
		// then
		require.Empty(t, ver)
		require.Error(t, err)
		require.Contains(t, err.Error(), "Target Version could not be found")
	})
	t.Run("should get only the client version when istio is not yet installed on the cluster", func(t *testing.T) {
		// given
		factory := &workspacemocks.Factory{}
		factory.On("Get", mock.AnythingOfType("string")).Return(&chart.KymaWorkspace{ResourceDir: "../test_files"}, nil)
		cmder := istioctlmocks.Commander{}
		// Fixture output containing only the client version.
		cmder.On("Version", mock.AnythingOfType("string"), mock.AnythingOfType("*zap.SugaredLogger")).Return([]byte(istioctlMockSimpleVersion), nil)
		cmdResolver := TestCommanderResolver{cmder: &cmder}
		proxy := proxymocks.IstioProxyReset{}
		provider := clientsetmocks.Provider{}
		wrapper := NewDefaultIstioPerformer(cmdResolver, &proxy, &provider)
		// when
		ver, err := wrapper.Version(factory, "version", "istio-test", kubeConfig, log)
		// then
		require.EqualValues(t, IstioStatus{ClientVersion: "1.11.2", TargetVersion: "1.2.3-solo-fips-distroless", TargetPrefix: "anything/anything"}, ver)
		require.NoError(t, err)
		cmder.AssertCalled(t, "Version", mock.AnythingOfType("string"), mock.AnythingOfType("*zap.SugaredLogger"))
		cmder.AssertNumberOfCalls(t, "Version", 1)
	})
	t.Run("should get all the expected versions when istio installed on the cluster", func(t *testing.T) {
		// given
		factory := &workspacemocks.Factory{}
		factory.On("Get", mock.AnythingOfType("string")).Return(&chart.KymaWorkspace{ResourceDir: "../test_files"}, nil)
		cmder := istioctlmocks.Commander{}
		// Fixture output including pilot and data-plane versions.
		cmder.On("Version", mock.AnythingOfType("string"), mock.AnythingOfType("*zap.SugaredLogger")).Return([]byte(istioctlMockCompleteVersion), nil)
		cmdResolver := TestCommanderResolver{cmder: &cmder}
		proxy := proxymocks.IstioProxyReset{}
		provider := clientsetmocks.Provider{}
		wrapper := NewDefaultIstioPerformer(cmdResolver, &proxy, &provider)
		// when
		ver, err := wrapper.Version(factory, "version", "istio-test", kubeConfig, log)
		// then
		require.EqualValues(t, IstioStatus{ClientVersion: "1.11.1", TargetVersion: "1.2.3-solo-fips-distroless", TargetPrefix: "anything/anything", PilotVersion: "1.11.1", DataPlaneVersion: "1.11.1"}, ver)
		require.NoError(t, err)
		cmder.AssertCalled(t, "Version", mock.AnythingOfType("string"), mock.AnythingOfType("*zap.SugaredLogger"))
		cmder.AssertNumberOfCalls(t, "Version", 1)
	})
}
explode_data.jsonl/67284
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1730 }
[ 2830, 3393, 60336, 40, 267, 815, 3889, 34527, 85217, 1155, 353, 8840, 836, 8, 1476, 16463, 3760, 2648, 1669, 330, 97717, 2648, 698, 6725, 1669, 5925, 7121, 7395, 3576, 692, 3244, 16708, 445, 5445, 537, 10354, 421, 279, 5999, 815, 2319, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestFactory_AcquirePage walks the page factory lifecycle: construction
// failure, construction success, acquiring/getting/duplicating pages, mmap
// failure, and the errFactoryClosed behavior after Close. It stubs the
// package-level mkDirFunc/mapFileFunc hooks and restores them on exit.
func TestFactory_AcquirePage(t *testing.T) {
	defer func() {
		// Clean up on-disk state and restore the real fs hooks.
		_ = fileutil.RemoveDir(testPath)
		mkDirFunc = fileutil.MkDirIfNotExist
		mapFileFunc = fileutil.RWMap
	}()
	// case 1: new factory err
	mkDirFunc = func(path string) error {
		return fmt.Errorf("err")
	}
	fct, err := NewFactory(testPath, 128)
	assert.Error(t, err)
	assert.Nil(t, fct)
	mkDirFunc = fileutil.MkDirIfNotExist
	// case 2: new factory success
	fct, err = NewFactory(testPath, 128)
	assert.NoError(t, err)
	assert.NotNil(t, fct)
	// case 3: acquire page success
	page1, err := fct.AcquirePage(0)
	assert.NoError(t, err)
	assert.NotNil(t, page1)
	p1, ok := fct.GetPage(0)
	assert.True(t, ok)
	assert.Equal(t, p1, page1)
	// Unknown page index yields (nil, false).
	p1, ok = fct.GetPage(10)
	assert.False(t, ok)
	assert.Nil(t, p1)
	// get duplicate page
	// Re-acquiring an existing index returns the same page.
	page2, err := fct.AcquirePage(0)
	assert.NoError(t, err)
	assert.Equal(t, page1, page2)
	// case 4: get page err
	mapFileFunc = func(filePath string, size int) ([]byte, error) {
		return nil, fmt.Errorf("err")
	}
	page2, err = fct.AcquirePage(2)
	assert.Error(t, err)
	assert.Nil(t, page2)
	mapFileFunc = fileutil.RWMap
	// Size reflects the single successfully acquired 128-byte page.
	assert.Equal(t, int64(128), fct.Size())
	err = fct.Close()
	assert.NoError(t, err)
	// case 5: acquire page after close
	page2, err = fct.AcquirePage(2)
	assert.Equal(t, errFactoryClosed, err)
	assert.Nil(t, page2)
	// case 6: release page after close
	err = fct.ReleasePage(0)
	assert.Equal(t, errFactoryClosed, err)
}
explode_data.jsonl/53071
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 615 }
[ 2830, 3393, 4153, 1566, 66, 984, 2665, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 197, 62, 284, 1034, 1314, 13270, 6184, 8623, 1820, 340, 197, 2109, 74, 6184, 9626, 284, 1034, 1314, 1321, 74, 6184, 2679, 45535, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContactService_Add_WhenAddFails(t *testing.T) { // Arrange contactRepository := &mocks.ContactRepository{} errToReturn := apierror.New(http.StatusInternalServerError, "") contactRepository.On("Add", mock.Anything, mock.Anything).Return(errToReturn) sut := NewContactService(contactRepository) ctx := context.TODO() contact := &entities.Contact{} // Act response, err := sut.Add(ctx, contact) // Assert assert.Nil(t, response) assert.NotNil(t, err) assert.Equal(t, errToReturn, err) contactRepository.AssertNumberOfCalls(t, "Add", 1) contactRepository.AssertCalled(t, "Add", ctx, contact) }
explode_data.jsonl/59314
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 223 }
[ 2830, 3393, 8732, 1860, 21346, 62, 4498, 2212, 37, 6209, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 197, 6287, 4624, 1669, 609, 16712, 82, 53975, 4624, 16094, 9859, 1249, 5598, 1669, 6330, 841, 7121, 19886, 66760, 11, 14676, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOpenFlagsApproverFilter checks that the approver generated from an
// open.flags rule forwards open events whose flags intersect O_SYNC|O_NOCTTY
// and drops events whose flags match neither.
func TestOpenFlagsApproverFilter(t *testing.T) {
	rule := &rules.RuleDefinition{
		ID:         "test_rule",
		Expression: `open.flags & (O_SYNC | O_NOCTTY) > 0`,
	}
	test, err := newTestModule(nil, []*rules.RuleDefinition{rule}, testOpts{wantProbeEvents: true})
	if err != nil {
		t.Fatal(err)
	}
	defer test.Close()
	// O_NOCTTY intersects the rule's flag set: an event must be produced.
	fd1, testFile1, err := openTestFile(test, "test-ofa-1", syscall.O_CREAT|syscall.O_NOCTTY)
	if err != nil {
		t.Fatal(err)
	}
	defer syscall.Close(fd1)
	defer os.Remove(testFile1)
	if _, err := waitForOpenProbeEvent(test, testFile1); err != nil {
		t.Fatal(err)
	}
	// O_SYNC also matches.
	fd2, testFile2, err := openTestFile(test, "test-ofa-1", syscall.O_SYNC)
	if err != nil {
		t.Fatal(err)
	}
	defer syscall.Close(fd2)
	if _, err := waitForOpenProbeEvent(test, testFile2); err != nil {
		t.Fatal(err)
	}
	// O_RDONLY matches neither flag: the approver should filter the event out,
	// so waiting for it is expected to fail.
	fd3, testFile3, err := openTestFile(test, "test-ofa-1", syscall.O_RDONLY)
	if err != nil {
		t.Fatal(err)
	}
	defer syscall.Close(fd3)
	if event, err := waitForOpenProbeEvent(test, testFile3); err == nil {
		t.Fatalf("shouldn't get an event: %+v", event)
	}
}
explode_data.jsonl/61817
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 480 }
[ 2830, 3393, 5002, 9195, 28588, 423, 5632, 1155, 353, 8840, 836, 8, 341, 7000, 1111, 1669, 609, 21977, 63961, 10398, 515, 197, 29580, 25, 260, 330, 1944, 21124, 756, 197, 197, 9595, 25, 1565, 2508, 27203, 609, 320, 46, 39189, 760, 506,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestSetDBConnection_Error(t *testing.T) { manager := GetManagerInstance() clearManager() manager.SetDBConnection(&database.MockDBConnection{ GetAllClustersReturnValue: []database.ClusterModel{ { Name: "one", Color: 1, }, }, GetAllClustersReturnErr: true, }) if len(manager.clusterCache) != 0 { t.Error("manager is not empty on db fail") } }
explode_data.jsonl/24672
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 1649, 3506, 4526, 28651, 1155, 353, 8840, 836, 8, 341, 92272, 1669, 2126, 2043, 2523, 741, 40408, 2043, 2822, 92272, 4202, 3506, 4526, 2099, 12216, 24664, 3506, 4526, 515, 197, 37654, 2403, 94992, 40426, 25, 3056, 12216, 72883...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEventChannel_close(t *testing.T) { data := make([]byte, 0) send := newSender(&data) eventChannel := NewEventChannel(send, 15000, 15000, 2*time.Hour) eventChannel.buffer([]byte("one")) eventChannel.buffer([]byte("two")) eventChannel.buffer([]byte("three")) eventChannel.Close() time.Sleep(10 * time.Millisecond) assert.Equal(t, string(data), "onetwothree") }
explode_data.jsonl/43920
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 1556, 9629, 12704, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 1281, 10556, 3782, 11, 220, 15, 340, 32817, 1669, 501, 20381, 2099, 691, 692, 28302, 9629, 1669, 1532, 1556, 9629, 47617, 11, 220, 16, 20, 15, 15, 15, 11, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBuildUpdate verifies that Client.Put("/apps/.../builds/...") serializes
// every BuildUpdateOptions field into the expected form params, forwards the
// call to the provider, and decodes the provider's build into the response.
func TestBuildUpdate(t *testing.T) {
	testServer(t, func(c *stdsdk.Client, p *structs.MockProvider) {
		b1 := fxBuild
		b2 := structs.Build{}
		// Options the client is expected to serialize.
		opts := structs.BuildUpdateOptions{
			Ended:    options.Time(time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)),
			Logs:     options.String("logs"),
			Manifest: options.String("manifest"),
			Release:  options.String("release1"),
			Started:  options.Time(time.Date(2018, 1, 1, 0, 0, 0, 0, time.UTC)),
			Status:   options.String("status"),
		}
		// Expected wire-format parameters (timestamps in yyyymmdd.hhmmss.ns form).
		ro := stdsdk.RequestOptions{
			Params: stdsdk.Params{
				"ended":    "20180101.000000.000000000",
				"logs":     "logs",
				"manifest": "manifest",
				"release":  "release1",
				"started":  "20180101.000000.000000000",
				"status":   "status",
			},
		}
		p.On("BuildUpdate", "app1", "build1", opts).Return(&b1, nil)
		err := c.Put("/apps/app1/builds/build1", ro, &b2)
		require.NoError(t, err)
		// The decoded response must round-trip to the provider's build.
		require.Equal(t, b1, b2)
	})
}
explode_data.jsonl/71427
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 426 }
[ 2830, 3393, 11066, 4289, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 1155, 11, 2915, 1337, 353, 1834, 51295, 11716, 11, 281, 353, 1235, 82, 24664, 5179, 8, 341, 197, 2233, 16, 1669, 33219, 11066, 198, 197, 2233, 17, 1669, 62845, 25212,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNative_SkipObject(t *testing.T) { p := 0 s := `"asdf": "wqer"},` __skip_object(&s, &p, &types.StateMachine{}) assert.Equal(t, p, 15) }
explode_data.jsonl/60924
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 20800, 1098, 13389, 1190, 1155, 353, 8840, 836, 8, 341, 262, 281, 1669, 220, 15, 198, 262, 274, 1669, 53305, 76615, 788, 330, 86, 80, 261, 14345, 3989, 262, 1304, 20599, 5314, 2099, 82, 11, 609, 79, 11, 609, 9242, 18942,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestUnsupportedSliceType(t *testing.T) { type config struct { WontWork []map[int]int `env:"WONTWORK"` } os.Setenv("WONTWORK", "1,2,3") defer os.Clearenv() cfg := &config{} assert.Error(t, env.Parse(cfg)) }
explode_data.jsonl/7491
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 41884, 33236, 929, 1155, 353, 8840, 836, 8, 341, 13158, 2193, 2036, 341, 197, 17300, 544, 6776, 3056, 2186, 18640, 63025, 1565, 3160, 2974, 54, 10232, 18470, 8805, 197, 630, 25078, 4202, 3160, 445, 54, 10232, 18470, 497, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCompositeLit3 checks that Go+ composite literals written without an
// explicit type ({A: 1}) are compiled to typed Go literals inferred from the
// callee's parameter type (&Config{...} for a pointer param, Config{...} for
// a variadic param).
// NOTE(review): the two raw-string fixtures below appear to have had their
// original newlines collapsed to spaces by the data pipeline; they are kept
// byte-for-byte as found.
func TestCompositeLit3(t *testing.T) { gopClTest(t, ` type Config struct { A int } func foo(conf *Config) { } func bar(conf ...Config) { } foo({A: 1}) bar({A: 2}) `, `package main type Config struct { A int } func foo(conf *Config) { } func bar(conf ...Config) { } func main() { foo(&Config{A: 1}) bar(Config{A: 2}) } `) }
explode_data.jsonl/73630
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 41685, 68954, 18, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 22074, 1313, 5532, 2036, 341, 22985, 526, 198, 630, 2830, 15229, 29879, 353, 2648, 8, 341, 630, 2830, 3619, 29879, 2503, 2648, 8, 341, 630, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestExternalDocRef checks ExternalDocumentRef.String(): it renders
// "DocumentRef-<id> <uri> <algo>: <checksum>" only when ID, URI and a
// checksum are all present, and an empty string otherwise.
func TestExternalDocRef(t *testing.T) {
	cases := []struct {
		DocRef    ExternalDocumentRef
		StringVal string
	}{
		// everything missing
		{ExternalDocumentRef{ID: "", URI: "", Checksums: map[string]string{}}, ""},
		// ID missing
		{ExternalDocumentRef{ID: "", URI: "http://example.com/", Checksums: map[string]string{"SHA256": "d3b53860aa08e5c7ea868629800eaf78856f6ef3bcd4a2f8c5c865b75f6837c8"}}, ""},
		// URI missing
		{ExternalDocumentRef{ID: "test-id", URI: "", Checksums: map[string]string{"SHA256": "d3b53860aa08e5c7ea868629800eaf78856f6ef3bcd4a2f8c5c865b75f6837c8"}}, ""},
		// checksums missing
		{ExternalDocumentRef{ID: "test-id", URI: "http://example.com/", Checksums: map[string]string{}}, ""},
		// fully populated: the reference line is rendered
		{
			ExternalDocumentRef{
				ID:        "test-id",
				URI:       "http://example.com/",
				Checksums: map[string]string{"SHA256": "d3b53860aa08e5c7ea868629800eaf78856f6ef3bcd4a2f8c5c865b75f6837c8"},
			},
			"DocumentRef-test-id http://example.com/ SHA256: d3b53860aa08e5c7ea868629800eaf78856f6ef3bcd4a2f8c5c865b75f6837c8",
		},
	}
	for _, tc := range cases {
		require.Equal(t, tc.StringVal, tc.DocRef.String())
	}
}
explode_data.jsonl/7738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 25913, 9550, 3945, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 197, 9550, 3945, 262, 30936, 7524, 3945, 198, 197, 4980, 2208, 914, 198, 197, 59403, 197, 197, 90, 25913, 7524, 3945, 90, 915, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCAConfigBCCSPSecurityEnabled ensures the BCCSP security-enabled flag
// read from the raw config backend agrees with what the crypto suite config
// object reports through IsSecurityEnabled.
func TestCAConfigBCCSPSecurityEnabled(t *testing.T) {
	backend, err := config.FromFile(configTestFilePath)()
	if err != nil {
		t.Fatal("Failed to get config backend")
	}
	customBackend := getCustomBackend(backend...)
	cryptoConfig := ConfigFromBackend(customBackend).(*Config)
	// Test BCCSP security is enabled
	val, ok := customBackend.Lookup("client.BCCSP.security.enabled")
	if !ok || val == nil {
		t.Fatal("expected valid value")
	}
	// val.(bool) will panic if the config value isn't a bool — acceptable in a
	// test, since that would itself indicate a malformed fixture.
	if val.(bool) != cryptoConfig.IsSecurityEnabled() {
		t.Fatal("Incorrect BCCSP Security enabled flag")
	}
}
explode_data.jsonl/58376
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 5049, 2648, 33, 3706, 4592, 15352, 5462, 1155, 353, 8840, 836, 8, 341, 197, 20942, 11, 1848, 1669, 2193, 11439, 1703, 8754, 2271, 19090, 8, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 445, 9408, 311, 633, 2193, 19163...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestTable_MultiQueries seeds three db files (records [0,10), [10,20) and
// [20,30)) behind object storage, loads them through a table, and runs one
// IndexQuery per value in [5,25) to confirm the table serves data spanning
// all the dbs.
func TestTable_MultiQueries(t *testing.T) {
	tempDir, err := ioutil.TempDir("", "table-downloads-multi-queries")
	require.NoError(t, err)
	defer func() {
		require.NoError(t, os.RemoveAll(tempDir))
	}()
	objectStoragePath := filepath.Join(tempDir, objectsStorageDirName)
	// Three dbs, each holding 10 consecutive records.
	testDBs := map[string]testutil.DBRecords{
		"db1": {
			Start:      0,
			NumRecords: 10,
		},
		"db2": {
			Start:      10,
			NumRecords: 10,
		},
		"db3": {
			Start:      20,
			NumRecords: 10,
		},
	}
	testutil.SetupDBTablesAtPath(t, "test", objectStoragePath, testDBs, true)
	table, _, stopFunc := buildTestTable(t, "test", tempDir)
	defer func() {
		stopFunc()
	}()
	// build queries each looking for specific value from all the dbs
	var queries []chunk.IndexQuery
	for i := 5; i < 25; i++ {
		queries = append(queries, chunk.IndexQuery{ValueEqual: []byte(strconv.Itoa(i))})
	}
	// query the loaded table to see if it has right data.
	testutil.TestSingleTableQuery(t, queries, table, 5, 20)
}
explode_data.jsonl/81247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 403 }
[ 2830, 3393, 2556, 1245, 7068, 55261, 1155, 353, 8840, 836, 8, 341, 16280, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 2005, 14875, 32685, 95669, 12, 42835, 1138, 17957, 35699, 1155, 11, 1848, 692, 16867, 2915, 368, 341, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetAuthorizedOAuthAppsForUser registers an OAuth app, authorizes it for
// the basic user, and then checks GetAuthorizedOAuthAppsForUser: the app is
// listed with its client secret sanitized, other users' lists are forbidden,
// a junk user id is a bad request, a logged-out client is unauthorized, and
// an admin may read any user's list.
func TestGetAuthorizedOAuthAppsForUser(t *testing.T) {
	th := Setup().InitBasic()
	defer th.TearDown()
	Client := th.Client
	AdminClient := th.SystemAdminClient
	// Remember the provider flag so it can be restored when the test ends.
	enableOAuth := th.App.Config().ServiceSettings.EnableOAuthServiceProvider
	defer func() {
		th.App.UpdateConfig(func(cfg *model.Config) { cfg.ServiceSettings.EnableOAuthServiceProvider = enableOAuth })
	}()
	th.App.UpdateConfig(func(cfg *model.Config) { *cfg.ServiceSettings.EnableOAuthServiceProvider = true })
	oapp := &model.OAuthApp{Name: GenerateTestAppName(), Homepage: "https://nowhere.com", Description: "test", CallbackUrls: []string{"https://nowhere.com"}}
	rapp, resp := AdminClient.CreateOAuthApp(oapp)
	CheckNoError(t, resp)
	// Authorize the app on behalf of the basic user.
	authRequest := &model.AuthorizeRequest{
		ResponseType: model.AUTHCODE_RESPONSE_TYPE,
		ClientId:     rapp.Id,
		RedirectUri:  rapp.CallbackUrls[0],
		Scope:        "",
		State:        "123",
	}
	_, resp = Client.AuthorizeOAuthApp(authRequest)
	CheckNoError(t, resp)
	apps, resp := Client.GetAuthorizedOAuthAppsForUser(th.BasicUser.Id, 0, 1000)
	CheckNoError(t, resp)
	// The authorized app must be present, and no app may leak its secret.
	found := false
	for _, a := range apps {
		if a.Id == rapp.Id {
			found = true
		}
		if a.ClientSecret != "" {
			t.Fatal("not sanitized")
		}
	}
	if !found {
		t.Fatal("missing app")
	}
	// Reading another user's authorizations is forbidden.
	_, resp = Client.GetAuthorizedOAuthAppsForUser(th.BasicUser2.Id, 0, 1000)
	CheckForbiddenStatus(t, resp)
	_, resp = Client.GetAuthorizedOAuthAppsForUser("junk", 0, 1000)
	CheckBadRequestStatus(t, resp)
	Client.Logout()
	_, resp = Client.GetAuthorizedOAuthAppsForUser(th.BasicUser.Id, 0, 1000)
	CheckUnauthorizedStatus(t, resp)
	// An admin may read any user's list.
	_, resp = AdminClient.GetAuthorizedOAuthAppsForUser(th.BasicUser.Id, 0, 1000)
	CheckNoError(t, resp)
}
explode_data.jsonl/30131
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 644 }
[ 2830, 3393, 1949, 60454, 57850, 53602, 2461, 1474, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1005, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 198, 197, 7210, 2959, 1669, 270, 16620, 7210, 2959, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChannelsService_ListUser verifies that Channels.ListUser issues
// GET /channels/1/users with the paging options encoded as form values and
// decodes the wrapped user list from the response.
func TestChannelsService_ListUser(t *testing.T) {
	setup()
	defer teardown()
	mux.HandleFunc("/channels/1/users", func(w http.ResponseWriter, r *http.Request) {
		testMethod(t, r, "GET")
		// OptPage/OptPerPage must arrive as "page" and "per_page".
		testFormURLValues(t, r, values{
			"page":     "1",
			"per_page": "2",
		})
		fmt.Fprint(w, `{"data": [{"name": "Test"}]}`)
	})
	users, _, err := client.Channels.ListUser("1", OptPage(1), OptPerPage(2))
	if err != nil {
		t.Errorf("Channels.ListUser returned unexpected error: %v", err)
	}
	want := []*User{{Name: "Test"}}
	if !reflect.DeepEqual(users, want) {
		t.Errorf("Channels.ListUser returned %+v, want %+v", users, want)
	}
}
explode_data.jsonl/49786
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 35925, 1860, 27104, 1474, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 32425, 14, 16, 19178, 497, 2915, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 18185, 3523, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMountReaddir mounts a loopback filesystem at "mnt" inside the test
// mount point and checks that reading the parent directory lists exactly
// that one entry.
func TestMountReaddir(t *testing.T) {
	ts := NewTestCase(t)
	defer ts.Cleanup()
	fs := pathfs.NewPathNodeFs(pathfs.NewLoopbackFileSystem(ts.orig), nil)
	code := ts.connector.Mount(ts.rootNode(), "mnt", fs.Root(), nil)
	if !code.Ok() {
		t.Fatal("mount should succeed")
	}
	entries, err := ioutil.ReadDir(ts.mnt)
	if err != nil {
		t.Fatalf("ReadDir failed: %v", err)
	}
	// Only the freshly mounted "mnt" entry should be visible.
	if len(entries) != 1 || entries[0].Name() != "mnt" {
		t.Error("wrong readdir result", entries)
	}
	ts.pathFs.Unmount("mnt")
}
explode_data.jsonl/9155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 16284, 693, 44525, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 1532, 16458, 1155, 340, 16867, 10591, 727, 60639, 2822, 53584, 1669, 1815, 3848, 7121, 1820, 1955, 48300, 5581, 3848, 7121, 14620, 1419, 50720, 35864, 68837, 701, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSliceIndex(t *testing.T) { for _, item := range []struct { haystack []string needle string want int }{ {[]string{"foo", "bar", "baz"}, "go", -1}, {[]string{"中文输入法", "输入法", "中文"}, "中文", 2}, } { got := gslice.SliceIndex(len(item.haystack), func(i int) bool { return item.haystack[i] == item.needle }) require.Equal(t, item.want, got, item.haystack) } }
explode_data.jsonl/8978
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 193 }
[ 2830, 3393, 33236, 1552, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1509, 1669, 2088, 3056, 1235, 341, 197, 9598, 352, 7693, 3056, 917, 198, 197, 197, 59519, 256, 914, 198, 197, 50780, 257, 526, 198, 197, 59403, 197, 197, 90, 1294, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestResolveDIDDocFromMessage covers resolveDidDocFromMessage for every
// supported media type profile: attachment-based resolution of private
// (peer) DIDs, registry-based resolution of public DIDs, interop mode, and
// the failure paths for unparsable DIDs, missing or malformed attachments,
// and failing registry/store operations.
func TestResolveDIDDocFromMessage(t *testing.T) {
	prov := getProvider(t)
	// Run every subtest under both media type profiles.
	mtps := []string{transport.MediaTypeDIDCommV2Profile, transport.MediaTypeRFC0019EncryptedEnvelope}
	for _, mtp := range mtps {
		t.Run(fmt.Sprintf("success with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			docIn := mockdiddoc.GetMockDIDDoc(t)
			att, err := ctx.didDocAttachment(docIn, "")
			require.NoError(t, err)
			doc, err := ctx.resolveDidDocFromMessage(docIn.ID, att)
			require.NoError(t, err)
			require.Equal(t, docIn.ID, doc.ID)
		})
		t.Run(fmt.Sprintf("success - public resolution with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			docIn := mockdiddoc.GetMockDIDDoc(t)
			// A non-peer method forces resolution through the VDR registry.
			docIn.ID = "did:remote:abc"
			ctx.vdRegistry = &mockvdr.MockVDRegistry{ResolveValue: docIn}
			doc, err := ctx.resolveDidDocFromMessage(docIn.ID, nil)
			require.NoError(t, err)
			require.Equal(t, docIn.ID, doc.ID)
		})
		t.Run(fmt.Sprintf("failure - can't do public resolution with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			docIn := mockdiddoc.GetMockDIDDoc(t)
			docIn.ID = "did:remote:abc"
			ctx.vdRegistry = &mockvdr.MockVDRegistry{ResolveErr: fmt.Errorf("resolve error")}
			_, err := ctx.resolveDidDocFromMessage(docIn.ID, nil)
			require.Error(t, err)
			require.Contains(t, err.Error(), "failed to resolve public did")
		})
		t.Run(fmt.Sprintf("failure - can't parse did with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			_, err := ctx.resolveDidDocFromMessage("blah blah", nil)
			require.Error(t, err)
			require.Contains(t, err.Error(), "failed to parse did")
		})
		t.Run(fmt.Sprintf("failure - missing attachment for private did with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			// Peer DIDs carry their doc in an attachment; nil must fail.
			_, err := ctx.resolveDidDocFromMessage("did:peer:abcdefg", nil)
			require.Error(t, err)
			require.Contains(t, err.Error(), "missing did_doc~attach")
		})
		t.Run(fmt.Sprintf("failure - bad base64 data in attachment with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			att := decorator.Attachment{Data: decorator.AttachmentData{Base64: "!@#$%^&*"}}
			_, err := ctx.resolveDidDocFromMessage("did:peer:abcdefg", &att)
			require.Error(t, err)
			require.Contains(t, err.Error(), "failed to parse base64 attachment data")
		})
		t.Run(fmt.Sprintf("failure - attachment contains encoded broken document with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			// Valid base64, but the decoded bytes are not a DID document.
			att := decorator.Attachment{
				Data: decorator.AttachmentData{
					Base64: base64.StdEncoding.EncodeToString([]byte("abcdefg")),
				},
			}
			_, err := ctx.resolveDidDocFromMessage("did:peer:abcdefg", &att)
			require.Error(t, err)
			require.Contains(t, err.Error(), "failed to parse did document")
		})
		t.Run(fmt.Sprintf("success - interop mode with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			docIn := mockdiddoc.GetMockDIDDoc(t)
			docIn.ID = "did:sov:abcdefg"
			att, err := ctx.didDocAttachment(docIn, "")
			require.NoError(t, err)
			// ACA-Py interop allows did:sov docs delivered by attachment.
			ctx.doACAPyInterop = true
			doc, err := ctx.resolveDidDocFromMessage(docIn.ID, att)
			require.NoError(t, err)
			require.Equal(t, docIn.ID, doc.ID)
		})
		t.Run(fmt.Sprintf("failure - can't store document locally with media type profile: %s", mtp), func(t *testing.T) {
			ctx := getContext(t, &prov, kms.ED25519Type, kms.X25519ECDHKWType, mtp)
			ctx.vdRegistry = &mockvdr.MockVDRegistry{CreateErr: fmt.Errorf("create error")}
			docIn := mockdiddoc.GetMockDIDDoc(t)
			att, err := ctx.didDocAttachment(docIn, "")
			require.NoError(t, err)
			_, err = ctx.resolveDidDocFromMessage(docIn.ID, att)
			require.Error(t, err)
			require.Contains(t, err.Error(), "failed to store provided did document")
		})
	}
}
explode_data.jsonl/2105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1856 }
[ 2830, 3393, 56808, 35, 915, 9550, 3830, 2052, 1155, 353, 8840, 836, 8, 341, 197, 42947, 1669, 633, 5179, 1155, 340, 2109, 96031, 1669, 3056, 917, 90, 26445, 63714, 35, 915, 17977, 53, 17, 8526, 11, 7557, 63714, 64371, 15, 15, 16, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_reportStatusCouldntSave(t *testing.T) { ctx := log.NewSyncLogger(log.NewLogfmtLogger(os.Stdout)) ext := createTestVMExtension() c := cmd{nil, "Install", true, 99} ext.HandlerEnv.StatusFolder = "./yabamonster" ext.RequestedSequenceNumber = 45 err := reportStatus(ctx, ext, status.StatusSuccess, c, "msg") require.Error(t, err) }
explode_data.jsonl/18573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 129 }
[ 2830, 3393, 14813, 2522, 12895, 406, 8784, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1487, 7121, 12154, 7395, 12531, 7121, 2201, 12501, 7395, 9638, 83225, 1171, 95450, 1669, 1855, 2271, 11187, 12049, 741, 1444, 1669, 5439, 90, 8385, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestOverlappingRCs checks that when several replication controllers select
// the same pod, adding the pod enqueues only the oldest controller (by
// creation timestamp). The whole scenario is repeated 5 times with a fresh
// random shuffle to guard against ordering flukes.
func TestOverlappingRCs(t *testing.T) {
	c := clientset.NewForConfigOrDie(&restclient.Config{Host: "", ContentConfig: restclient.ContentConfig{GroupVersion: testapi.Default.GroupVersion()}})
	for i := 0; i < 5; i++ {
		manager := NewReplicationManager(c, controller.NoResyncPeriodFunc, 10, 0)
		manager.podStoreSynced = alwaysReady
		// Create rcs (9 of them, j = 1..9) with strictly increasing creation
		// timestamps, shuffle them, and insert them into the rc manager's store.
		var controllers []*api.ReplicationController
		for j := 1; j < 10; j++ {
			controllerSpec := newReplicationController(1)
			controllerSpec.CreationTimestamp = unversioned.Date(2014, time.December, j, 0, 0, 0, 0, time.Local)
			controllerSpec.Name = string(util.NewUUID())
			controllers = append(controllers, controllerSpec)
		}
		shuffledControllers := shuffle(controllers)
		for j := range shuffledControllers {
			manager.rcStore.Store.Add(shuffledControllers[j])
		}
		// Add a pod and make sure only the oldest rc is synced.
		// controllers[0] has the earliest timestamp, so it must win.
		pods := newPodList(nil, 1, api.PodPending, controllers[0], "pod")
		rcKey := getKey(controllers[0], t)
		manager.addPod(&pods.Items[0])
		queueRC, _ := manager.queue.Get()
		if queueRC != rcKey {
			t.Fatalf("Expected to find key %v in queue, found %v", rcKey, queueRC)
		}
	}
}
explode_data.jsonl/37497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 436 }
[ 2830, 3393, 1918, 90619, 7380, 82, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2943, 746, 7121, 2461, 2648, 2195, 18175, 2099, 3927, 2972, 10753, 90, 9296, 25, 7342, 8883, 2648, 25, 2732, 2972, 12614, 2648, 90, 2808, 5637, 25, 1273, 206...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestShorten(test *testing.T) { t := preflight.Unit(test) path := "captainslog/caller_test.TestShorten" // should return the most specific path possible t.Expect(caller.Shorten(path, 15)).Equals("TestShorten") t.Expect(caller.Shorten(path, 11)).Equals("TestShort..") t.Expect(caller.Shorten(path, 30)).Equals("caller_test.TestShorten") }
explode_data.jsonl/72050
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 12472, 268, 8623, 353, 8840, 836, 8, 341, 3244, 1669, 855, 38390, 25159, 8623, 692, 26781, 1669, 330, 43203, 1735, 839, 2899, 13956, 4452, 8787, 12472, 268, 1837, 197, 322, 1265, 470, 279, 1429, 3151, 1815, 3204, 198, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAllCoinsInfo(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() && !mockTests { t.Skip("API keys not set") } _, err := b.GetAllCoinsInfo(context.Background()) if err != nil { t.Error(err) } }
explode_data.jsonl/76674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 1949, 2403, 69602, 1731, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 1009, 753, 16712, 18200, 341, 197, 3244, 57776, 445, 7082, 6894, 537, 738, 1138, 197, 532, 197, 687...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMove(t *testing.T) { tc := setup(t, "move", false) defer tc.cleanup() fmt.Printf("Test: Shards really move ...\n") tc.join(0) ck := tc.clerk() // insert one key per shard for i := 0; i < shardmaster.NShards; i++ { ck.Put(string('0'+i), string('0'+i)) } // add group 1. tc.join(1) time.Sleep(5 * time.Second) // check that keys are still there. for i := 0; i < shardmaster.NShards; i++ { if ck.Get(string('0'+i)) != string('0'+i) { t.Fatalf("missing key/value") } } // remove sockets from group 0. for _, port := range tc.groups[0].ports { os.Remove(port) } count := int32(0) var mu sync.Mutex for i := 0; i < shardmaster.NShards; i++ { go func(me int) { myck := tc.clerk() v := myck.Get(string('0' + me)) if v == string('0'+me) { mu.Lock() atomic.AddInt32(&count, 1) mu.Unlock() } else { t.Fatalf("Get(%v) yielded %v\n", me, v) } }(i) } time.Sleep(10 * time.Second) ccc := atomic.LoadInt32(&count) if ccc > shardmaster.NShards/3 && ccc < 2*(shardmaster.NShards/3) { fmt.Printf(" ... Passed\n") } else { t.Fatalf("%v keys worked after killing 1/2 of groups; wanted %v", ccc, shardmaster.NShards/2) } }
explode_data.jsonl/18806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 9860, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 6505, 1155, 11, 330, 3397, 497, 895, 340, 16867, 17130, 87689, 2822, 11009, 19367, 445, 2271, 25, 1417, 2347, 2167, 3271, 98760, 77, 5130, 78255, 5446, 7, 15, 692, 197, 377,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCancelPending_IgnoreEvent(t *testing.T) { ignore := []string{ core.EventCron, core.EventCustom, core.EventPromote, core.EventRollback, core.EventTag, } for _, event := range ignore { s := new(service) err := s.CancelPending(noContext, nil, &core.Build{Event: event}) if err != nil { t.Errorf("Expect cancel skipped for event type %s", event) } } }
explode_data.jsonl/31278
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 9269, 32027, 7959, 10289, 1556, 1155, 353, 8840, 836, 8, 341, 197, 13130, 1669, 3056, 917, 515, 197, 71882, 6904, 34, 2248, 345, 197, 71882, 6904, 10268, 345, 197, 71882, 6904, 35186, 1272, 345, 197, 71882, 6904, 32355, 1419...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestAuthorization_EgressGateway exercises v1beta1 AuthorizationPolicy as
// enforced on an egress gateway. It deploys three echo workloads (a, b, c),
// applies the policies rendered from
// testdata/authz/v1beta1-egress-gateway.yaml.tmpl, and then checks allow/deny
// outcomes for path-based rules, source-principal (mTLS) rules, and JWT rules,
// matching on the returned status code and response body.
func TestAuthorization_EgressGateway(t *testing.T) {
	framework.NewTest(t).
		Run(func(ctx framework.TestContext) {
			// Fresh injected namespace so the policies only affect this test.
			ns := namespace.NewOrFail(t, ctx, namespace.Config{
				Prefix: "v1beta1-egress-gateway",
				Inject: true,
			})
			var a, b, c echo.Instance
			echoboot.NewBuilder(ctx).
				With(&a, util.EchoConfig("a", ns, false, nil)).
				With(&b, echo.Config{
					Service:   "b",
					Namespace: ns,
					Subsets:   []echo.SubsetConfig{{}},
					Ports: []echo.Port{
						{
							Name:        "http",
							Protocol:    protocol.HTTP,
							ServicePort: 8090,
						},
					},
				}).
				With(&c, util.EchoConfig("c", ns, false, nil)).
				BuildOrFail(t)
			// Template arguments for the policy manifest.
			args := map[string]string{
				"Namespace":     ns.Name(),
				"RootNamespace": rootNamespace,
			}
			policies := tmpl.EvaluateAllOrFail(t, args, file.AsStringOrFail(t, "testdata/authz/v1beta1-egress-gateway.yaml.tmpl"))
			ctx.Config().ApplyYAMLOrFail(t, "", policies...)
			defer ctx.Config().DeleteYAMLOrFail(t, "", policies...)
			// Each case sends one request and asserts on status code and body
			// substring; `token`, when set, is attached as a Bearer token.
			cases := []struct {
				name  string
				path  string
				code  string
				body  string
				host  string
				from  echo.Workload
				token string
			}{
				{
					name: "allow path to company.com",
					path: "/allow",
					code: response.StatusCodeOK,
					body: "handled-by-egress-gateway",
					host: "www.company.com",
					from: getWorkload(a, t),
				},
				{
					name: "deny path to company.com",
					path: "/deny",
					code: response.StatusCodeForbidden,
					body: "RBAC: access denied",
					host: "www.company.com",
					from: getWorkload(a, t),
				},
				{
					name: "allow service account a to a-only.com over mTLS",
					path: "/",
					code: response.StatusCodeOK,
					body: "handled-by-egress-gateway",
					host: "a-only.com",
					from: getWorkload(a, t),
				},
				{
					name: "deny service account c to a-only.com over mTLS",
					path: "/",
					code: response.StatusCodeForbidden,
					body: "RBAC: access denied",
					host: "a-only.com",
					from: getWorkload(c, t),
				},
				{
					name:  "allow a with JWT to jwt-only.com over mTLS",
					path:  "/",
					code:  response.StatusCodeOK,
					body:  "handled-by-egress-gateway",
					host:  "jwt-only.com",
					from:  getWorkload(a, t),
					token: jwt.TokenIssuer1,
				},
				{
					name:  "allow c with JWT to jwt-only.com over mTLS",
					path:  "/",
					code:  response.StatusCodeOK,
					body:  "handled-by-egress-gateway",
					host:  "jwt-only.com",
					from:  getWorkload(c, t),
					token: jwt.TokenIssuer1,
				},
				{
					name:  "deny c with wrong JWT to jwt-only.com over mTLS",
					path:  "/",
					code:  response.StatusCodeForbidden,
					body:  "RBAC: access denied",
					host:  "jwt-only.com",
					from:  getWorkload(c, t),
					token: jwt.TokenIssuer2,
				},
				{
					name:  "allow service account a with JWT to jwt-and-a-only.com over mTLS",
					path:  "/",
					code:  response.StatusCodeOK,
					body:  "handled-by-egress-gateway",
					host:  "jwt-and-a-only.com",
					from:  getWorkload(a, t),
					token: jwt.TokenIssuer1,
				},
				{
					name:  "deny service account c with JWT to jwt-and-a-only.com over mTLS",
					path:  "/",
					code:  response.StatusCodeForbidden,
					body:  "RBAC: access denied",
					host:  "jwt-and-a-only.com",
					from:  getWorkload(c, t),
					token: jwt.TokenIssuer1,
				},
				{
					name:  "deny service account a with wrong JWT to jwt-and-a-only.com over mTLS",
					path:  "/",
					code:  response.StatusCodeForbidden,
					body:  "RBAC: access denied",
					host:  "jwt-and-a-only.com",
					from:  getWorkload(a, t),
					token: jwt.TokenIssuer2,
				},
			}
			for _, tc := range cases {
				request := &epb.ForwardEchoRequest{
					// Use a fake IP to make sure the request is handled by our test.
					Url:   fmt.Sprintf("http://10.4.4.4%s", tc.path),
					Count: 1,
					Headers: []*epb.Header{
						{
							Key:   "Host",
							Value: tc.host,
						},
					},
				}
				if tc.token != "" {
					request.Headers = append(request.Headers, &epb.Header{
						Key:   "Authorization",
						Value: "Bearer " + tc.token,
					})
				}
				t.Run(tc.name, func(t *testing.T) {
					// Retry: policy propagation to the gateway is asynchronous.
					retry.UntilSuccessOrFail(t, func() error {
						responses, err := tc.from.ForwardEcho(context.TODO(), request)
						if err != nil {
							return err
						}
						if len(responses) < 1 {
							return fmt.Errorf("received no responses from request to %s", tc.path)
						}
						if tc.code != responses[0].Code {
							return fmt.Errorf("want status %s but got %s", tc.code, responses[0].Code)
						}
						if !strings.Contains(responses[0].Body, tc.body) {
							return fmt.Errorf("want %q in body but not found: %s", tc.body, responses[0].Body)
						}
						return nil
					}, retry.Delay(250*time.Millisecond), retry.Timeout(30*time.Second))
				})
			}
		})
}
explode_data.jsonl/41497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2407 }
[ 2830, 3393, 18124, 2089, 2483, 40709, 1155, 353, 8840, 836, 8, 341, 1166, 5794, 7121, 2271, 1155, 4292, 197, 85952, 18552, 7502, 12626, 8787, 1972, 8, 341, 298, 84041, 1669, 4473, 7121, 46059, 1155, 11, 5635, 11, 4473, 10753, 515, 571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDecodeMissingCRC(t *testing.T) { block, rest := Decode(clearsignInput3) if block == nil { t.Fatal("failed to decode PGP signature missing a CRC") } if len(rest) > 0 { t.Fatalf("Decode should not have any remaining data left: %s", rest) } if _, err := packet.Read(block.ArmoredSignature.Body); err != nil { t.Error(err) } if _, err := packet.Read(block.ArmoredSignature.Body); err != io.EOF { t.Error(err) } }
explode_data.jsonl/47773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 32564, 25080, 83339, 1155, 353, 8840, 836, 8, 341, 47996, 11, 2732, 1669, 50194, 1337, 1547, 7752, 2505, 18, 340, 743, 2504, 621, 2092, 341, 197, 3244, 26133, 445, 16091, 311, 16895, 393, 24430, 11957, 7402, 264, 29395, 1138...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGenerateTopicNameWithDot(t *testing.T) { expected := "knative-eventing-channel.channel-namespace.channel-name" actual := TopicName(".", "channel-namespace", "channel-name") if expected != actual { t.Errorf("Expected '%s'. Actual '%s'", expected, actual) } }
explode_data.jsonl/15506
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 31115, 26406, 675, 2354, 34207, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 330, 19095, 1388, 39687, 287, 53868, 16195, 12, 2231, 16195, 11494, 698, 88814, 1669, 32911, 675, 64217, 330, 10119, 12, 2231, 497, 330, 10119, 11494, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseInterfaceName(t *testing.T) { ifi := InterfaceInfo{Interface: &net.Interface{}} for i, tt := range []struct { b []byte error }{ {[]byte{0, 'e', 'n', '0'}, errInvalidExtension}, {[]byte{4, 'e', 'n', '0'}, nil}, {[]byte{7, 'e', 'n', '0', 0xff, 0xff, 0xff, 0xff}, errInvalidExtension}, {[]byte{8, 'e', 'n', '0', 0xff, 0xff, 0xff}, errMessageTooShort}, } { if _, err := ifi.parseName(tt.b); err != tt.error { t.Errorf("#%d: got %v; want %v", i, err, tt.error) } } }
explode_data.jsonl/30586
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 14463, 5051, 675, 1155, 353, 8840, 836, 8, 341, 743, 72, 1669, 20019, 1731, 90, 5051, 25, 609, 4711, 41065, 6257, 532, 2023, 600, 11, 17853, 1669, 2088, 3056, 1235, 341, 197, 2233, 3056, 3782, 198, 197, 18290, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFileRepositoryChild(t *testing.T) { f := &mobileFileRepo{} repository.Register("file", f) p, _ := storage.Child(storage.NewFileURI("/foo/bar"), "baz") assert.Equal(t, "file:///foo/bar/baz", p.String()) p, _ = storage.Child(storage.NewFileURI("/foo/bar/"), "baz") assert.Equal(t, "file:///foo/bar/baz", p.String()) uri, _ := storage.ParseURI("content://thing") p, err := storage.Child(uri, "new") assert.NotNil(t, err) assert.Nil(t, p) }
explode_data.jsonl/47722
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 1703, 4624, 3652, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 609, 14933, 1703, 25243, 16094, 17200, 3099, 19983, 445, 1192, 497, 282, 692, 3223, 11, 716, 1669, 5819, 28506, 52463, 7121, 1703, 10301, 4283, 7975, 49513, 3975, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCrudSegments walks segments through their full CRUD lifecycle against
// an in-memory test database: create two segments under one flag, list them,
// update one, reorder them, verify the new order, and delete one. The steps
// are order-dependent — each relies on the IDs created by the previous ones.
func TestCrudSegments(t *testing.T) {
	var res middleware.Responder
	db := entity.NewTestDB()
	c := &crud{}
	defer db.Close()
	// Stub the package-level getDB so the handlers hit the test database.
	defer gostub.StubFunc(&getDB, db).Reset()

	// Segments hang off a flag, so create flag ID 1 first.
	c.CreateFlag(flag.CreateFlagParams{
		Body: &models.CreateFlagRequest{
			Description: util.StringPtr("funny flag"),
		},
	})

	// step 1. it should be able to create segment
	res = c.CreateSegment(segment.CreateSegmentParams{
		FlagID: int64(1),
		Body: &models.CreateSegmentRequest{
			Description:    util.StringPtr("segment1"),
			RolloutPercent: util.Int64Ptr(int64(100)),
		},
	})
	assert.NotZero(t, res.(*segment.CreateSegmentOK).Payload)
	res = c.CreateSegment(segment.CreateSegmentParams{
		FlagID: int64(1),
		Body: &models.CreateSegmentRequest{
			Description:    util.StringPtr("segment2"),
			RolloutPercent: util.Int64Ptr(int64(100)),
		},
	})
	assert.NotZero(t, res.(*segment.CreateSegmentOK).Payload)

	// step 2. it should be able to find the segments
	res = c.FindSegments(segment.FindSegmentsParams{FlagID: int64(1)})
	assert.NotZero(t, len(res.(*segment.FindSegmentsOK).Payload))

	// step 3. it should be able to put the segment
	res = c.PutSegment(segment.PutSegmentParams{
		FlagID:    int64(1),
		SegmentID: int64(1),
		Body: &models.PutSegmentRequest{
			Description:    util.StringPtr("segment1"),
			RolloutPercent: util.Int64Ptr(int64(0)),
		},
	})
	assert.NotZero(t, res.(*segment.PutSegmentOK).Payload.ID)

	// step 4. it should be able to reorder the segments
	res = c.PutSegmentsReorder(segment.PutSegmentsReorderParams{
		FlagID: int64(1),
		Body: &models.PutSegmentReorderRequest{
			SegmentIDs: []int64{int64(2), int64(1)},
		},
	})
	assert.NotZero(t, res.(*segment.PutSegmentsReorderOK))

	// step 5. it should have the correct order of segments
	// (segment 2 was moved to the front by the reorder above)
	res = c.FindSegments(segment.FindSegmentsParams{FlagID: int64(1)})
	assert.Equal(t, int64(2), res.(*segment.FindSegmentsOK).Payload[0].ID)

	// step 6. it should be able to delete the segment
	res = c.DeleteSegment(segment.DeleteSegmentParams{
		FlagID:    int64(1),
		SegmentID: int64(2),
	})
	assert.NotZero(t, res.(*segment.DeleteSegmentOK))
}
explode_data.jsonl/19451
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 834 }
[ 2830, 3393, 92061, 64813, 1155, 353, 8840, 836, 8, 341, 2405, 592, 29679, 8377, 20328, 198, 20939, 1669, 5387, 7121, 2271, 3506, 741, 1444, 1669, 609, 53569, 31483, 16867, 2927, 10421, 741, 16867, 67934, 392, 7758, 392, 9626, 2099, 455, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoFiles(t *testing.T) { tests := []testRun{{ Name: "file", URL: "file.txt", Status: http.StatusNotFound, Expected: "Not Found\n", }, { Name: "dir", URL: "dir/", Status: http.StatusNotFound, Expected: "Not Found\n", }} opt := newTestOpt() opt.Serve = true opt.Files = "" testServer(t, tests, &opt) }
explode_data.jsonl/12967
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 2753, 10809, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1944, 6727, 90, 515, 197, 21297, 25, 257, 330, 1192, 756, 197, 79055, 25, 414, 330, 1192, 3909, 756, 197, 58321, 25, 256, 1758, 10538, 10372, 345, 197, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestNewRoute validates NewRoute's input checking: a route needs a service,
// deployment, namespace, and at least one domain; an empty port defaults to
// "80". Each case feeds the constructor one mutated field set and asserts
// whether an error is expected and, on success, that the result matches.
// NOTE: the case literals are positional — field order must stay in sync
// with the struct definition below.
func TestNewRoute(t *testing.T) {
	// Fully valid baseline route.
	route := &Route{
		id:         uuid.New().String(),
		service:    "helloworld",
		port:       "8080",
		deployment: "helloworld",
		namespace:  "helloworld",
		domains:    []string{"helloworld.io"},
	}
	// Same route but with the default port, used by the empty-port case.
	routeWithDefaultPort := *route
	routeWithDefaultPort.port = "80"
	testCases := []struct {
		id        string
		svc       string
		port      string
		deploy    string
		ns        string
		domains   []string
		want      *Route
		errWanted bool
	}{
		{ // route ok
			route.id, route.service, route.port, route.deployment, route.namespace, route.domains, route, false,
		},
		{ // service missing
			route.id, "", route.port, route.deployment, route.namespace, route.domains, route, true,
		},
		{ // deployment name missing
			route.id, route.service, route.port, "", route.namespace, route.domains, route, true,
		},
		{ // namespace missing
			route.id, route.service, route.port, route.deployment, "", route.domains, route, true,
		},
		{ // domains nil
			route.id, route.service, route.port, route.deployment, route.namespace, nil, route, true,
		},
		{ // domains empty
			route.id, route.service, route.port, route.deployment, route.namespace, []string{}, route, true,
		},
		{ // port empty -> must default to "80"
			route.id, route.service, "", route.deploy
ment, route.namespace, route.domains, &routeWithDefaultPort, false,
		},
	}
	for _, tc := range testCases {
		got, errGot := NewRoute(tc.id, tc.svc, tc.port, tc.deploy, tc.ns, tc.domains, true, nil, nil)
		if tc.errWanted != (errGot != nil) {
			t.Errorf("CreateRoute(tc %s) = %v, errWanted = %t", tc.id, errGot, tc.errWanted)
		}
		// Only compare contents when construction succeeded.
		if errGot == nil && !got.isEqual(tc.want) {
			t.Errorf("CreateRoute(tc %s) - got and want does not match", tc.id)
		}
	}
}
explode_data.jsonl/67778
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 927 }
[ 2830, 3393, 3564, 4899, 1155, 353, 8840, 836, 8, 341, 7000, 2133, 1669, 609, 4899, 515, 197, 15710, 25, 260, 16040, 7121, 1005, 703, 3148, 197, 52934, 25, 262, 330, 71, 95292, 756, 197, 52257, 25, 981, 330, 23, 15, 23, 15, 756, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestList(t *testing.T) { storage, server := newStorage(t) defer server.Terminate(t) defer storage.CustomResource.Store.DestroyFunc() test := registrytest.New(t, storage.CustomResource.Store) test.TestList(validNewCustomResource()) }
explode_data.jsonl/71526
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 852, 1155, 353, 8840, 836, 8, 341, 197, 16172, 11, 3538, 1669, 501, 5793, 1155, 340, 16867, 3538, 836, 261, 34016, 1155, 340, 16867, 5819, 27649, 4783, 38047, 57011, 9626, 741, 18185, 1669, 19424, 1944, 7121, 1155, 11, 5819,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestParsePosition3 checks the parser's error-position bookkeeping on a
// large OWL functional-syntax document containing an undeclared prefix
// ("wrongPrefix"). Parsing must fail, and the reported position must be
// line 145, column 110 (a leading tab counts as a single column), with
// GetCurrentLineHead returning the offending line's text up to the error.
func TestParsePosition3(t *testing.T) {
	var err error
	var o *owlfunctional.Ontology
	// parser.TokenLog = true
	// Data with unknown prefix in line 144 (counting from 1)
	// and col 109, provided the leading tab counts as 1 column
	o, err = OntologyFromReader(strings.NewReader(` Prefix(:=<urn:absolute:similix.de/similixadmin#>) Prefix(hello:=<urn:absolute:similix.de/similixadmin#>) Prefix(xsd:=<http://www.w3.org/2001/XMLSchema#>) Ontology(<urn:absolute:test.de> Declaration(Class(hello:FishbonePizza)) Declaration(Class(:AmericanHotPizza)) Declaration(Class(:AmericanaPizza)) Declaration(Class(:AnchovyTopping)) Declaration(Class(:CaloriePizza)) Declaration(Class(:CaperTopping)) Declaration(Class(:Cheese)) Declaration(Class(:CheesePizza)) Declaration(Class(:CheeseTopping)) Declaration(Class(:CheesyPizza)) Declaration(Class(:DeepPanBase)) Declaration(Class(:Food)) Declaration(Class(:GreenPepperTopping)) Declaration(Class(:HamTopping)) Declaration(Class(:HighCaloriePizza)) Declaration(Class(:Hot)) Declaration(Class(:InterestingPizza)) Declaration(Class(:JalapenoPepperTopping)) Declaration(Class(:LowCaloriePizza)) Declaration(Class(:MargheritaPizza)) Declaration(Class(:MeatTopping)) Declaration(Class(:Medium)) Declaration(Class(:Mild)) Declaration(Class(:Mozzarella)) Declaration(Class(:MozzarellaTopping)) Declaration(Class(:MushroomTopping)) Declaration(Class(:NamedPizza)) Declaration(Class(:NonVegetarianPizza)) Declaration(Class(:OliveTopping)) Declaration(Class(:OnionTopping)) Declaration(Class(:Parmesan)) Declaration(Class(:ParmesanTopping)) Declaration(Class(:PepperTopping)) Declaration(Class(:PepperoniTopping)) Declaration(Class(:Pizza)) Declaration(Class(:PizzaBase)) Declaration(Class(:PrawnTopping)) Declaration(Class(:RedPepperTopping)) Declaration(Class(:SalamiTopping)) Declaration(Class(:SeafoodTopping)) Declaration(Class(:SohoPizza)) Declaration(Class(:SpicyBeefTopping)) Declaration(Class(:SpicyPizza)) Declaration(Class(:SpicynessValuePartition)) Declaration(Class(:ThinAndCrispyBase)) Declaration(Class(:TomatoTopping)) Declaration(Class(:Topping)) Declaration(Class(:TunaTopping)) Declaration(Class(:VegetableTopping)) Declaration(Class(:VegetarianPizza)) Declaration(ObjectProperty(:hasBase)) Declaration(ObjectProperty(:hasIngredient)) Declaration(ObjectProperty(:hasSpicyness)) Declaration(ObjectProperty(:hasTopping)) Declaration(ObjectProperty(:isBaseOf)) Declaration(ObjectProperty(:isIngredientOf)) Declaration(ObjectProperty(:isToppingOf)) Declaration(DataProperty(:hasCaloricContentValue)) Declaration(NamedIndividual(:MyKäseEiPizza)) Declaration(NamedIndividual(:MyMargherita)) Declaration(NamedIndividual(:MyQuattroFormaggio)) Declaration(NamedIndividual(:MySauerkrautpizza)) ############################ # Object Properties ############################ # Object Property: :hasBase (:hasBase) SubObjectPropertyOf(:hasBase :hasIngredient) InverseObjectProperties(:hasBase :isBaseOf) FunctionalObjectProperty(:hasBase) ObjectPropertyDomain(:hasBase :Pizza) ObjectPropertyRange(:hasBase :PizzaBase) # Object Property: :hasIngredient (:hasIngredient) InverseObjectProperties(:hasIngredient :isIngredientOf) TransitiveObjectProperty(:hasIngredient) # Object Property: :hasSpicyness (:hasSpicyness) FunctionalObjectProperty(:hasSpicyness) ObjectPropertyRange(:hasSpicyness :SpicynessValuePartition) # Object Property: :hasTopping (:hasTopping) SubObjectPropertyOf(:hasTopping :hasIngredient) InverseObjectProperties(:hasTopping :isToppingOf) ObjectPropertyDomain(:hasTopping :Pizza) ObjectPropertyRange(:hasTopping :Topping) # Object Property: :isBaseOf (:isBaseOf) IrreflexiveObjectProperty(:isBaseOf) # Object Property: :isToppingOf (:isToppingOf) IrreflexiveObjectProperty(:isToppingOf) ############################ # Data Properties ############################ # Data Property: :hasCaloricContentValue (:hasCaloricContentValue) FunctionalDataProperty(:hasCaloricContentValue) DataPropertyDomain(:hasCaloricContentValue :Food) DataPropertyRange(:hasCaloricContentValue xsd:integer) ############################ # Classes ############################ # Class: hello:FishbonePizza (hello:FishbonePizza) SubClassOf(hello:FishbonePizza :NamedPizza) SubClassOf(hello:FishbonePizza ObjectSomeValuesFrom(:hasTopping :TomatoTopping)) # Class: :AmericanHotPizza (:AmericanHotPizza) SubClassOf(:AmericanHotPizza :CheesyPizza) SubClassOf(:AmericanHotPizza :NamedPizza) SubClassOf(:AmericanHotPizza ObjectSomeValuesFrom(:hasTopping :JalapenoPepperTopping)) SubClassOf(:AmericanHotPizza ObjectSomeValuesFrom(:hasTopping :ParmesanTopping)) SubClassOf(:AmericanHotPizza ObjectSomeValuesFrom(:hasTopping :PepperoniTopping)) SubClassOf(:AmericanHotPizza ObjectSomeValuesFrom(:hasTopping :TomatoTopping)) # Class: :AmericanaPizza (:AmericanaPizza) SubClassOf(:AmericanaPizza :CheesyPizza) SubClassOf(:AmericanaPizza :NamedPizza) SubClassOf(:AmericanaPizza ObjectSomeValuesFrom(:hasTopping :ParmesanTopping)) SubClassOf(:AmericanaPizza ObjectSomeValuesFrom(:hasTopping :PepperoniTopping)) SubClassOf(:AmericanaPizza ObjectSomeValuesFrom(:hasTopping :TomatoTopping)) SubClassOf(:AmericanaPizza ObjectAllValuesFrom(:hasTopping ObjectUnionOf(:ParmesanTopping :PepperoniTopping wrongPrefix:TomatoTopping))) `), "Testsource")
	// NOTE(review): o.About() is called even on the failure path; this relies
	// on OntologyFromReader returning a usable *Ontology alongside the error —
	// confirm against its implementation.
	fmt.Println(err, o.About())
	if err == nil {
		t.Fatal()
	}
	// The parse error carries the position just after the offending token.
	pos := err.(*parser.PErr).AfterPos
	if pos.LineNo1() != 145 {
		t.Fatal(pos, err)
	}
	if pos.GetCurrentLineHead() != ` SubClassOf(:AmericanaPizza ObjectAllValuesFrom(:hasTopping ObjectUnionOf(:ParmesanTopping :PepperoniTopping ` {
		t.Fatal("linehead=" + pos.GetCurrentLineHead() + "<<")
	}
	if pos.ColNo1() != 110 { // count tab as 1 col
		t.Fatal(pos)
	}
}
explode_data.jsonl/44456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2098 }
[ 2830, 3393, 14463, 3812, 18, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 2405, 297, 353, 9605, 49228, 8382, 406, 2449, 271, 197, 322, 6729, 32277, 2201, 284, 830, 271, 197, 322, 2885, 448, 9788, 9252, 304, 1555, 220, 16, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestValidateSidecarDependsOn covers validateSidecarDependsOn's rules for
// container dependency ordering: dependencies on essential containers
// (explicitly marked or implied by an absent Essential field) may only use
// the START status, non-essential sidecars may use COMPLETE, and unknown
// statuses are rejected with the matching sentinel error.
func TestValidateSidecarDependsOn(t *testing.T) {
	mockSidecarName := "sidecar"
	mockWorkloadName := "frontend"
	testCases := map[string]struct {
		inSidecar   *manifest.SidecarConfig
		allSidecars map[string]*manifest.SidecarConfig
		wantErr     error
	}{
		"no sidecar dependencies": {
			inSidecar: &manifest.SidecarConfig{},
			wantErr:   nil,
		},
		"working set essential sidecar with container dependency": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"sidecar1": "START",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar1": {
					Essential: aws.Bool(true),
				},
			},
			wantErr: nil,
		},
		"working implied essential container with container dependency": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"frontend": "START",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar": {},
			},
			wantErr: nil,
		},
		"working non-essential sidecar with container dependency": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"sidecar2": "COMPLETE",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar": {},
				"sidecar2": {
					Essential: aws.Bool(false),
				},
			},
			wantErr: nil,
		},
		"error when sidecar container dependency status is invalid": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"sidecar2": "END",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar": {},
				"sidecar2": {
					Essential: aws.Bool(false),
				},
			},
			wantErr: errInvalidSidecarDependsOnStatus,
		},
		"error when container dependency status is invalid": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"frontend": "END",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar": {},
			},
			wantErr: errInvalidDependsOnStatus,
		},
		"error when set essential sidecar has a status besides start": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"sidecar2": "COMPLETE",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar": {},
				"sidecar2": {
					Essential: aws.Bool(true),
				},
			},
			wantErr: errEssentialSidecarStatus,
		},
		"error when implied essential sidecar has a status besides start": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"sidecar2": "COMPLETE",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar":  {},
				"sidecar2": {},
			},
			wantErr: errEssentialSidecarStatus,
		},
		"error when essential container dependency status is invalid": {
			inSidecar: &manifest.SidecarConfig{
				DependsOn: map[string]string{
					"frontend": "COMPLETE",
				},
			},
			allSidecars: map[string]*manifest.SidecarConfig{
				"sidecar": {},
			},
			wantErr: errEssentialContainerStatus,
		},
	}
	for name, tc := range testCases {
		t.Run(name, func(t *testing.T) {
			// Assemble the conversion options the validator inspects.
			s := convertSidecarOpts{
				sidecarConfig: tc.allSidecars,
				imageConfig:   &manifest.Image{},
				workloadName:  mockWorkloadName,
			}
			gotErr := validateSidecarDependsOn(*tc.inSidecar, mockSidecarName, s)
			if tc.wantErr == nil {
				require.NoError(t, gotErr)
			} else {
				require.EqualError(t, gotErr, tc.wantErr.Error())
			}
		})
	}
}
explode_data.jsonl/65187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1471 }
[ 2830, 3393, 17926, 16384, 6918, 7839, 1412, 1925, 1155, 353, 8840, 836, 8, 341, 77333, 16384, 6918, 675, 1669, 330, 2929, 6918, 698, 77333, 6776, 1078, 675, 1669, 330, 28181, 698, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 174...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFirst(t *testing.T) { assert := require.New(t) var input interface{} assert.Nil(First(nil)) assert.Nil(First(input)) input = []int{} assert.Nil(First(input)) input = []int{1, 3, 5} assert.Equal(1, First(input)) }
explode_data.jsonl/45557
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 5338, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 1373, 7121, 1155, 340, 2405, 1946, 3749, 31483, 6948, 59678, 7, 5338, 27907, 1171, 6948, 59678, 7, 5338, 5384, 4390, 22427, 284, 3056, 396, 16094, 6948, 59678, 7, 5338, 5384, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAPITimeMarshalIsReciprocal(t *testing.T) { var when = repo.NewAPITime(time.Now()) subjectBytes, err := json.Marshal(&when) if err != nil { t.Fatal(err) } var actual repo.APITime if err := json.Unmarshal(subjectBytes, &actual); err != nil { t.Fatal(err) } if !when.Equal(actual.Time) { t.Errorf("expected (%s) to equal (%s), but did not", actual, when) } }
explode_data.jsonl/19476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 2537, 952, 545, 55438, 3872, 3820, 48789, 5416, 1155, 353, 8840, 836, 8, 341, 2405, 979, 284, 15867, 7121, 2537, 952, 545, 9730, 13244, 2398, 28624, 583, 7078, 11, 1848, 1669, 2951, 37271, 2099, 9309, 340, 743, 1848, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetResourceID(t *testing.T) { testData := []struct { Environment azure.Environment Expected string }{ { Environment: azure.ChinaCloud, Expected: "https://account1.blob.core.chinacloudapi.cn/container1", }, { Environment: azure.GermanCloud, Expected: "https://account1.blob.core.cloudapi.de/container1", }, { Environment: azure.PublicCloud, Expected: "https://account1.blob.core.windows.net/container1", }, { Environment: azure.USGovernmentCloud, Expected: "https://account1.blob.core.usgovcloudapi.net/container1", }, } for _, v := range testData { t.Logf("[DEBUG] Testing Environment %q", v.Environment.Name) c := NewWithEnvironment(v.Environment) actual := c.GetResourceID("account1", "container1") if actual != v.Expected { t.Fatalf("Expected the Resource ID to be %q but got %q", v.Expected, actual) } } }
explode_data.jsonl/25311
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 1949, 4783, 915, 1155, 353, 8840, 836, 8, 341, 18185, 1043, 1669, 3056, 1235, 341, 197, 197, 12723, 76530, 45651, 198, 197, 197, 18896, 262, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 12723, 25, 76530, 6353, 2210, 1605...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreditNoteListLines(t *testing.T) { i := ListLines(&stripe.CreditNoteLineItemListParams{ ID: stripe.String("cn_123"), }) // Verify that we can get at least one invoice assert.True(t, i.Next()) assert.Nil(t, i.Err()) assert.NotNil(t, i.CreditNoteLineItem()) assert.NotNil(t, i.CreditNoteLineItemList()) }
explode_data.jsonl/29989
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 33493, 9112, 852, 16794, 1155, 353, 8840, 836, 8, 341, 8230, 1669, 1759, 16794, 2099, 61233, 727, 10827, 9112, 2460, 82874, 4870, 515, 197, 29580, 25, 45542, 6431, 445, 14271, 62, 16, 17, 18, 4461, 197, 8824, 197, 322, 254...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPointApproxEqual(t *testing.T) { tests := []struct { x1, y1, z1 float64 x2, y2, z2 float64 want bool }{ {1, 0, 0, 1, 0, 0, true}, {1, 0, 0, 0, 1, 0, false}, {1, 0, 0, 0, 1, 1, false}, {1, 0, 0, -1, 0, 0, false}, {1, 2, 3, 2, 3, -1, false}, {1, 0, 0, 1 * (1 + epsilon), 0, 0, true}, {1, 0, 0, 1 * (1 - epsilon), 0, 0, true}, {1, 0, 0, 1 + epsilon, 0, 0, true}, {1, 0, 0, 1 - epsilon, 0, 0, true}, {1, 0, 0, 1, epsilon, 0, true}, {1, 0, 0, 1, epsilon, epsilon, false}, {1, epsilon, 0, 1, -epsilon, epsilon, false}, } for _, test := range tests { p1 := Point{r3.Vector{test.x1, test.y1, test.z1}} p2 := Point{r3.Vector{test.x2, test.y2, test.z2}} if got := p1.ApproxEqual(p2); got != test.want { t.Errorf("%v.ApproxEqual(%v), got %v want %v", p1, p2, got, test.want) } } }
explode_data.jsonl/52183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 455 }
[ 2830, 3393, 2609, 69520, 2993, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 10225, 16, 11, 379, 16, 11, 1147, 16, 2224, 21, 19, 198, 197, 10225, 17, 11, 379, 17, 11, 1147, 17, 2224, 21, 19, 198, 197, 50780, 9...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestFullLegendWithPublicIP(t *testing.T) { ms := machine.MachineState{ ID: "595989bb-cbb7-49ce-8726-722d6e157b4e", PublicIP: "5.6.7.8", Metadata: map[string]string{"foo": "bar"}, } l := machineFullLegend(ms, false) if l != "595989bb.../5.6.7.8" { t.Errorf("Expected partial machine ID with public IP, but it was %s\n", l) } l = machineFullLegend(ms, true) if l != "595989bb-cbb7-49ce-8726-722d6e157b4e/5.6.7.8" { t.Errorf("Expected full machine ID with public IP, but it was %s\n", l) } }
explode_data.jsonl/19864
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 9432, 39675, 2354, 12676, 3298, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 5662, 1321, 3814, 1397, 515, 197, 29580, 25, 981, 330, 20, 24, 20, 24, 23, 24, 6066, 1786, 6066, 22, 12, 19, 24, 346, 12, 23, 22, 17, 21, 12,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTopDownQueryCancellation(t *testing.T) { ast.RegisterBuiltin(&ast.Builtin{ Name: "test.sleep", Decl: types.NewFunction( types.Args(types.S), nil, ), }) RegisterFunctionalBuiltinVoid1("test.sleep", func(a ast.Value) error { d, _ := time.ParseDuration(string(a.(ast.String))) time.Sleep(d) return nil }) ctx := context.Background() compiler := compileModules([]string{ ` package test p { data.arr[_] = _; test.sleep("1ms") } `, }) data := map[string]interface{}{ "arr": make([]interface{}, 1000), } store := inmem.NewFromObject(data) txn := storage.NewTransactionOrDie(ctx, store) cancel := NewCancel() query := NewQuery(ast.MustParseBody("data.test.p")). WithCompiler(compiler). WithStore(store). WithTransaction(txn). WithCancel(cancel) go func() { time.Sleep(time.Millisecond * 50) cancel.Cancel() }() qrs, err := query.Run(ctx) if err == nil || err.(*Error).Code != CancelErr { t.Fatalf("Expected cancel error but got: %v (err: %v)", qrs, err) } }
explode_data.jsonl/25229
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 5366, 4454, 2859, 82298, 1155, 353, 8840, 836, 8, 341, 88836, 19983, 33, 25628, 2099, 559, 1785, 25628, 515, 197, 21297, 25, 330, 1944, 11118, 756, 197, 197, 21629, 25, 4494, 7121, 5152, 1006, 298, 98785, 51015, 52613, 808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExtraPKNotNullFlag(t *testing.T) { sql := "select count(*) from t3" s := createPlannerSuite() ctx := context.Background() comment := fmt.Sprintf("for %s", sql) stmt, err := s.p.ParseOneStmt(sql, "", "") require.NoError(t, err, comment) p, _, err := BuildLogicalPlanForTest(ctx, s.ctx, stmt, s.is) require.NoError(t, err, comment) ds := p.(*LogicalProjection).children[0].(*LogicalAggregation).children[0].(*DataSource) require.Equal(t, "_tidb_rowid", ds.Columns[2].Name.L) require.Equal(t, mysql.PriKeyFlag|mysql.NotNullFlag, ds.Columns[2].Flag) require.Equal(t, mysql.PriKeyFlag|mysql.NotNullFlag, ds.schema.Columns[2].RetType.Flag) }
explode_data.jsonl/50207
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 11612, 22242, 11005, 12135, 1155, 353, 8840, 836, 8, 341, 30633, 1669, 330, 1742, 1760, 28671, 504, 259, 18, 698, 1903, 1669, 1855, 2120, 4887, 28000, 741, 20985, 1669, 2266, 19047, 741, 96268, 1669, 8879, 17305, 445, 1958, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFields(t *testing.T) { type Student struct { ID int64 `db:"id"` Name string `db:"name"` Age sql.NullInt64 `db:"age"` Score sql.NullFloat64 `db:"score"` CreateTime time.Time `db:"create_time"` UpdateTime sql.NullTime `db:"update_time"` } var ( studentFieldNames = builderx.RawFieldNames(&Student{}) studentRows = strings.Join(studentFieldNames, ",") studentRowsExpectAutoSet = strings.Join(stringx.Remove(studentFieldNames, "`id`", "`create_time`", "`update_time`"), ",") studentRowsWithPlaceHolder = strings.Join(stringx.Remove(studentFieldNames, "`id`", "`create_time`", "`update_time`"), "=?,") + "=?" ) assert.Equal(t, []string{"`id`", "`name`", "`age`", "`score`", "`create_time`", "`update_time`"}, studentFieldNames) assert.Equal(t, "`id`,`name`,`age`,`score`,`create_time`,`update_time`", studentRows) assert.Equal(t, "`name`,`age`,`score`", studentRowsExpectAutoSet) assert.Equal(t, "`name`=?,`age`=?,`score`=?", studentRowsWithPlaceHolder) }
explode_data.jsonl/6517
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 8941, 1155, 353, 8840, 836, 8, 341, 13158, 11726, 2036, 341, 197, 29580, 260, 526, 21, 19, 1843, 1565, 1999, 2974, 307, 8805, 197, 21297, 981, 914, 688, 1565, 1999, 2974, 606, 8805, 197, 197, 16749, 286, 5704, 23979, 1072,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestComputeChunkEndTime(t *testing.T) { cases := []struct { start, cur, max int64 res int64 }{ { start: 0, cur: 250, max: 1000, res: 1000, }, { start: 100, cur: 200, max: 1000, res: 550, }, // Case where we fit floored 0 chunks. Must catch division by 0 // and default to maximum time. { start: 0, cur: 500, max: 1000, res: 1000, }, // Catch division by zero for cur == start. Strictly not a possible case. { start: 100, cur: 100, max: 1000, res: 104, }, } for _, c := range cases { got := computeChunkEndTime(c.start, c.cur, c.max) if got != c.res { t.Errorf("expected %d for (start: %d, cur: %d, max: %d), got %d", c.res, c.start, c.cur, c.max, got) } } }
explode_data.jsonl/38166
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 46254, 28304, 55833, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 21375, 11, 2847, 11, 1932, 526, 21, 19, 198, 197, 10202, 1797, 526, 21, 19, 198, 197, 59403, 197, 197, 515, 298, 21375, 25, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSetupNSTimeout(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() ecscniClient := NewClient(&Config{}) libcniClient := mock_libcni.NewMockCNI(ctrl) ecscniClient.(*cniClient).libcni = libcniClient gomock.InOrder( // ENI plugin was called first libcniClient.EXPECT().AddNetwork(gomock.Any(), gomock.Any(), gomock.Any()).Return(&current.Result{}, errors.New("timeout")).Do( func(ctx context.Context, net *libcni.NetworkConfig, rt *libcni.RuntimeConf) { }).MaxTimes(1), libcniClient.EXPECT().AddNetwork(gomock.Any(), gomock.Any(), gomock.Any()).Return(&current.Result{}, nil).MaxTimes(1), libcniClient.EXPECT().AddNetwork(gomock.Any(), gomock.Any(), gomock.Any()).Return(&current.Result{}, nil).MaxTimes(1), ) _, err := ecscniClient.SetupNS(context.TODO(), &Config{}, time.Millisecond) assert.Error(t, err) }
explode_data.jsonl/30396
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 21821, 50479, 545, 411, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 757, 2388, 7751, 2959, 1669, 1532, 2959, 2099, 2648, 37790, 197, 55576, 7751, 295...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFlowEncodeJSON(t *testing.T) { assert.EqualValues(t, "", gjson.Get(jsonx.TestMarshalJSONString(t, &settings.Flow{RequestURL: "https://foo.bar?foo=bar"}), "return_to").String()) assert.EqualValues(t, "/bar", gjson.Get(jsonx.TestMarshalJSONString(t, &settings.Flow{RequestURL: "https://foo.bar?return_to=/bar"}), "return_to").String()) assert.EqualValues(t, "/bar", gjson.Get(jsonx.TestMarshalJSONString(t, settings.Flow{RequestURL: "https://foo.bar?return_to=/bar"}), "return_to").String()) }
explode_data.jsonl/72741
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 18878, 32535, 5370, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 6227, 1155, 11, 7342, 342, 2236, 2234, 9304, 87, 8787, 55438, 5370, 703, 1155, 11, 609, 6511, 60732, 90, 1900, 3144, 25, 330, 2428, 1110, 7975, 22001, 30, 7975...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewBuilder(t *testing.T) { t.Run("with-range-query", func(t *testing.T) { testBuilderWithRangeQuery = NewBuilder( WithMetric("test_metric"), WithLabelMatchers( &LabelMatcher{ Label: "test_label1", Operator: Equal, Value: "test_value1", }, &LabelMatcher{ Label: "test_label2", Operator: Equal, Value: "test_value2", }), WithRange(Seconds, 5)) assert.NotNil(t, testBuilderWithRangeQuery) assert.Equal(t, "test_metric", testBuilderWithRangeQuery.metric) assert.Len(t, testBuilderWithRangeQuery.labelMatchers, 2) assert.ObjectsAreEqualValues(&LabelMatcher{ Label: "test_label1", Operator: Equal, Value: "test_value1", }, testBuilderWithRangeQuery.labelMatchers[0]) assert.ObjectsAreEqualValues(&LabelMatcher{ Label: "test_label2", Operator: Equal, Value: "test_value2", }, testBuilderWithRangeQuery.labelMatchers[1]) assert.Equal(t, Seconds, testBuilderWithRangeQuery.timeUnit) assert.Equal(t, uint(5), testBuilderWithRangeQuery.timeDuration) }) t.Run("without-range-query", func(t *testing.T) { testBuilderWithoutRangeQuery = NewBuilder( WithMetric("test_metric"), WithLabelMatchers( &LabelMatcher{ Label: "test_label1", Operator: Equal, Value: "test_value1", }, &LabelMatcher{ Label: "test_label2", Operator: Equal, Value: "test_value2", }), WithRange(None, 0)) assert.NotNil(t, testBuilderWithoutRangeQuery) assert.Equal(t, "test_metric", testBuilderWithoutRangeQuery.metric) assert.Len(t, testBuilderWithoutRangeQuery.labelMatchers, 2) assert.ObjectsAreEqualValues(&LabelMatcher{ Label: "test_label1", Operator: Equal, Value: "test_value1", }, testBuilderWithoutRangeQuery.labelMatchers[0]) assert.ObjectsAreEqualValues(&LabelMatcher{ Label: "test_label2", Operator: Equal, Value: "test_value2", }, testBuilderWithoutRangeQuery.labelMatchers[1]) assert.Equal(t, None, testBuilderWithoutRangeQuery.timeUnit) assert.Equal(t, uint(0), testBuilderWithoutRangeQuery.timeDuration) }) }
explode_data.jsonl/20478
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 897 }
[ 2830, 3393, 3564, 3297, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 4197, 30508, 65489, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18185, 3297, 2354, 6046, 2859, 284, 1532, 3297, 1006, 298, 197, 2354, 54310, 445, 1944, 41294, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInsertShardedSimple(t *testing.T) { invschema := &vschemapb.SrvVSchema{ Keyspaces: map[string]*vschemapb.Keyspace{ "sharded": { Sharded: true, Vindexes: map[string]*vschemapb.Vindex{ "hash": { Type: "hash", }, }, Tables: map[string]*vschemapb.Table{ "t1": { ColumnVindexes: []*vschemapb.ColumnVindex{{ Name: "hash", Columns: []string{"id"}, }}, }, }, }, }, } vs, err := vindexes.BuildVSchema(invschema) if err != nil { t.Fatal(err) } ks := vs.Keyspaces["sharded"] // A single row insert should be autocommitted ins := NewInsert( InsertSharded, ks.Keyspace, []sqltypes.PlanValue{{ // colVindex columns: id Values: []sqltypes.PlanValue{{ // 3 rows. Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(1), }}, }}, }}, ks.Tables["t1"], "prefix", []string{" mid1"}, " suffix", ) vc := &loggingVCursor{ shards: []string{"-20", "20-"}, shardForKsid: []string{"20-", "-20", "20-"}, } _, err = ins.Execute(vc, map[string]*querypb.BindVariable{}, false) if err != nil { t.Fatal(err) } vc.ExpectLog(t, []string{ // Based on shardForKsid, values returned will be 20-. `ResolveDestinations sharded [value:"0" ] Destinations:DestinationKeyspaceID(166b40b44aba4bd6)`, // Row 2 will go to -20, rows 1 & 3 will go to 20- `ExecuteMultiShard ` + `sharded.20-: prefix mid1 suffix /* vtgate:: keyspace_id:166b40b44aba4bd6 */ {_id0: type:INT64 value:"1" } ` + `true true`, }) // Multiple rows are not autocommitted by default ins = NewInsert( InsertSharded, ks.Keyspace, []sqltypes.PlanValue{{ // colVindex columns: id Values: []sqltypes.PlanValue{{ // 3 rows. 
Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(1), }, { Value: sqltypes.NewInt64(2), }, { Value: sqltypes.NewInt64(3), }}, }}, }}, ks.Tables["t1"], "prefix", []string{" mid1", " mid2", " mid3"}, " suffix", ) vc = &loggingVCursor{ shards: []string{"-20", "20-"}, shardForKsid: []string{"20-", "-20", "20-"}, } _, err = ins.Execute(vc, map[string]*querypb.BindVariable{}, false) if err != nil { t.Fatal(err) } vc.ExpectLog(t, []string{ // Based on shardForKsid, values returned will be 20-, -20, 20-. `ResolveDestinations sharded [value:"0" value:"1" value:"2" ] Destinations:DestinationKeyspaceID(166b40b44aba4bd6),DestinationKeyspaceID(06e7ea22ce92708f),DestinationKeyspaceID(4eb190c9a2fa169c)`, // Row 2 will go to -20, rows 1 & 3 will go to 20- `ExecuteMultiShard ` + `sharded.20-: prefix mid1, mid3 suffix /* vtgate:: keyspace_id:166b40b44aba4bd6,4eb190c9a2fa169c */ {_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `sharded.-20: prefix mid2 suffix /* vtgate:: keyspace_id:06e7ea22ce92708f */ {_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `true false`, }) // Optional flag overrides autocommit ins = NewInsert( InsertSharded, ks.Keyspace, []sqltypes.PlanValue{{ // colVindex columns: id Values: []sqltypes.PlanValue{{ // 3 rows. Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(1), }, { Value: sqltypes.NewInt64(2), }, { Value: sqltypes.NewInt64(3), }}, }}, }}, ks.Tables["t1"], "prefix", []string{" mid1", " mid2", " mid3"}, " suffix", ) ins.MultiShardAutocommit = true vc = &loggingVCursor{ shards: []string{"-20", "20-"}, shardForKsid: []string{"20-", "-20", "20-"}, } _, err = ins.Execute(vc, map[string]*querypb.BindVariable{}, false) if err != nil { t.Fatal(err) } vc.ExpectLog(t, []string{ // Based on shardForKsid, values returned will be 20-, -20, 20-. 
`ResolveDestinations sharded [value:"0" value:"1" value:"2" ] Destinations:DestinationKeyspaceID(166b40b44aba4bd6),DestinationKeyspaceID(06e7ea22ce92708f),DestinationKeyspaceID(4eb190c9a2fa169c)`, // Row 2 will go to -20, rows 1 & 3 will go to 20- `ExecuteMultiShard ` + `sharded.20-: prefix mid1, mid3 suffix /* vtgate:: keyspace_id:166b40b44aba4bd6,4eb190c9a2fa169c */ {_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `sharded.-20: prefix mid2 suffix /* vtgate:: keyspace_id:06e7ea22ce92708f */ {_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `true true`, }) }
explode_data.jsonl/55986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2034 }
[ 2830, 3393, 13780, 2016, 20958, 16374, 1155, 353, 8840, 836, 8, 341, 17430, 11562, 3416, 1669, 609, 11562, 2407, 391, 65, 808, 10553, 53, 8632, 515, 197, 197, 8850, 27338, 25, 2415, 14032, 8465, 11562, 2407, 391, 65, 37863, 1306, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestThrottling_SamplingPriority(t *testing.T) { tracer, closer := NewTracer("DOOP", NewConstSampler(true), NewNullReporter()) sp1 := tracer.StartSpan("s1", opentracing.Tags{string(ext.SamplingPriority): 0}).(*Span) assert.False(t, sp1.context.IsDebug()) sp1 = tracer.StartSpan("s1", opentracing.Tags{string(ext.SamplingPriority): uint16(1)}).(*Span) assert.True(t, sp1.context.IsDebug()) assert.NotNil(t, findDomainTag(sp1, "sampling.priority"), "sampling.priority tag should be added") closer.Close() tracer, closer = NewTracer("DOOP", NewConstSampler(true), NewNullReporter(), TracerOptions.DebugThrottler(testThrottler{allowAll: false})) defer closer.Close() sp1 = tracer.StartSpan("s1", opentracing.Tags{string(ext.SamplingPriority): uint16(1)}).(*Span) ext.SamplingPriority.Set(sp1, 1) assert.False(t, sp1.context.IsDebug(), "debug should not be allowed by the throttler") }
explode_data.jsonl/44655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 341 }
[ 2830, 3393, 1001, 46689, 2718, 1098, 29709, 20555, 1155, 353, 8840, 836, 8, 341, 25583, 9584, 11, 12128, 1669, 1532, 1282, 9584, 445, 5865, 3067, 497, 1532, 19167, 66048, 3715, 701, 1532, 3280, 52766, 12367, 41378, 16, 1669, 64306, 12101,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewTxFromBytes(t *testing.T) { t.Parallel() t.Run("valid tx", func(t *testing.T) { rawTx := "02000000011ccba787d421b98904da3329b2c7336f368b62e89bc896019b5eadaa28145b9c0000000049483045022100c4df63202a9aa2bea5c24ebf4418d145e81712072ef744a4b108174f1ef59218022006eb54cf904707b51625f521f8ed2226f7d34b62492ebe4ddcb1c639caf16c3c41ffffffff0140420f00000000001976a91418392a59fc1f76ad6a3c7ffcea20cfcb17bda9eb88ac00000000" b, err := hex.DecodeString(rawTx) assert.NoError(t, err) var tx *bt.Tx tx, err = bt.NewTxFromBytes(b) assert.NoError(t, err) assert.NotNil(t, tx) }) t.Run("invalid tx, too short", func(t *testing.T) { rawTx := "000000" b, err := hex.DecodeString(rawTx) assert.NoError(t, err) var tx *bt.Tx tx, err = bt.NewTxFromBytes(b) assert.Error(t, err) assert.Nil(t, tx) }) }
explode_data.jsonl/28736
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 407 }
[ 2830, 3393, 3564, 31584, 3830, 7078, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 16708, 445, 1891, 9854, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 76559, 31584, 1669, 330, 15, 17, 15, 15, 15, 15, 15, 15, 15, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHandler(t *testing.T) { valOpAddrs, valConsPks, valConsAddrs := keeper.GetTestAddrs() ctx, ak, _, k, sk, _, supplyKeeper := keeper.CreateTestInputAdvanced(t, false, 1000) dh := NewHandler(k) // create one validator sh := staking.NewHandler(sk) skMsg := staking.NewMsgCreateValidator(valOpAddrs[0], valConsPks[0], staking.Description{}, keeper.NewTestDecCoin(1, 0)) require.True(t, sh(ctx, skMsg).IsOK()) //send 1okt fee feeCollector := supplyKeeper.GetModuleAccount(ctx, k.GetFeeCollectorName()) err := feeCollector.SetCoins(keeper.NewTestDecCoins(1, 0)) require.NoError(t, err) ak.SetAccount(ctx, feeCollector) // crate votes info and allocate tokens abciVal := abci.Validator{Address: valConsPks[0].Address(), Power: 1} votes := []abci.VoteInfo{{Validator: abciVal, SignedLastBlock: true}} k.AllocateTokens(ctx, 100, valConsAddrs[0], votes) //send withdraw-commission msgWithdrawValCommission msgWithdrawValCommission := types.NewMsgWithdrawValidatorCommission(valOpAddrs[0]) require.True(t, dh(ctx, msgWithdrawValCommission).IsOK()) require.False(t, dh(ctx, msgWithdrawValCommission).IsOK()) //send set-withdraw-address msgSetWithdrawAddress msgSetWithdrawAddress := types.NewMsgSetWithdrawAddress(keeper.TestAddrs[0], keeper.TestAddrs[1]) require.True(t, dh(ctx, msgSetWithdrawAddress).IsOK()) k.SetWithdrawAddrEnabled(ctx, false) require.False(t, dh(ctx, msgSetWithdrawAddress).IsOK()) msgSetWithdrawAddress = types.NewMsgSetWithdrawAddress(keeper.TestAddrs[0], supplyKeeper.GetModuleAddress(ModuleName)) require.False(t, dh(ctx, msgSetWithdrawAddress).IsOK()) //send unknown msgWithdrawValCommission fakeMsg := NewMsgFake() require.False(t, dh(ctx, fakeMsg).IsOK()) }
explode_data.jsonl/35267
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 606 }
[ 2830, 3393, 3050, 1155, 353, 8840, 836, 8, 341, 19302, 7125, 2212, 5428, 11, 1044, 15220, 47, 2787, 11, 1044, 15220, 2212, 5428, 1669, 53416, 2234, 2271, 2212, 5428, 741, 20985, 11, 17324, 11, 8358, 595, 11, 1901, 11, 8358, 8149, 7723...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAdd(t *testing.T) { p := New() p.Add("foo", "bar") p.Add("foo", "rab") p.Add("foo", "foo", "bar") assert.Equal(t, []string{"bar", "rab", "foo", "bar"}, p.GetAll("foo")) }
explode_data.jsonl/1462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 2212, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 741, 3223, 1904, 445, 7975, 497, 330, 2257, 1138, 3223, 1904, 445, 7975, 497, 330, 50105, 1138, 3223, 1904, 445, 7975, 497, 330, 7975, 497, 330, 2257, 1138, 6948, 12808...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLog(t *testing.T) { const name = "mylog" const supports = eventlog.Error | eventlog.Warning | eventlog.Info err := eventlog.InstallAsEventCreate(name, supports) if err != nil { t.Fatalf("Install failed: %s", err) } l, err := eventlog.Open(name) if err != nil { t.Fatalf("Open failed: %s", err) } defer l.Close() err = l.Info(1, "info") if err != nil { t.Fatalf("Info failed: %s", err) } err = l.Warning(2, "warning") if err != nil { t.Fatalf("Warning failed: %s", err) } err = l.Error(3, "error") if err != nil { t.Fatalf("Error failed: %s", err) } err = eventlog.Remove(name) if err != nil { t.Fatalf("Remove failed: %s", err) } }
explode_data.jsonl/74034
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 2201, 1155, 353, 8840, 836, 8, 341, 4777, 829, 284, 330, 2408, 839, 698, 4777, 11554, 284, 1538, 839, 6141, 760, 1538, 839, 51763, 760, 1538, 839, 20132, 198, 9859, 1669, 1538, 839, 71207, 541, 2121, 1556, 4021, 3153, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestReplaceStr(t *testing.T) { v := &Value{data: []string{string("hello"), string("hello"), string("hello"), string("hello"), string("hello"), string("hello")}} rawArr := v.MustStrSlice() replaced := v.ReplaceStr(func(index int, val string) string { if index < len(rawArr)-1 { return rawArr[index+1] } return rawArr[0] }) replacedArr := replaced.MustStrSlice() if assert.Equal(t, 6, len(replacedArr)) { assert.Equal(t, replacedArr[0], rawArr[1]) assert.Equal(t, replacedArr[1], rawArr[2]) assert.Equal(t, replacedArr[2], rawArr[3]) assert.Equal(t, replacedArr[3], rawArr[4]) assert.Equal(t, replacedArr[4], rawArr[5]) assert.Equal(t, replacedArr[5], rawArr[0]) } }
explode_data.jsonl/23414
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 300 }
[ 2830, 3393, 23107, 2580, 1155, 353, 8840, 836, 8, 1476, 5195, 1669, 609, 1130, 90, 691, 25, 3056, 917, 90, 917, 445, 14990, 3975, 914, 445, 14990, 3975, 914, 445, 14990, 3975, 914, 445, 14990, 3975, 914, 445, 14990, 3975, 914, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBuilder_Build(t *testing.T) { _ = orc8rplugin.RegisterPluginForTests(t, &pluginimpl.BaseOrchestratorPlugin{}) _ = orc8rplugin.RegisterPluginForTests(t, &plugin.LteOrchestratorPlugin{}) builder := &plugin.Builder{} nw := configurator.Network{ ID: "n1", Configs: map[string]interface{}{ lte.CellularNetworkType: models2.NewDefaultTDDNetworkConfig(), orc8r.DnsdNetworkType: &models.NetworkDNSConfig{ EnableCaching: swag.Bool(true), }, }, } gw := configurator.NetworkEntity{ Type: orc8r.MagmadGatewayType, Key: "gw1", Associations: []storage.TypeAndKey{ {Type: lte.CellularGatewayType, Key: "gw1"}, }, } lteGW := configurator.NetworkEntity{ Type: lte.CellularGatewayType, Key: "gw1", Config: newDefaultGatewayConfig(), Associations: []storage.TypeAndKey{ {Type: lte.CellularEnodebType, Key: "enb1"}, }, ParentAssociations: []storage.TypeAndKey{gw.GetTypeAndKey()}, } enb := configurator.NetworkEntity{ Type: lte.CellularEnodebType, Key: "enb1", Config: newDefaultEnodebConfig(), ParentAssociations: []storage.TypeAndKey{lteGW.GetTypeAndKey()}, } rating1 := configurator.NetworkEntity{ Type: lte.RatingGroupEntityType, Key: "1", Config: &models2.RatingGroup{ ID: models2.RatingGroupID(uint32(1)), LimitType: swag.String("INFINITE_UNMETERED"), }, } rating2 := configurator.NetworkEntity{ Type: lte.RatingGroupEntityType, Key: "2", Config: &models2.RatingGroup{ ID: models2.RatingGroupID(uint32(2)), LimitType: swag.String("INFINITE_METERED"), }, } graph := configurator.EntityGraph{ Entities: []configurator.NetworkEntity{enb, lteGW, gw, rating1, rating2}, Edges: []configurator.GraphEdge{ {From: gw.GetTypeAndKey(), To: lteGW.GetTypeAndKey()}, {From: lteGW.GetTypeAndKey(), To: enb.GetTypeAndKey()}, }, } actual := map[string]proto.Message{} expected := map[string]proto.Message{ "enodebd": &mconfig.EnodebD{ LogLevel: protos.LogLevel_INFO, Pci: 260, TddConfig: &mconfig.EnodebD_TDDConfig{ Earfcndl: 44590, SubframeAssignment: 2, SpecialSubframePattern: 7, }, BandwidthMhz: 20, 
AllowEnodebTransmit: true, Tac: 1, PlmnidList: "00101", CsfbRat: mconfig.EnodebD_CSFBRAT_2G, Arfcn_2G: nil, EnbConfigsBySerial: map[string]*mconfig.EnodebD_EnodebConfig{ "enb1": { Earfcndl: 39150, SubframeAssignment: 2, SpecialSubframePattern: 7, Pci: 260, TransmitEnabled: true, DeviceClass: "Baicells ID TDD/FDD", BandwidthMhz: 20, Tac: 15000, CellId: 138777000, }, }, }, "mobilityd": &mconfig.MobilityD{ LogLevel: protos.LogLevel_INFO, IpBlock: "192.168.128.0/24", }, "mme": &mconfig.MME{ LogLevel: protos.LogLevel_INFO, Mcc: "001", Mnc: "01", Tac: 1, MmeCode: 1, MmeGid: 1, NonEpsServiceControl: mconfig.MME_NON_EPS_SERVICE_CONTROL_OFF, CsfbMcc: "001", CsfbMnc: "01", Lac: 1, RelayEnabled: false, CloudSubscriberdbEnabled: false, EnableDnsCaching: true, AttachedEnodebTacs: []int32{15000}, }, "pipelined": &mconfig.PipelineD{ LogLevel: protos.LogLevel_INFO, UeIpBlock: "192.168.128.0/24", NatEnabled: true, DefaultRuleId: "", Services: []mconfig.PipelineD_NetworkServices{ mconfig.PipelineD_ENFORCEMENT, }, }, "subscriberdb": &mconfig.SubscriberDB{ LogLevel: protos.LogLevel_INFO, LteAuthOp: []byte("\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11\x11"), LteAuthAmf: []byte("\x80\x00"), SubProfiles: nil, RelayEnabled: false, }, "policydb": &mconfig.PolicyDB{ LogLevel: protos.LogLevel_INFO, InfiniteMeteredChargingKeys: []uint32{uint32(2)}, InfiniteUnmeteredChargingKeys: []uint32{uint32(1)}, }, "sessiond": &mconfig.SessionD{ LogLevel: protos.LogLevel_INFO, RelayEnabled: false, WalletExhaustDetection: &mconfig.WalletExhaustDetection{ TerminateOnExhaust: false, }, }, } // Happy path err := builder.Build("n1", "gw1", graph, nw, actual) assert.NoError(t, err) assert.Equal(t, expected, actual) // Do break with non-allowed network service setEPCNetworkServices([]string{"0xdeadbeef"}, &nw) err = builder.Build("n1", "gw1", graph, nw, actual) assert.EqualError(t, err, "unknown network service name 0xdeadbeef") // Don't break with deprecated network services 
setEPCNetworkServices([]string{"metering"}, &nw) expected["pipelined"] = &mconfig.PipelineD{ LogLevel: protos.LogLevel_INFO, UeIpBlock: "192.168.128.0/24", NatEnabled: true, DefaultRuleId: "", Services: []mconfig.PipelineD_NetworkServices{ mconfig.PipelineD_METERING, }, } err = builder.Build("n1", "gw1", graph, nw, actual) assert.NoError(t, err) assert.Equal(t, expected, actual) }
explode_data.jsonl/66783
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2595 }
[ 2830, 3393, 3297, 96686, 1155, 353, 8840, 836, 8, 341, 197, 62, 284, 75691, 23, 81, 9138, 19983, 11546, 2461, 18200, 1155, 11, 609, 9138, 6383, 13018, 2195, 331, 15111, 850, 11546, 37790, 197, 62, 284, 75691, 23, 81, 9138, 19983, 1154...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientReplaceAllProjectParameters(t *testing.T) { client, err := NewRealTestClient(t) require.NoError(t, err, "Expected no error") err = client.DeleteProject("TestClientReplaceAllProjectParameters") require.NoError(t, err, "Expected no error") err = client.CreateProject(&types.Project{ Name: "TestClientReplaceAllProjectParameters", }) require.NoError(t, err, "Expected no error") err = client.ReplaceAllProjectParameters("TestClientReplaceAllProjectParameters", &types.Parameters{ "env.MUH": types.Parameter{ Value: "Hello", Spec: &types.ParameterSpec{ Type: types.PasswordType{}, }, }, }) require.NoError(t, err, "Expected no error") parameters := types.Parameters{ "env.HELLO": types.Parameter{"Good job", nil}, "aws.hush": types.Parameter{ Value: "Bad Job", Spec: &types.ParameterSpec{ Type: types.PasswordType{}, }, }, } err = client.ReplaceAllProjectParameters("TestClientReplaceAllProjectParameters", &parameters) require.NoError(t, err, "Expected no error") require.NotNil(t, parameters, "Update to return parameters") expected := types.Parameters{ "env.HELLO": types.Parameter{"Good job", nil}, "aws.hush": types.Parameter{ Value: "", Spec: &types.ParameterSpec{ Type: types.PasswordType{}, }, }, } assert.Equal(t, expected, parameters) config, err := client.GetProject("TestClientReplaceAllProjectParameters") require.NoError(t, err, "Expected no error") require.NotNil(t, config, "Get to return config") assert.Equal(t, expected, config.Parameters) }
explode_data.jsonl/5822
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 544 }
[ 2830, 3393, 2959, 23107, 2403, 7849, 9706, 1155, 353, 8840, 836, 8, 341, 25291, 11, 1848, 1669, 1532, 12768, 2271, 2959, 1155, 340, 17957, 35699, 1155, 11, 1848, 11, 330, 18896, 902, 1465, 1138, 9859, 284, 2943, 18872, 7849, 445, 2271, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCAFileMustExist(t *testing.T) { paramUri := fmt.Sprintf("localhost?user=test&pass=yep&caCert=foo") _, err := newHawkularSource(paramUri) if err == nil { t.Errorf("Expected error from newHawkularSource") } }
explode_data.jsonl/55850
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 5049, 1703, 31776, 25613, 1155, 353, 8840, 836, 8, 341, 36037, 13899, 1669, 8879, 17305, 445, 8301, 30, 872, 53538, 5, 6385, 29368, 747, 5, 924, 36934, 28, 7975, 1138, 197, 6878, 1848, 1669, 501, 39, 23397, 1276, 3608, 974...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRulePatternMatchingUsage(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := []string{ `select c1,c2,c3,c4 from tab1 where col_id REGEXP '[[:<:]]12[[:>:]]';`, } for _, sql := range sqls { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RulePatternMatchingUsage() if rule.Item != "ARG.007" { t.Error("Rule not match:", rule.Item, "Expect : ARG.007") } } else { t.Error("sqlparser.Parse Error:", err) } } common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76791
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 11337, 15760, 64430, 14783, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 3056, 917, 515, 197, 197, 63, 1742, 272, 16, 10109, 17, 10109,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRegisterChargenEventFilter(t *testing.T) { sensor := newUnitTestSensor(t) defer sensor.Stop() // Invalid length should fail s := newTestSubscription(t, sensor) s.RegisterChargenEventFilter(0, nil) verifyRegisterChargenEventFilter(t, s, -1) s = newTestSubscription(t, sensor) s.RegisterChargenEventFilter(1<<16+1, nil) verifyRegisterChargenEventFilter(t, s, -1) // This should succeed s = newTestSubscription(t, sensor) s.RegisterChargenEventFilter(32, nil) verifyRegisterChargenEventFilter(t, s, 1) ctx, cancel := context.WithCancel(context.Background()) s.Run(ctx, nil) time.Sleep(200 * time.Millisecond) cancel() }
explode_data.jsonl/46910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 8690, 1143, 858, 268, 1556, 5632, 1155, 353, 8840, 836, 8, 341, 1903, 3805, 1669, 501, 97330, 30752, 1155, 340, 16867, 12002, 30213, 2822, 197, 322, 13882, 3084, 1265, 3690, 198, 1903, 1669, 501, 2271, 33402, 1155, 11, 12002...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRetryWorkerTriesUntilTimeout(t *testing.T) { // Retry every 250ms for up to 1s. c := &config.Config{ WriterRetryBatches: 1, WriterRetryInterval: config.NewDuration(250 * time.Millisecond), WriterRetryTimeout: config.NewDuration(1 * time.Second), } batchCh := make(chan []byte) db := newFakeDBWriter() db.MakeReturnErrors(99) // Writes will just keep failing w := newRetryWorker(batchCh, db, c) defer w.Stop() sendBatch(t, batchCh, "foo") // Wait for double the timeout to ensure the retry worker stops // sending after the batch has timed out. timeout := time.After(multDuration(c.WriterRetryTimeout, 2)) writeCount := 0 for { select { case <-db.Writes: writeCount++ case <-timeout: // Should have retried every 250ms for 1s. 3 not 4 because // the worker waits the retry period before the first // send. assert.Equal(t, 3, writeCount) assertWriteErrorCount(t, w, 3) return } } }
explode_data.jsonl/46120
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 51560, 21936, 51, 4019, 24493, 7636, 1155, 353, 8840, 836, 8, 341, 197, 322, 78870, 1449, 220, 17, 20, 15, 1011, 369, 705, 311, 220, 16, 82, 624, 1444, 1669, 609, 1676, 10753, 515, 197, 197, 6492, 51560, 33, 9118, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestListOfSpeakersModeA(t *testing.T) { f := collection.ListOfSpeakers{}.Modes("A") testCase( "can see", t, f, true, "list_of_speakers/1/meeting_id: 1", withPerms(1, perm.ListOfSpeakersCanSee), ) testCase( "no perm", t, f, false, "list_of_speakers/1/meeting_id: 1", ) }
explode_data.jsonl/75681
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 64090, 19719, 8312, 3636, 32, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 4426, 5814, 2124, 19719, 8312, 46391, 70035, 445, 32, 5130, 18185, 4207, 1006, 197, 197, 1, 4814, 1490, 756, 197, 3244, 345, 197, 1166, 345, 197, 4280...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEvalTakesStruct(t *testing.T) { var strNode = ArgNode{ value: "a.B", values: strings.Split("a.B", "."), valuesLen: 2, } var m = A{B: "B", C: C{D: "D"}} var r, _ = EvalTakes(strNode, m) fmt.Println(r) }
explode_data.jsonl/52556
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 118 }
[ 2830, 3393, 54469, 51, 2050, 9422, 1155, 353, 8840, 836, 8, 341, 2405, 607, 1955, 284, 7638, 1955, 515, 197, 16309, 25, 257, 330, 64, 1785, 756, 197, 45939, 25, 262, 9069, 19823, 445, 64, 1785, 497, 5933, 4461, 197, 45939, 11271, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoRule(t *testing.T) { rs := NewRuleSet([]Rule{}) result, rule := rs.Apply(&cb.Envelope{}) if result != Forward { t.Fatalf("Should have forwarded") } if rule != nil { t.Fatalf("Forwarded but rule is set") } }
explode_data.jsonl/11107
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 2753, 11337, 1155, 353, 8840, 836, 8, 341, 41231, 1669, 1532, 11337, 1649, 10556, 11337, 37790, 9559, 11, 5912, 1669, 10036, 36051, 2099, 7221, 22834, 18853, 37790, 743, 1102, 961, 22164, 341, 197, 3244, 30762, 445, 14996, 614...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestH12_FlushBeforeBody(t *testing.T) { h12Compare{Handler: func(w ResponseWriter, r *Request) { w.(Flusher).Flush() io.WriteString(w, "foo") }}.run(t) }
explode_data.jsonl/4745
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 39, 16, 17, 1400, 34604, 10227, 5444, 1155, 353, 8840, 836, 8, 341, 9598, 16, 17, 27374, 90, 3050, 25, 2915, 3622, 5949, 6492, 11, 435, 353, 1900, 8, 341, 197, 6692, 12832, 46874, 261, 568, 46874, 741, 197, 53112, 44747,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestYouonDSpecial(t *testing.T) { const want = "zazizuzezozwadidyudu" for _, v := range [2]string{"づぁづぃづぅづぇづぉづゎでぃでゅどぅ", "ヅァヅィヅゥヅェヅォヅヮディデュドゥ"} { got, err := KanaToRomaji(v) assert.Equal(t, want, got) assert.Nil(t, err) } }
explode_data.jsonl/11325
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 2610, 263, 5936, 2964, 1155, 353, 8840, 836, 8, 341, 4777, 1366, 284, 330, 89, 1370, 449, 90211, 89, 9510, 86, 329, 42586, 87207, 1837, 2023, 8358, 348, 1669, 2088, 508, 17, 30953, 4913, 125301, 126025, 125301, 144205, 12530...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2