text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestNewAssetsExtractor(t *testing.T) { assets := assetsExtractor{} extractor, err := assets.new("testProject", filepath.Join("testdata", "assetsproject")) if err != nil { t.Error(err) } directDependencies, err := extractor.DirectDependencies() if err != nil { t.Error(err) } expectedDirectDependencies := []string{"dep1"} if !reflect.DeepEqual(expectedDirectDependencies, directDependencies) { t.Errorf("Expected: \n%s, \nGot: \n%s", expectedDirectDependencies, directDependencies) } allDependencies, err := extractor.AllDependencies() expectedAllDependencies := []string{"dep1", "dep2"} for _, v := range expectedAllDependencies { if _, ok := allDependencies[v]; !ok { t.Error("Expecting", v, "dependency") } } childrenMap, err := extractor.ChildrenMap() if err != nil { t.Error(err) } if len(childrenMap["dep1"]) != 0 { t.Error("Expected: []string{} got :", childrenMap["dep1"]) } if len(childrenMap["dep2"]) != 1 { t.Error("Expected: []string{\"dep1\"} got :", childrenMap["dep2"]) } }
explode_data.jsonl/15606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 3564, 26879, 56118, 1155, 353, 8840, 836, 8, 341, 197, 5160, 1669, 11770, 56118, 16094, 8122, 54766, 11, 1848, 1669, 11770, 4618, 445, 1944, 7849, 497, 26054, 22363, 445, 92425, 497, 330, 5160, 4987, 5455, 743, 1848, 961, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestTeams_Marshal(t *testing.T) { testJSONMarshal(t, &Team{}, "{}") u := &Team{ ID: Int64(1), NodeID: String("n"), Name: String("n"), Description: String("d"), URL: String("u"), Slug: String("s"), Permission: String("p"), Privacy: String("p"), MembersCount: Int(1), ReposCount: Int(1), MembersURL: String("m"), RepositoriesURL: String("r"), Organization: &Organization{ Login: String("l"), ID: Int64(1), NodeID: String("n"), AvatarURL: String("a"), HTMLURL: String("h"), Name: String("n"), Company: String("c"), Blog: String("b"), Location: String("l"), Email: String("e"), }, Parent: &Team{ ID: Int64(1), NodeID: String("n"), Name: String("n"), Description: String("d"), URL: String("u"), Slug: String("s"), Permission: String("p"), Privacy: String("p"), MembersCount: Int(1), ReposCount: Int(1), }, LDAPDN: String("l"), } want := `{ "id": 1, "node_id": "n", "name": "n", "description": "d", "url": "u", "slug": "s", "permission": "p", "privacy": "p", "members_count": 1, "repos_count": 1, "members_url": "m", "repositories_url": "r", "organization": { "login": "l", "id": 1, "node_id": "n", "avatar_url": "a", "html_url": "h", "name": "n", "company": "c", "blog": "b", "location": "l", "email": "e" }, "parent": { "id": 1, "node_id": "n", "name": "n", "description": "d", "url": "u", "slug": "s", "permission": "p", "privacy": "p", "members_count": 1, "repos_count": 1 }, "ldap_dn": "l" }` testJSONMarshal(t, u, want) }
explode_data.jsonl/4561
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 973 }
[ 2830, 3393, 60669, 1245, 28423, 1155, 353, 8840, 836, 8, 341, 18185, 5370, 55438, 1155, 11, 609, 14597, 22655, 35503, 5130, 10676, 1669, 609, 14597, 515, 197, 29580, 25, 1060, 1333, 21, 19, 7, 16, 1326, 197, 30217, 915, 25, 688, 923, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMWFunc(t *testing.T) { cases := []struct { name string wantStatus int header string }{ { name: "Empty header", wantStatus: http.StatusUnauthorized, }, { name: "Header not containing Bearer", header: "notBearer", wantStatus: http.StatusUnauthorized, }, { name: "Invalid header", header: mock.HeaderInvalid(), wantStatus: http.StatusUnauthorized, }, { name: "Success", header: mock.HeaderValid(), wantStatus: http.StatusOK, }, } jwtCfg := &config.JWT{Realm: "testRealm", Secret: "jwtsecret", Duration: 60, SigningAlgorithm: "HS256"} jwtMW := mw.NewJWT(jwtCfg) ts := httptest.NewServer(echoHandler(jwtMW.MWFunc())) defer ts.Close() path := ts.URL + "/hello" client := &http.Client{} for _, tt := range cases { t.Run(tt.name, func(t *testing.T) { req, _ := http.NewRequest("GET", path, nil) req.Header.Set("Authorization", tt.header) res, err := client.Do(req) if err != nil { t.Fatal("Cannot create http request") } assert.Equal(t, tt.wantStatus, res.StatusCode) }) } }
explode_data.jsonl/12254
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 506 }
[ 2830, 3393, 54173, 9626, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 50780, 2522, 526, 198, 197, 20883, 257, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 981, 330, 3522, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewPodTracker(t *testing.T) { // setup types logger := logrus.NewEntry(logrus.StandardLogger()) clientset := fake.NewSimpleClientset() tests := []struct { name string pod *v1.Pod wantErr bool }{ { name: "pass-with-pod", pod: _pod, wantErr: false, }, { name: "error-with-nil-pod", pod: nil, wantErr: true, }, { name: "error-with-empty-pod", pod: &v1.Pod{}, wantErr: true, }, { name: "error-with-pod-without-namespace", pod: &v1.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "test-pod"}, }, wantErr: true, }, { name: "fail-with-pod", pod: &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "github-octocat-1-for-some-odd-reason-this-name-is-way-too-long-and-will-cause-an-error", Namespace: _pod.ObjectMeta.Namespace, Labels: _pod.ObjectMeta.Labels, }, TypeMeta: _pod.TypeMeta, Spec: _pod.Spec, Status: _pod.Status, }, wantErr: true, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { _, err := newPodTracker(logger, clientset, test.pod, 0*time.Second) if (err != nil) != test.wantErr { t.Errorf("newPodTracker() error = %v, wantErr %v", err, test.wantErr) return } }) } }
explode_data.jsonl/62937
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 646 }
[ 2830, 3393, 3564, 23527, 31133, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 4494, 198, 17060, 1669, 1487, 20341, 7121, 5874, 12531, 20341, 53615, 7395, 2398, 25291, 746, 1669, 12418, 7121, 16374, 2959, 746, 2822, 78216, 1669, 3056, 1235...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHeader(t *testing.T) { key := "X-Custom" value := "foo" req := URL("https://example.com/") req.Header(key, value) got := req.Headers.Get(key) if got != value { t.Errorf("Expected header %s=%s not found, got %s instead", key, value, got) } }
explode_data.jsonl/24734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 4047, 1155, 353, 8840, 836, 8, 341, 23634, 1669, 330, 55, 7658, 1450, 698, 16309, 1669, 330, 7975, 698, 24395, 1669, 5548, 445, 2428, 1110, 8687, 905, 53006, 24395, 15753, 4857, 11, 897, 692, 3174, 354, 1669, 4232, 43968, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFullDynamicASCII(t *testing.T) { from := 0 to := 128 c := []string{ "a", "aa", "a?", "aa?", "aba", "abba", "bbaa", //TODO add more here } Exit: for _, chars := range c { for i := from; i < to; i++ { source := strings.Repeat(chars, i) err := compareInOut(source) if err != nil { t.Error(err) break Exit } } } }
explode_data.jsonl/71761
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 9432, 21752, 56450, 1155, 353, 8840, 836, 8, 341, 42727, 1669, 220, 15, 198, 31709, 1669, 220, 16, 17, 23, 198, 1444, 1669, 3056, 917, 515, 197, 197, 56693, 497, 330, 5305, 497, 330, 64, 31011, 330, 5305, 35718, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestClient(t *testing.T) { connectionString := os.Getenv("APPCONFIGURATION_CONNECTION_STRING") if connectionString == "" { // This test does run as live test, when the azure template is deployed, // and then the corresponding environment variable is set. t.Skip("Skipping client test") } key := "key" label := "label" value := "value" client, err := NewClientFromConnectionString(connectionString, nil) require.NoError(t, err) require.NotEmpty(t, client) addResp, err2 := client.AddSetting(context.TODO(), Setting{Key: &key, Label: &label, Value: &value}, nil) require.NoError(t, err2) require.NotEmpty(t, addResp) require.NotNil(t, addResp.Key) require.NotNil(t, addResp.Label) require.NotNil(t, addResp.Value) require.Equal(t, key, *addResp.Key) require.Equal(t, label, *addResp.Label) require.Equal(t, value, *addResp.Value) getResp, err3 := client.GetSetting(context.TODO(), Setting{Key: &key, Label: &label}, nil) require.NoError(t, err3) require.NotEmpty(t, getResp) require.NotNil(t, getResp.Key) require.NotNil(t, getResp.Label) require.NotNil(t, getResp.Value) require.Equal(t, key, *getResp.Key) require.Equal(t, label, *getResp.Label) require.Equal(t, value, *getResp.Value) value = "value2" setResp, err4 := client.SetSetting(context.TODO(), Setting{Key: &key, Label: &label, Value: &value}, nil) require.NoError(t, err4) require.NotEmpty(t, setResp) require.NotNil(t, setResp.Key) require.NotNil(t, setResp.Label) require.NotNil(t, setResp.Value) require.Equal(t, key, *setResp.Key) require.Equal(t, label, *setResp.Label) require.Equal(t, value, *setResp.Value) roResp, err5 := client.SetReadOnly(context.TODO(), Setting{Key: &key, Label: &label}, true, nil) require.NoError(t, err5) require.NotEmpty(t, roResp) require.NotNil(t, roResp.Key) require.NotNil(t, roResp.Label) require.NotNil(t, roResp.Value) require.NotNil(t, roResp.IsReadOnly) require.Equal(t, key, *roResp.Key) require.Equal(t, label, *roResp.Label) require.Equal(t, value, *roResp.Value) require.True(t, 
*roResp.IsReadOnly) roResp2, err6 := client.SetReadOnly(context.TODO(), Setting{Key: &key, Label: &label}, false, nil) require.NoError(t, err6) require.NotEmpty(t, roResp2) require.NotNil(t, roResp2.Key) require.NotNil(t, roResp2.Label) require.NotNil(t, roResp2.Value) require.NotNil(t, roResp2.IsReadOnly) require.Equal(t, key, *roResp2.Key) require.Equal(t, label, *roResp2.Label) require.Equal(t, value, *roResp2.Value) require.False(t, *roResp2.IsReadOnly) any := "*" revPgr := client.ListRevisions(SettingSelector{KeyFilter: &any, LabelFilter: &any, Fields: AllSettingFields()}, nil) require.NotEmpty(t, revPgr) hasMore := revPgr.More() require.True(t, hasMore) revResp, err7 := revPgr.NextPage(context.TODO()) require.NoError(t, err7) require.NotEmpty(t, revResp) require.Equal(t, key, *revResp.Settings[0].Key) require.Equal(t, label, *revResp.Settings[0].Label) delResp, err8 := client.DeleteSetting(context.TODO(), Setting{Key: &key, Label: &label}, nil) require.NoError(t, err8) require.NotEmpty(t, delResp) require.NotNil(t, delResp.Key) require.NotNil(t, delResp.Label) require.NotNil(t, delResp.Value) require.Equal(t, key, *delResp.Key) require.Equal(t, label, *delResp.Label) require.Equal(t, value, *delResp.Value) }
explode_data.jsonl/38129
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1339 }
[ 2830, 3393, 2959, 1155, 353, 8840, 836, 8, 341, 54590, 703, 1669, 2643, 64883, 445, 2537, 4872, 711, 6373, 28328, 40708, 12283, 1138, 743, 62084, 621, 1591, 341, 197, 197, 322, 1096, 1273, 1558, 1598, 438, 3887, 1273, 11, 979, 279, 76...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestResetSessionTimerLocked_Renew(t *testing.T) { dir1, s1 := testServer(t) defer os.RemoveAll(dir1) defer s1.Shutdown() testrpc.WaitForLeader(t, s1.RPC, "dc1") s1.sessionTimersLock.Lock() s1.resetSessionTimerLocked("foo", 5*time.Millisecond) s1.sessionTimersLock.Unlock() if _, ok := s1.sessionTimers["foo"]; !ok { t.Fatalf("missing timer") } time.Sleep(5 * time.Millisecond) // Renew the session s1.sessionTimersLock.Lock() renew := time.Now() s1.resetSessionTimerLocked("foo", 5*time.Millisecond) s1.sessionTimersLock.Unlock() // Watch for invalidation for time.Now().Sub(renew) < 20*time.Millisecond { s1.sessionTimersLock.Lock() _, ok := s1.sessionTimers["foo"] s1.sessionTimersLock.Unlock() if !ok { end := time.Now() if end.Sub(renew) < 5*time.Millisecond { t.Fatalf("early invalidate") } return } time.Sleep(time.Millisecond) } t.Fatalf("should have expired") }
explode_data.jsonl/35332
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 397 }
[ 2830, 3393, 14828, 5283, 10105, 49010, 2568, 268, 365, 1155, 353, 8840, 836, 8, 341, 48532, 16, 11, 274, 16, 1669, 1273, 5475, 1155, 340, 16867, 2643, 84427, 14161, 16, 340, 16867, 274, 16, 10849, 18452, 2822, 18185, 29414, 99153, 52621...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestListFiles(t *testing.T) { ts := newTestServer() if files := listFiles(ts.URL); len(files) != 2 { t.Errorf("listFiles expected 2 files, but got %d files", len(files)) } }
explode_data.jsonl/7963
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 852, 10809, 1155, 353, 8840, 836, 8, 341, 57441, 1669, 501, 2271, 5475, 741, 743, 3542, 1669, 1140, 10809, 35864, 20893, 1215, 2422, 32544, 8, 961, 220, 17, 341, 197, 3244, 13080, 445, 1607, 10809, 3601, 220, 17, 3542, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestGrantIDFromString(t *testing.T) { r := require.New(t) // Vanilla without GrantOption id := "database_name|schema|view_name|privilege|test1,test2" grant, err := grantIDFromString(id) r.NoError(err) r.Equal("database_name", grant.ResourceName) r.Equal("schema", grant.SchemaName) r.Equal("view_name", grant.ObjectName) r.Equal("privilege", grant.Privilege) r.Equal(false, grant.GrantOption) // Vanilla with GrantOption id = "database_name|schema|view_name|privilege|test1,test2|true" grant, err = grantIDFromString(id) r.NoError(err) r.Equal("database_name", grant.ResourceName) r.Equal("schema", grant.SchemaName) r.Equal("view_name", grant.ObjectName) r.Equal("privilege", grant.Privilege) r.Equal(true, grant.GrantOption) // No view id = "database_name|||privilege|" grant, err = grantIDFromString(id) r.NoError(err) r.Equal("database_name", grant.ResourceName) r.Equal("", grant.SchemaName) r.Equal("", grant.ObjectName) r.Equal("privilege", grant.Privilege) r.Equal(false, grant.GrantOption) // Bad ID -- not enough fields id = "database|name-privilege" _, err = grantIDFromString(id) r.Equal(fmt.Errorf("5 or 6 fields allowed"), err) // Bad ID -- privilege in wrong area id = "database||name-privilege" _, err = grantIDFromString(id) r.Equal(fmt.Errorf("5 or 6 fields allowed"), err) // too many fields id = "database_name|schema|view_name|privilege|false|2|too-many" _, err = grantIDFromString(id) r.Equal(fmt.Errorf("5 or 6 fields allowed"), err) // 0 lines id = "" _, err = grantIDFromString(id) r.Equal(fmt.Errorf("1 line per grant"), err) // 2 lines id = `database_name|schema|view_name|privilege database_name|schema|view_name|privilege` _, err = grantIDFromString(id) r.Equal(fmt.Errorf("1 line per grant"), err) }
explode_data.jsonl/27810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 699 }
[ 2830, 3393, 67971, 915, 44491, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 340, 197, 322, 65582, 2041, 23736, 5341, 198, 15710, 1669, 330, 12216, 1269, 91, 17349, 91, 1050, 1269, 91, 11887, 42769, 91, 1944, 16, 67296, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxnSucc(t *testing.T) { testRunner.BeforeTest(t) reqs := []txnReq{ { compare: []string{`value("key1") != "value2"`, `value("key2") != "value1"`}, ifSucess: []string{"get key1", "get key2"}, results: []string{"SUCCESS", "key1", "value1", "key2", "value2"}, }, { compare: []string{`version("key1") = "1"`, `version("key2") = "1"`}, ifSucess: []string{"get key1", "get key2", `put "key \"with\" space" "value \x23"`}, ifFail: []string{`put key1 "fail"`, `put key2 "fail"`}, results: []string{"SUCCESS", "key1", "value1", "key2", "value2", "OK"}, }, { compare: []string{`version("key \"with\" space") = "1"`}, ifSucess: []string{`get "key \"with\" space"`}, results: []string{"SUCCESS", `key "with" space`, "value \x23"}, }, } for _, cfg := range clusterTestCases { t.Run(cfg.name, func(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second) defer cancel() clus := testRunner.NewCluster(ctx, t, cfg.config) defer clus.Close() cc := clus.Client() testutils.ExecuteUntil(ctx, t, func() { if err := cc.Put("key1", "value1", config.PutOptions{}); err != nil { t.Fatalf("could not create key:%s, value:%s", "key1", "value1") } if err := cc.Put("key2", "value2", config.PutOptions{}); err != nil { t.Fatalf("could not create key:%s, value:%s", "key2", "value2") } for _, req := range reqs { resp, err := cc.Txn(req.compare, req.ifSucess, req.ifFail, config.TxnOptions{ Interactive: true, }) if err != nil { t.Errorf("Txn returned error: %s", err) } assert.Equal(t, req.results, getRespValues(resp)) } }) }) } }
explode_data.jsonl/47681
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 770 }
[ 2830, 3393, 31584, 77, 50, 14570, 1155, 353, 8840, 836, 8, 341, 18185, 19486, 31153, 2271, 1155, 340, 24395, 82, 1669, 3056, 73370, 27234, 515, 197, 197, 515, 298, 197, 18948, 25, 220, 3056, 917, 90, 63, 957, 445, 792, 16, 899, 961,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestUpdateOrderExecutionLimits(t *testing.T) { err := f.UpdateOrderExecutionLimits(context.Background(), "") if err != nil { t.Fatal(err) } cp := currency.NewPair(currency.BTC, currency.USD) limit, err := f.GetOrderExecutionLimits(asset.Spot, cp) if err != nil { t.Fatal(err) } err = limit.Conforms(33000, 0.00001, order.Limit) if !errors.Is(err, order.ErrAmountBelowMin) { t.Fatalf("expected error %v but received %v", order.ErrAmountBelowMin, err) } err = limit.Conforms(33000, 0.0001, order.Limit) if !errors.Is(err, nil) { t.Fatalf("expected error %v but received %v", nil, err) } }
explode_data.jsonl/15258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 4289, 4431, 20294, 94588, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 282, 16689, 4431, 20294, 94588, 5378, 19047, 1507, 14676, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 52018, 1669, 11413, 7121, 12443, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRegisterRoutingRetrieve(t *testing.T) { servlet := NewRetrieveServlet(&persistence.Conn{}, &retrieval.Authenticator{}, &retrieval.Signer{}) router := Router() servlet.RegisterRouting(router) expectedPaths := GetPaths(router) assert.Contains(t, expectedPaths, "/retrieve/{region:[0-9]{3}}/{day:[0-9]{5}}/{auth:.*}", "should include a retrieve path") }
explode_data.jsonl/22692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 8690, 24701, 87665, 1155, 353, 8840, 836, 8, 1476, 1903, 4560, 1669, 1532, 87665, 7438, 2099, 79, 8026, 50422, 22655, 609, 265, 8927, 831, 25233, 61393, 22655, 609, 265, 8927, 831, 41152, 261, 37790, 67009, 1669, 10554, 741, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_Mkdir(t *testing.T) { if !isTestManual { t.Skipf("%s not set", envNameTestManual) } path := "/tmp/lib-ssh-sftp-mkdir" err := testClient.Mkdir(path, nil) if err != nil { t.Fatal(err) } err = testClient.Rmdir(path) if err != nil { t.Fatal(err) } }
explode_data.jsonl/66414
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 2959, 1245, 12438, 1155, 353, 8840, 836, 8, 341, 743, 753, 285, 2271, 52092, 341, 197, 3244, 57776, 69, 4430, 82, 537, 738, 497, 6105, 675, 2271, 52092, 340, 197, 630, 26781, 1669, 3521, 5173, 8194, 12, 25537, 1331, 25068,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCAConfig(t *testing.T) { //Test config configPath := filepath.Join(getConfigPath(), configTestFile) backend, err := config.FromFile(configPath)() if err != nil { t.Fatal("Failed to get config backend") } endpointConfig, err := ConfigFromBackend(backend...) if err != nil { t.Fatal("Failed to get identity config") } //Test Crypto config path val, _ := backend[0].Lookup("client.cryptoconfig.path") assert.True(t, pathvar.Subst(val.(string)) == endpointConfig.CryptoConfigPath(), "Incorrect crypto config path", t) //Testing MSPID mspID, ok := comm.MSPID(endpointConfig, org1) assert.True(t, ok, "Get MSP ID failed") assert.True(t, mspID == "Org1MSP", "Get MSP ID failed") // testing empty OrgMSP _, ok = comm.MSPID(endpointConfig, "dummyorg1") assert.False(t, ok, "Get MSP ID did not fail for dummyorg1") }
explode_data.jsonl/34100
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 310 }
[ 2830, 3393, 5049, 2648, 1155, 353, 8840, 836, 8, 341, 197, 322, 2271, 2193, 198, 25873, 1820, 1669, 26054, 22363, 5433, 2648, 1820, 1507, 2193, 2271, 1703, 340, 197, 20942, 11, 1848, 1669, 2193, 11439, 1703, 8754, 1820, 8, 741, 743, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestClient_On_Middleware(t *testing.T) { c := New(Config{ BotToken: "testing", DisableCache: true, Cache: &CacheNop{}, }) defer close(c.dispatcher.shutdown) dispatcher := c.dispatcher input := make(chan *gateway.Event) go c.demultiplexer(dispatcher, input) const prefix = "this cool prefix" var mdlwHasBotPrefix Middleware = func(evt interface{}) interface{} { msg := (evt.(*MessageCreate)).Message if strings.HasPrefix(msg.Content, prefix) { return evt } return nil } var mdlwHasDifferentPrefix Middleware = func(evt interface{}) interface{} { msg := (evt.(*MessageCreate)).Message if strings.HasPrefix(msg.Content, "random unknown prefix") { return evt } return nil } wg := sync.WaitGroup{} c.Gateway().MessageCreate(func(_ Session, _ *MessageCreate) { wg.Done() }) c.Gateway().WithMiddleware(mdlwHasBotPrefix).MessageCreate(func(_ Session, _ *MessageCreate) { wg.Done() }) c.Gateway().WithMiddleware(mdlwHasDifferentPrefix).MessageCreate(func(_ Session, _ *MessageCreate) { wg.Done() }) wg.Add(2) input <- &gateway.Event{Name: EvtMessageCreate, Data: []byte(`{"content":"` + prefix + ` testing"}`)} input <- &gateway.Event{Name: EvtReady, Data: []byte(`{"content":"testing"}`)} wg.Wait() }
explode_data.jsonl/41655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 493 }
[ 2830, 3393, 2959, 35482, 1245, 11603, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1532, 33687, 515, 197, 12791, 354, 3323, 25, 257, 330, 8840, 756, 197, 197, 25479, 8233, 25, 830, 345, 197, 6258, 1777, 25, 286, 609, 8233, 45, 453, 388...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestImmutableForEachStopIterator(t *testing.T) { t.Parallel() //插入几把钥匙。 numItems := 10 testTreap := NewImmutable() for i := 0; i < numItems; i++ { key := serializeUint32(uint32(i)) testTreap = testTreap.Put(key, key) } //确保foreach在调用方错误返回时提前退出。 var numIterated int testTreap.ForEach(func(k, v []byte) bool { numIterated++ return numIterated != numItems/2 }) if numIterated != numItems/2 { t.Fatalf("ForEach: unexpected iterate count - got %d, want %d", numIterated, numItems/2) } }
explode_data.jsonl/6141
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 249 }
[ 2830, 3393, 58890, 91392, 10674, 11951, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 322, 114731, 99195, 99360, 107605, 8997, 22431, 4353, 1669, 220, 16, 15, 198, 18185, 65775, 391, 1669, 1532, 58890, 741, 2023, 600, 1669, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCatalog_ListServices_Timeout(t *testing.T) { t.Parallel() dir1, s1 := testServer(t) defer os.RemoveAll(dir1) defer s1.Shutdown() codec := rpcClient(t, s1) defer codec.Close() args := structs.DCSpecificRequest{ Datacenter: "dc1", } var out structs.IndexedServices testrpc.WaitForLeader(t, s1.RPC, "dc1") // Run the query if err := msgpackrpc.CallWithCodec(codec, "Catalog.ListServices", &args, &out); err != nil { t.Fatalf("err: %v", err) } // Setup a blocking query args.MinQueryIndex = out.Index args.MaxQueryTime = 100 * time.Millisecond // Re-run the query start := time.Now() out = structs.IndexedServices{} if err := msgpackrpc.CallWithCodec(codec, "Catalog.ListServices", &args, &out); err != nil { t.Fatalf("err: %v", err) } // Should block at least 100ms if time.Since(start) < 100*time.Millisecond { t.Fatalf("too fast") } // Check the indexes, should not change if out.Index != args.MinQueryIndex { t.Fatalf("bad: %v", out) } }
explode_data.jsonl/49231
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 41606, 27104, 11025, 39080, 411, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 48532, 16, 11, 274, 16, 1669, 1273, 5475, 1155, 340, 16867, 2643, 84427, 14161, 16, 340, 16867, 274, 16, 10849, 18452, 741, 43343, 66, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSessionCombinedOutput(t *testing.T) { conn := dial(fixedOutputHandler, t) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("Unable to request new session: %v", err) } defer session.Close() buf, err := session.CombinedOutput("") // cmd is ignored by fixedOutputHandler if err != nil { t.Error("Remote command did not exit cleanly:", err) } const stdout = "this-is-stdout." const stderr = "this-is-stderr." g := string(buf) if g != stdout+stderr && g != stderr+stdout { t.Error("Remote command did not return expected string:") t.Logf("want %q, or %q", stdout+stderr, stderr+stdout) t.Logf("got %q", g) } }
explode_data.jsonl/34795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 5283, 94268, 5097, 1155, 353, 8840, 836, 8, 341, 32917, 1669, 27860, 955, 3286, 5097, 3050, 11, 259, 340, 16867, 4534, 10421, 741, 25054, 11, 1848, 1669, 4534, 7121, 5283, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTerragruntWorksWithExistingGCSBucket(t *testing.T) { t.Parallel() cleanupTerraformFolder(t, TEST_FIXTURE_GCS_BYO_BUCKET_PATH) // We need a project to create the bucket in, so we pull one from the recommended environment variable. project := os.Getenv("GOOGLE_CLOUD_PROJECT") gcsBucketName := fmt.Sprintf("terragrunt-test-bucket-%s", strings.ToLower(uniqueId())) defer deleteGCSBucket(t, gcsBucketName) // manually create the GCS bucket outside the US (default) to test Terragrunt works correctly with an existing bucket. location := TERRAFORM_REMOTE_STATE_GCP_REGION createGCSBucket(t, project, location, gcsBucketName) tmpTerragruntGCSConfigPath := createTmpTerragruntGCSConfig(t, TEST_FIXTURE_GCS_BYO_BUCKET_PATH, project, TERRAFORM_REMOTE_STATE_GCP_REGION, gcsBucketName, config.DefaultTerragruntConfigPath) runTerragrunt(t, fmt.Sprintf("terragrunt apply -auto-approve --terragrunt-non-interactive --terragrunt-config %s --terragrunt-working-dir %s", tmpTerragruntGCSConfigPath, TEST_FIXTURE_GCS_BYO_BUCKET_PATH)) validateGCSBucketExistsAndIsLabeled(t, location, gcsBucketName, nil) }
explode_data.jsonl/10078
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 51402, 68305, 3850, 6776, 16056, 53067, 38, 6412, 36018, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1444, 60639, 51, 13886, 627, 13682, 1155, 11, 13602, 42635, 41486, 2646, 6412, 13696, 46, 74444, 7944, 692, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_defaultModelBuilder_buildTargetGroupBindingNodeSelector(t *testing.T) { tests := []struct { testName string svc *corev1.Service targetType elbv2.TargetType want *metav1.LabelSelector wantErr error }{ { testName: "IP target empty selector", targetType: elbv2.TargetTypeIP, }, { testName: "IP Target with selector", targetType: elbv2.TargetTypeIP, svc: &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ "service.beta.kubernetes.io/aws-load-balancer-target-node-labels": "key1=value1, k2=v2", }, }, }, }, { testName: "Instance target empty selector", targetType: elbv2.TargetTypeInstance, svc: &corev1.Service{}, }, { testName: "Instance target with selector", targetType: elbv2.TargetTypeInstance, svc: &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ "service.beta.kubernetes.io/aws-load-balancer-target-node-labels": "key1=value1, key2=value.2", }, }, }, want: &metav1.LabelSelector{ MatchLabels: map[string]string{ "key1": "value1", "key2": "value.2", }, }, }, { testName: "Instance target with invalid selector", targetType: elbv2.TargetTypeInstance, svc: &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ "service.beta.kubernetes.io/aws-load-balancer-target-node-labels": "key1=value1, invalid", }, }, }, wantErr: errors.New("failed to parse stringMap annotation, service.beta.kubernetes.io/aws-load-balancer-target-node-labels: key1=value1, invalid"), }, } for _, tt := range tests { t.Run(tt.testName, func(t *testing.T) { parser := annotations.NewSuffixAnnotationParser("service.beta.kubernetes.io") builder := &defaultModelBuildTask{ annotationParser: parser, service: tt.svc, } got, err := builder.buildTargetGroupBindingNodeSelector(context.Background(), tt.targetType) if tt.wantErr != nil { assert.EqualError(t, err, tt.wantErr.Error()) } else { assert.Equal(t, tt.want, got) } }) } }
explode_data.jsonl/40061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 979 }
[ 2830, 3393, 9993, 1712, 3297, 20801, 6397, 2808, 15059, 1955, 5877, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 18185, 675, 256, 914, 198, 197, 1903, 7362, 286, 353, 98645, 16, 13860, 198, 197, 28861, 929, 655, 54...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOverrideMetricsURL(t *testing.T) { const DebugEndpoint string = "http://localhost:8080" cfg := Config{ API: APIConfig{Key: "notnull", Site: DefaultSite}, Metrics: MetricsConfig{ TCPAddr: confignet.TCPAddr{ Endpoint: DebugEndpoint, }, }, } err := cfg.Sanitize() require.NoError(t, err) assert.Equal(t, cfg.Metrics.Endpoint, DebugEndpoint) }
explode_data.jsonl/32912
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 2177, 27328, 3144, 1155, 353, 8840, 836, 8, 1476, 4777, 11091, 27380, 914, 284, 330, 1254, 1110, 8301, 25, 23, 15, 23, 15, 1837, 50286, 1669, 5532, 515, 197, 197, 7082, 25, 5333, 2648, 90, 1592, 25, 330, 1921, 2921, 497,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReverseProxyTimeout(t *testing.T) { timeout := 2 * time.Second errorMargin := 100 * time.Millisecond log.SetOutput(ioutil.Discard) defer log.SetOutput(os.Stderr) // set up proxy p := &Proxy{ Next: httpserver.EmptyNext, // prevents panic in some cases when test fails Upstreams: []Upstream{newFakeUpstream("https://8.8.8.8", true, timeout)}, } // create request and response recorder r := httptest.NewRequest("GET", "/", nil) w := httptest.NewRecorder() start := time.Now() p.ServeHTTP(w, r) took := time.Since(start) if took > timeout+errorMargin { t.Errorf("Expected timeout ~ %v but got %v", timeout, took) } }
explode_data.jsonl/64228
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 45695, 16219, 7636, 1155, 353, 8840, 836, 8, 341, 78395, 1669, 220, 17, 353, 882, 32435, 198, 18290, 21681, 1669, 220, 16, 15, 15, 353, 882, 71482, 198, 6725, 4202, 5097, 1956, 30158, 909, 47560, 340, 16867, 1487, 4202, 50...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRankingRepository_UpdateAmounts(t *testing.T) { sqlHandler, cleanup := setupTestingSuite(t) defer cleanup() repo := repositories.NewRankingRepository(sqlHandler) contestID := uint64(1) userID := uint64(1) // Create initial rankings for i, language := range []domain.LanguageCode{domain.Japanese, domain.Korean, domain.Global} { ranking := domain.Ranking{ ContestID: contestID, UserID: userID, Language: language, Amount: float32(i), } err := repo.Store(ranking) assert.NoError(t, err) } // Update rankings updatedRankings := domain.Rankings{} { rankings, err := repo.FindAll(contestID, userID) assert.NoError(t, err) for _, r := range rankings { updatedRankings = append(updatedRankings, domain.Ranking{ ID: r.ID, Amount: r.Amount + 10, }) } err = repo.UpdateAmounts(updatedRankings) assert.NoError(t, err) } // Check updated content { rankings, err := repo.FindAll(contestID, userID) assert.NoError(t, err) assert.Equal(t, len(updatedRankings), len(rankings)) expectedRankings := make(map[uint64]domain.Ranking) for _, ranking := range updatedRankings { expectedRankings[ranking.ID] = ranking } for _, ranking := range rankings { assert.Equal(t, ranking.Amount, expectedRankings[ranking.ID].Amount) } } }
explode_data.jsonl/18798
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 502 }
[ 2830, 3393, 22550, 287, 4624, 47393, 10093, 82, 1155, 353, 8840, 836, 8, 341, 30633, 3050, 11, 21290, 1669, 6505, 16451, 28000, 1155, 340, 16867, 21290, 2822, 17200, 5368, 1669, 49657, 7121, 22550, 287, 4624, 13148, 3050, 692, 197, 65717,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAbsCollection_Each(t *testing.T) { intColl := NewIntCollection([]int{1, 2, 3, 4}) sum := 0 intColl.Each(func(item interface{}, key int) { v := item.(int) sum = sum + v }) if intColl.Err() != nil { t.Fatal(intColl.Err()) } if sum != 10 { t.Fatal("Each 错误") } sum = 0 intColl.Each(func(item interface{}, key int) { v := item.(int) sum = sum + v if sum > 4 { intColl.SetErr(errors.New("stop the cycle")) return } }) if sum != 6 { t.Fatal("Each 错误") } }
explode_data.jsonl/66438
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 27778, 6482, 2089, 610, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 3518, 31479, 1669, 220, 15, 198, 2084, 15265, 13, 4854, 18552, 5393, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFilterAddCrossProtocol(t *testing.T) { data := []byte{0x01, 0x02} msg := btcwire.NewMsgFilterAdd(data) if !bytes.Equal(msg.Data, data) { t.Errorf("should get same data back out") } // Encode with latest protocol version. var buf bytes.Buffer err := msg.BtcEncode(&buf, btcwire.ProtocolVersion) if err != nil { t.Errorf("encode of MsgFilterAdd failed %v err <%v>", msg, err) } // Decode with old protocol version. var readmsg btcwire.MsgFilterAdd err = readmsg.BtcDecode(&buf, btcwire.BIP0031Version) if err == nil { t.Errorf("decode of MsgFilterAdd succeeded when it shouldn't "+ "have %v", msg) } // Since one of the protocol versions doesn't support the filteradd // message, make sure the data didn't get encoded and decoded back out. if bytes.Equal(msg.Data, readmsg.Data) { t.Error("should not get same data for cross protocol") } }
explode_data.jsonl/52858
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 316 }
[ 2830, 3393, 5632, 2212, 28501, 20689, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 90, 15, 87, 15, 16, 11, 220, 15, 87, 15, 17, 532, 21169, 1669, 86037, 35531, 7121, 6611, 5632, 2212, 2592, 340, 743, 753, 9651, 12808, 8119,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSchema_SkipInitialScan(t *testing.T) { c := createClient("skip-initial-scan-test") flush(c) // check RediSearch version version, err := c.getRediSearchVersion() assert.Nil(t, err) // This feature is only available since RediSearch >= v2.0 if version <= 10699 { return } vanillaConnection := c.pool.Get() _, err = vanillaConnection.Do("HSET", "create-index-info:doc1", "name", "Jon", "age", 25) assert.Nil(t, err) q := NewQuery("@name:Jon") schema1 := NewSchema(DefaultOptions).AddField(NewTextField("name")) schema2 := NewSchema(Options{SkipInitialScan: true}).AddField(NewTextField("name")) indexDefinition := NewIndexDefinition() c = createClient("skip-initial-scan-test-scan") c.CreateIndexWithIndexDefinition(schema1, indexDefinition) assert.Nil(t, err) // Wait for all documents to be indexed info, err := c.Info() assert.Nil(t, err) for info.IsIndexing { time.Sleep(time.Second) info, _ = c.Info() } _, total, err := c.Search(q) assert.Nil(t, err) assert.Equal(t, 1, total) c = createClient("skip-initial-scan-test-skip-scan") c.CreateIndexWithIndexDefinition(schema2, indexDefinition) assert.Nil(t, err) _, total, err = c.Search(q) assert.Nil(t, err) assert.Equal(t, 0, total) }
explode_data.jsonl/72801
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 466 }
[ 2830, 3393, 8632, 1098, 13389, 6341, 26570, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 1855, 2959, 445, 20599, 47967, 1331, 4814, 16839, 1138, 1166, 34604, 1337, 692, 197, 322, 1779, 3731, 72, 5890, 2319, 198, 74954, 11, 1848, 1669, 272,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_minioEnvironFromFile(t *testing.T) { testCases := []struct { content string expectedErr bool expectedEkvs []envKV }{ { ` export MINIO_ROOT_USER=minio export MINIO_ROOT_PASSWORD=minio123`, false, []envKV{ { Key: "MINIO_ROOT_USER", Value: "minio", }, { Key: "MINIO_ROOT_PASSWORD", Value: "minio123", }, }, }, // Value with double quotes { `export MINIO_ROOT_USER="minio"`, false, []envKV{ { Key: "MINIO_ROOT_USER", Value: "minio", }, }, }, // Value with single quotes { `export MINIO_ROOT_USER='minio'`, false, []envKV{ { Key: "MINIO_ROOT_USER", Value: "minio", }, }, }, { ` MINIO_ROOT_USER=minio MINIO_ROOT_PASSWORD=minio123`, false, []envKV{ { Key: "MINIO_ROOT_USER", Value: "minio", }, { Key: "MINIO_ROOT_PASSWORD", Value: "minio123", }, }, }, { ` export MINIO_ROOT_USERminio export MINIO_ROOT_PASSWORD=minio123`, true, nil, }, } for _, testCase := range testCases { testCase := testCase t.Run("", func(t *testing.T) { tmpfile, err := ioutil.TempFile("", "testfile") if err != nil { t.Error(err) } tmpfile.WriteString(testCase.content) tmpfile.Sync() tmpfile.Close() ekvs, err := minioEnvironFromFile(tmpfile.Name()) if err != nil && !testCase.expectedErr { t.Error(err) } if err == nil && testCase.expectedErr { t.Error(errors.New("expected error, found success")) } if !reflect.DeepEqual(ekvs, testCase.expectedEkvs) { t.Errorf("expected %v, got %v", testCase.expectedEkvs, ekvs) } }) } }
explode_data.jsonl/24506
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 884 }
[ 2830, 3393, 7260, 815, 1702, 2772, 43633, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 27751, 414, 914, 198, 197, 42400, 7747, 220, 1807, 198, 197, 42400, 41917, 11562, 3056, 3160, 82707, 198, 197, 59403, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestIntsFound(t *testing.T) { assert := assert.New(t) v := 1 a := []int{0, 1, 2} assert.Equal(1, slices.IndexOf(v, a)) }
explode_data.jsonl/18820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 61 }
[ 2830, 3393, 1072, 82, 6650, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 5195, 1669, 220, 16, 198, 11323, 1669, 3056, 396, 90, 15, 11, 220, 16, 11, 220, 17, 630, 6948, 12808, 7, 16, 11, 34254, 32770, 3747, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestReceiveTicket_InvalidRecipientRand_InvalidSeed(t *testing.T) { sender, b, v, ts, faceValue, winProb, sig := newRecipientFixtureOrFatal(t) r := newRecipientOrFatal(t, RandAddress(), b, v, ts, faceValue, winProb) params := r.TicketParams(sender) // Test invalid recipientRand from seed (invalid seed) ticket := newTicket(sender, params, 0) // Using invalid seed invalidSeed := new(big.Int).Add(params.Seed, big.NewInt(99)) _, _, err := r.ReceiveTicket(ticket, sig, invalidSeed) if err == nil { t.Error("expected invalid recipientRand generated from seed error") } if err != nil && !strings.Contains(err.Error(), "invalid recipientRand generated from seed") { t.Errorf("expected invalid recipientRand generated from seed error, got %v", err) } }
explode_data.jsonl/44750
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 14742, 34058, 62, 7928, 74432, 56124, 62, 7928, 41471, 1155, 353, 8840, 836, 8, 341, 1903, 1659, 11, 293, 11, 348, 11, 10591, 11, 3579, 1130, 11, 3164, 36980, 11, 8366, 1669, 501, 74432, 18930, 2195, 62396, 1155, 340, 7000...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestJWKsCache_GetKey(t *testing.T) { t.Run("should fetch, cache and return valid key", func(t *testing.T) { // GIVEN handler := http.HandlerFunc(mockValidJWKsHandler(t)) httpClient, teardown := testingHTTPClient(handler) defer teardown() restoreHTTPClient := setHTTPClient(httpClient) defer restoreHTTPClient() jwksFetch := NewJWKsFetch() jwksCache := NewJWKsCache(jwksFetch, cachePeriod) token := createToken() logger, hook := logrustest.NewNullLogger() ctx := log.ContextWithLogger(context.TODO(), logrus.NewEntry(logger)) // WHEN key, err := jwksCache.GetKey(ctx, token) // THEN require.NoError(t, err) require.Equal(t, 1, len(jwksCache.cache)) require.NotNil(t, key) require.Equal(t, 1, len(hook.Entries)) require.Equal(t, "Adding key 67bf0153-a6dc-4f06-9ce4-2f203b79adc8 to cache", hook.LastEntry().Message) }) t.Run("should fetch, cache and return valid key, second call should return from cache", func(t *testing.T) { // GIVEN handler := http.HandlerFunc(mockValidJWKsHandler(t)) httpClient, teardown := testingHTTPClient(handler) defer teardown() restoreHTTPClient := setHTTPClient(httpClient) defer restoreHTTPClient() jwksFetch := NewJWKsFetch() jwksCache := NewJWKsCache(jwksFetch, cachePeriod) token := createToken() logger, hook := logrustest.NewNullLogger() ctx := log.ContextWithLogger(context.TODO(), logrus.NewEntry(logger)) // WHEN _, err := jwksCache.GetKey(ctx, token) require.NoError(t, err) key, err := jwksCache.GetKey(ctx, token) // THEN require.NoError(t, err) require.Equal(t, 1, len(jwksCache.cache)) require.NotNil(t, key) require.Equal(t, 2, len(hook.Entries)) require.Equal(t, "Adding key 67bf0153-a6dc-4f06-9ce4-2f203b79adc8 to cache", hook.Entries[0].Message) require.Equal(t, "Using key 67bf0153-a6dc-4f06-9ce4-2f203b79adc8 from cache", hook.Entries[1].Message) }) t.Run("should return error when token is nil", func(t *testing.T) { // GIVEN jwksFetch := NewJWKsFetch() jwksCache := NewJWKsCache(jwksFetch, cachePeriod) // WHEN _, err := 
jwksCache.GetKey(context.TODO(), nil) // THEN require.EqualError(t, err, apperrors.NewUnauthorizedError("token cannot be nil").Error()) }) t.Run("should return error when unable to get token key ID", func(t *testing.T) { // GIVEN token := &jwt.Token{} jwksFetch := NewJWKsFetch() jwksCache := NewJWKsCache(jwksFetch, cachePeriod) // WHEN _, err := jwksCache.GetKey(context.TODO(), token) // THEN require.EqualError(t, err, "while getting the key ID: Internal Server Error: unable to find the key ID in the token") }) t.Run("should return error when unable to get key from remote server", func(t *testing.T) { // GIVEN handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(404) }) httpClient, teardown := testingHTTPClient(handler) defer teardown() restoreHTTPClient := setHTTPClient(httpClient) defer restoreHTTPClient() jwksFetch := NewJWKsFetch() jwksCache := NewJWKsCache(jwksFetch, cachePeriod) token := createToken() // WHEN _, err := jwksCache.GetKey(context.TODO(), token) // THEN require.EqualError(t, err, "while getting the key with ID [kid=67bf0153-a6dc-4f06-9ce4-2f203b79adc8]: while getting the JWKs URI: while decoding the configuration discovery response: EOF") }) }
explode_data.jsonl/29081
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1365 }
[ 2830, 3393, 41, 68316, 82, 8233, 13614, 1592, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 5445, 7807, 11, 6500, 323, 470, 2697, 1376, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 197, 322, 89836, 198, 197, 53326, 1669, 1758, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetData(t *testing.T) { pver := ProtocolVersion // Ensure the command is expected value. wantCmd := "getdata" msg := NewMsgGetData() if cmd := msg.Command(); cmd != wantCmd { t.Errorf("NewMsgGetData: wrong command - got %v want %v", cmd, wantCmd) } // Ensure max payload is expected value for latest protocol version. // Num inventory vectors (varInt) + max allowed inventory vectors. wantPayload := uint32(1800009) maxPayload := msg.MaxPayloadLength(pver) if maxPayload != wantPayload { t.Errorf("MaxPayloadLength: wrong max payload length for "+ "protocol version %d - got %v, want %v", pver, maxPayload, wantPayload) } // Ensure inventory vectors are added properly. hash := chainhash.Hash{} iv := NewInvVect(InvTypeBlock, &hash) err := msg.AddInvVect(iv) if err != nil { t.Errorf("AddInvVect: %v", err) } if msg.InvList[0] != iv { t.Errorf("AddInvVect: wrong invvect added - got %v, want %v", spew.Sprint(msg.InvList[0]), spew.Sprint(iv)) } // Ensure adding more than the max allowed inventory vectors per // message returns an error. for i := 0; i < MaxInvPerMsg; i++ { err = msg.AddInvVect(iv) } if err == nil { t.Errorf("AddInvVect: expected error on too many inventory " + "vectors not received") } // Ensure creating the message with a size hint larger than the max // works as expected. msg = NewMsgGetDataSizeHint(MaxInvPerMsg + 1) wantCap := MaxInvPerMsg if cap(msg.InvList) != wantCap { t.Errorf("NewMsgGetDataSizeHint: wrong cap for size hint - "+ "got %v, want %v", cap(msg.InvList), wantCap) } }
explode_data.jsonl/76187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 577 }
[ 2830, 3393, 68957, 1155, 353, 8840, 836, 8, 341, 3223, 423, 1669, 24572, 5637, 271, 197, 322, 29279, 279, 3210, 374, 3601, 897, 624, 50780, 15613, 1669, 330, 455, 691, 698, 21169, 1669, 1532, 6611, 68957, 741, 743, 5439, 1669, 3750, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestPacketJSON(t *testing.T) { packet := &Packet{ Project: "1", EventID: "2", Platform: "linux", Culprit: "caused_by", ServerName: "host1", Release: "721e41770371db95eee98ca2707686226b993eda", Environment: "production", Message: "test", Timestamp: Timestamp(time.Date(2000, 01, 01, 0, 0, 0, 0, time.UTC)), Level: ERROR, Logger: "com.getsentry.raven-go.logger-test-packet-json", Tags: []Tag{Tag{"foo", "bar"}}, Modules: map[string]string{"foo": "bar"}, Fingerprint: []string{"{{ default }}", "a-custom-fingerprint"}, Interfaces: []Interface{&Message{Message: "foo"}}, } packet.AddTags(map[string]string{"foo": "foo"}) packet.AddTags(map[string]string{"baz": "buzz"}) expected := `{"message":"test","event_id":"2","project":"1","timestamp":"2000-01-01T00:00:00.00","level":"error","logger":"com.getsentry.raven-go.logger-test-packet-json","platform":"linux","culprit":"caused_by","server_name":"host1","release":"721e41770371db95eee98ca2707686226b993eda","environment":"production","tags":[["foo","bar"],["foo","foo"],["baz","buzz"]],"modules":{"foo":"bar"},"fingerprint":["{{ default }}","a-custom-fingerprint"],"logentry":{"message":"foo"}}` j, err := packet.JSON() if err != nil { t.Fatalf("JSON marshalling should not fail: %v", err) } actual := string(j) if actual != expected { t.Errorf("incorrect json; got %s, want %s", actual, expected) } }
explode_data.jsonl/29779
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 587 }
[ 2830, 3393, 16679, 5370, 1155, 353, 8840, 836, 8, 341, 68802, 1669, 609, 16679, 515, 197, 197, 7849, 25, 257, 330, 16, 756, 197, 56055, 915, 25, 257, 330, 17, 756, 197, 197, 17296, 25, 262, 330, 14210, 756, 197, 6258, 360, 39707, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTable(t *testing.T) { dir := testutils.InitTestEnv(ModuleName, t) c, mgr, driver := initTestContext(t, dir) defer driver.Close() defer c.Close() defer mgr.Stop() schema := catalog.MockSchemaAll(3) schema.BlockMaxRows = 10000 schema.SegmentMaxBlocks = 10 schema.PrimaryKey = 2 { txn := mgr.StartTxn(nil) db, _ := txn.CreateDatabase("db") rel, _ := db.CreateRelation(schema) bat := compute.MockBatch(schema.Types(), common.K*100, int(schema.PrimaryKey), nil) bats := compute.SplitBatch(bat, 100) for _, data := range bats { err := rel.Append(data) assert.Nil(t, err) } tbl, _ := txn.GetStore().(*txnStore).getOrSetTable(rel.ID()) tbl.RangeDeleteLocalRows(1024+20, 1024+30) tbl.RangeDeleteLocalRows(1024*2+38, 1024*2+40) assert.True(t, tbl.IsLocalDeleted(1024+20)) assert.True(t, tbl.IsLocalDeleted(1024+30)) assert.False(t, tbl.IsLocalDeleted(1024+19)) assert.False(t, tbl.IsLocalDeleted(1024+31)) err := txn.Commit() assert.Nil(t, err) } }
explode_data.jsonl/14639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 438 }
[ 2830, 3393, 2556, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 1273, 6031, 26849, 2271, 14359, 75295, 675, 11, 259, 340, 1444, 11, 57897, 11, 5579, 1669, 2930, 2271, 1972, 1155, 11, 5419, 340, 16867, 5579, 10421, 741, 16867, 272, 10421, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreateSnapshot(t *testing.T) { fctesting.RequiresRoot(t) cases := []struct { name string createSnapshot func(m *Machine, ctx context.Context, memPath, snapPath string) }{ { name: "CreateSnapshot", createSnapshot: func(m *Machine, ctx context.Context, memPath, snapPath string) { err := m.PauseVM(ctx) require.NoError(t, err) err = m.CreateSnapshot(ctx, memPath, snapPath) require.NoError(t, err) }, }, { name: "CreateSnapshot before pause", createSnapshot: func(m *Machine, ctx context.Context, memPath, snapPath string) { err := m.CreateSnapshot(ctx, memPath, snapPath) require.Error(t, err) }, }, } for _, c := range cases { t.Run(c.name, func(t *testing.T) { ctx := context.Background() socketPath := filepath.Join(testDataPath, fsSafeTestName.Replace(t.Name())) snapPath := socketPath + "SnapFile" memPath := socketPath + "MemFile" defer os.Remove(socketPath) defer os.Remove(snapPath) defer os.Remove(memPath) // Tee logs for validation: var logBuffer bytes.Buffer machineLogger := logrus.New() machineLogger.Out = io.MultiWriter(os.Stderr, &logBuffer) cfg := createValidConfig(t, socketPath) m, err := NewMachine(ctx, cfg, func(m *Machine) { // Rewriting m.cmd partially wouldn't work since Cmd has // some unexported members args := m.cmd.Args[1:] m.cmd = exec.Command(getFirecrackerBinaryPath(), args...) }, WithLogger(logrus.NewEntry(machineLogger))) require.NoError(t, err) err = m.Start(ctx) require.NoError(t, err) c.createSnapshot(m, ctx, memPath, snapPath) err = m.StopVMM() require.NoError(t, err) }) } }
explode_data.jsonl/70450
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 692 }
[ 2830, 3393, 4021, 15009, 1155, 353, 8840, 836, 8, 341, 1166, 302, 59855, 85012, 8439, 1155, 692, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 39263, 15009, 2915, 1255, 353, 21605, 11, 5635, 2266, 9328, 11, 1833, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteRBACsForNamespace(t *testing.T) { a := makeTestArgoCD() testClient := testclient.NewSimpleClientset() testNameSpace := "testNameSpace" role := newRole("xyz", policyRuleForApplicationController(), a) role.Namespace = testNameSpace // create role with label _, err := testClient.RbacV1().Roles(testNameSpace).Create(context.TODO(), role, metav1.CreateOptions{}) assert.NilError(t, err) role2 := newRole("abc", policyRuleForApplicationController(), a) role2.Namespace = testNameSpace role2.Labels = map[string]string{} // create role without label _, err = testClient.RbacV1().Roles(testNameSpace).Create(context.TODO(), role2, metav1.CreateOptions{}) assert.NilError(t, err) roleBinding := newRoleBindingWithname("xyz", a) roleBinding.Namespace = testNameSpace // create roleBinding with label _, err = testClient.RbacV1().RoleBindings(testNameSpace).Create(context.TODO(), roleBinding, metav1.CreateOptions{}) assert.NilError(t, err) roleBinding2 := newRoleBindingWithname("abc", a) roleBinding2.Namespace = testNameSpace roleBinding2.Labels = map[string]string{} // create RoleBinding without label _, err = testClient.RbacV1().RoleBindings(testNameSpace).Create(context.TODO(), roleBinding2, metav1.CreateOptions{}) assert.NilError(t, err) secret := argoutil.NewSecretWithSuffix(a.ObjectMeta, "xyz") secret.Labels = map[string]string{common.ArgoCDSecretTypeLabel: "cluster"} secret.Data = map[string][]byte{ "server": []byte(common.ArgoCDDefaultServer), "namespaces": []byte(strings.Join([]string{testNameSpace, "testNamespace2"}, ",")), } // create secret with the label _, err = testClient.CoreV1().Secrets(a.Namespace).Create(context.TODO(), secret, metav1.CreateOptions{}) assert.NilError(t, err) // run deleteRBACsForNamespace assert.NilError(t, deleteRBACsForNamespace(a.Namespace, testNameSpace, testClient)) // role with the label should be deleted _, err = testClient.RbacV1().Roles(testNameSpace).Get(context.TODO(), role.Name, metav1.GetOptions{}) assert.ErrorContains(t, 
err, "not found") // role without the label should still exists, no error _, err = testClient.RbacV1().Roles(testNameSpace).Get(context.TODO(), role2.Name, metav1.GetOptions{}) assert.NilError(t, err) // roleBinding with the label should be deleted _, err = testClient.RbacV1().Roles(testNameSpace).Get(context.TODO(), roleBinding.Name, metav1.GetOptions{}) assert.ErrorContains(t, err, "not found") // roleBinding without the label should still exists, no error _, err = testClient.RbacV1().Roles(testNameSpace).Get(context.TODO(), roleBinding2.Name, metav1.GetOptions{}) assert.NilError(t, err) // secret should still exists with updated list of namespaces s, err := testClient.CoreV1().Secrets(a.Namespace).Get(context.TODO(), secret.Name, metav1.GetOptions{}) assert.NilError(t, err) assert.DeepEqual(t, string(s.Data["namespaces"]), "testNamespace2") }
explode_data.jsonl/11937
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1043 }
[ 2830, 3393, 6435, 29259, 1706, 82, 2461, 22699, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 1281, 2271, 2735, 78, 6484, 741, 18185, 2959, 1669, 1273, 2972, 7121, 16374, 2959, 746, 741, 18185, 675, 9914, 1669, 330, 1944, 675, 9914, 1837, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertStruct(t *testing.T) { lines, err := convertTypes( "Foo", "Bar", `struct NestedFoo { 1: required string one 2: optional string two } struct NestedBar { 1: required string one 2: optional string two } struct Foo { 3: optional NestedFoo three 4: required NestedFoo four } struct Bar { 3: optional NestedBar three 4: required NestedBar four }`, nil, nil, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` if in.Three != nil { out.Three = &structs.NestedBar{} out.Three.One = string(in.Three.One) out.Three.Two = (*string)(in.Three.Two) } else { out.Three = nil } if in.Four != nil { out.Four = &structs.NestedBar{} out.Four.One = string(in.Four.One) out.Four.Two = (*string)(in.Four.Two) } else { out.Four = nil } `), lines) }
explode_data.jsonl/62034
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 371 }
[ 2830, 3393, 12012, 9422, 1155, 353, 8840, 836, 8, 341, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, 63, 1235, 71742, 40923, 341, 298, 197, 16, 25, 2567, 914, 825, 198, 298, 197, 17, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHttpParser_RedactAuthorization(t *testing.T) { logp.TestingSetup(logp.WithSelectors("http", "httpdetailed")) http := httpModForTests(nil) http.redactAuthorization = true http.parserConfig.sendHeaders = true http.parserConfig.sendAllHeaders = true data := []byte("POST /services/ObjectControl?ID=client0 HTTP/1.1\r\n" + "User-Agent: Mozilla/4.0 (compatible; MSIE 6.0; MS Web Services Client Protocol 2.0.50727.5472)\r\n" + "Content-Type: text/xml; charset=utf-8\r\n" + "SOAPAction: \"\"\r\n" + "Authorization: Basic ZHVtbXk6NmQlc1AwOC1XemZ3Cg\r\n" + "Proxy-Authorization: Basic cHJveHk6MWM3MGRjM2JhZDIwCg==\r\n" + "Host: production.example.com\r\n" + "Content-Length: 0\r\n" + "Expect: 100-continue\r\n" + "Accept-Encoding: gzip\r\n" + "X-Forwarded-For: 10.216.89.132\r\n" + "\r\n") st := &stream{data: data, message: new(message)} ok, _ := testParseStream(http, st, 0) st.message.raw = st.data[st.message.start:] http.hideHeaders(st.message) msg := st.message.raw assert.True(t, ok) assert.Equal(t, "*", string(st.message.headers["authorization"])) authPattern, _ := regexp.Compile(`(?m)^[Aa]uthorization:\*+`) authObscured := authPattern.Match(msg) assert.True(t, authObscured) assert.Equal(t, "*", string(st.message.headers["proxy-authorization"])) proxyPattern, _ := regexp.Compile(`(?m)^[Pp]roxy-[Aa]uthorization:\*+`) proxyObscured := proxyPattern.Match(msg) assert.True(t, proxyObscured) }
explode_data.jsonl/16510
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 624 }
[ 2830, 3393, 2905, 6570, 92940, 531, 18124, 1155, 353, 8840, 836, 8, 341, 6725, 79, 8787, 287, 21821, 12531, 79, 26124, 96995, 445, 1254, 497, 330, 1254, 67, 10111, 28075, 28080, 1669, 1758, 4459, 2461, 18200, 27907, 340, 28080, 17335, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFetch_localDir(t *testing.T) { wd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := FetchNative(wd); err != nil { t.Fatalf("fetchEngine failed: %s", err) } }
explode_data.jsonl/15513
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 20714, 13564, 6184, 1155, 353, 8840, 836, 8, 341, 197, 6377, 11, 1848, 1669, 2643, 2234, 6377, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 743, 1848, 1669, 22104, 20800, 3622, 67, 1215, 1848, 961...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func Test_Hoverfly_SetMiddleware_CanSetBinaryAndScript(t *testing.T) { RegisterTestingT(t) unit := NewHoverflyWithConfiguration(&Configuration{}) err := unit.SetMiddleware("python", pythonMiddlewareBasic, "") Expect(err).To(BeNil()) Expect(unit.Cfg.Middleware.Binary).To(Equal("python")) script, err := unit.Cfg.Middleware.GetScript() Expect(script).To(Equal(pythonMiddlewareBasic)) Expect(err).To(BeNil()) }
explode_data.jsonl/45385
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 2039, 1975, 21642, 14812, 24684, 920, 276, 1649, 21338, 3036, 5910, 1155, 353, 8840, 836, 8, 341, 79096, 16451, 51, 1155, 692, 81189, 1669, 1532, 34379, 21642, 2354, 7688, 2099, 7688, 6257, 692, 9859, 1669, 4982, 4202, 24684, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContextRenderPureJSON(t *testing.T) { w := httptest.NewRecorder() c, _ := CreateTestContext(w) c.PureJSON(http.StatusCreated, H{"foo": "bar", "html": "<b>"}) assert.Equal(t, http.StatusCreated, w.Code) assert.Equal(t, "{\"foo\":\"bar\",\"html\":\"<b>\"}\n", w.Body.String()) assert.Equal(t, "application/json; charset=utf-8", w.Header().Get("Content-Type")) }
explode_data.jsonl/26780
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 1972, 6750, 57613, 5370, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 54320, 70334, 7121, 47023, 741, 1444, 11, 716, 1669, 4230, 2271, 1972, 3622, 340, 1444, 1069, 552, 5370, 19886, 10538, 11694, 11, 472, 4913, 7975, 788, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Unexported(t *testing.T) { c1 := NewLidi(Settings{}) type A struct{} type B struct { a *A `lidi:"inject()"` } if err := c1.Provide(&A{}); err != nil { t.Fatal(err) } if err := c1.Provide(&B{}); err != nil { if err.Error() != fmt.Sprintf("lidi: cannot inject service in unexported field '%s'", reflect.TypeOf(&A{}).String()) { t.Fatal("Not Equal") } } else { t.Fatal(err) } }
explode_data.jsonl/40206
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 40687, 1533, 291, 1155, 353, 8840, 836, 8, 341, 1444, 16, 1669, 1532, 43, 12278, 57395, 6257, 692, 13158, 362, 2036, 16094, 13158, 425, 2036, 341, 197, 11323, 353, 32, 1565, 75, 12278, 2974, 32133, 368, 8805, 197, 532, 743...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDetach(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() MockDetachResponse(t) err := volumeactions.Detach(client.ServiceClient(), "cd281d77-8217-4830-be95-9528227c105c", &volumeactions.DetachOpts{}).ExtractErr() th.AssertNoErr(t, err) }
explode_data.jsonl/20626
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 89306, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 2822, 9209, 1176, 89306, 2582, 1155, 692, 9859, 1669, 8123, 4020, 909, 295, 610, 12805, 13860, 2959, 1507, 330, 4385, 17, 23, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPipeDotMixing(t *testing.T) { jsonStr := `{ "info": { "friends": [ {"first": "Dale", "last": "Murphy", "age": 44}, {"first": "Roger", "last": "Craig", "age": 68}, {"first": "Jane", "last": "Murphy", "age": 47} ] } }` var res string res = Get(jsonStr, `info.friends.#[first="Dale"].last`).String() if res != "Murphy" { t.Fatalf("expected '%v', got '%v'", "Murphy", res) } res = Get(jsonStr, `info|friends.#[first="Dale"].last`).String() if res != "Murphy" { t.Fatalf("expected '%v', got '%v'", "Murphy", res) } res = Get(jsonStr, `info|friends.#[first="Dale"]|last`).String() if res != "Murphy" { t.Fatalf("expected '%v', got '%v'", "Murphy", res) } res = Get(jsonStr, `info|friends|#[first="Dale"]|last`).String() if res != "Murphy" { t.Fatalf("expected '%v', got '%v'", "Murphy", res) } res = Get(jsonStr, `@ugly|info|friends|#[first="Dale"]|last`).String() if res != "Murphy" { t.Fatalf("expected '%v', got '%v'", "Murphy", res) } res = Get(jsonStr, `@ugly|info.@ugly|friends|#[first="Dale"]|last`).String() if res != "Murphy" { t.Fatalf("expected '%v', got '%v'", "Murphy", res) } res = Get(jsonStr, `@ugly.info|@ugly.friends|#[first="Dale"]|last`).String() if res != "Murphy" { t.Fatalf("expected '%v', got '%v'", "Murphy", res) } }
explode_data.jsonl/43462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 600 }
[ 2830, 3393, 34077, 34207, 58083, 287, 1155, 353, 8840, 836, 8, 341, 30847, 2580, 1669, 1565, 515, 197, 197, 1, 2733, 788, 341, 298, 197, 1, 29462, 788, 2278, 571, 197, 4913, 3896, 788, 330, 35, 1574, 497, 330, 4259, 788, 330, 59878,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestJson2Struct_bool(t *testing.T) { ret, err := Json2Struct("auto_name", "true", GoStructType) fmt.Printf("ret:\n %s, %v\n", ret, err) }
explode_data.jsonl/72748
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 5014, 17, 9422, 22159, 1155, 353, 8840, 836, 8, 341, 11262, 11, 1848, 1669, 8308, 17, 9422, 445, 3902, 1269, 497, 330, 1866, 497, 5994, 9422, 929, 340, 11009, 19367, 445, 2122, 7190, 77, 1018, 82, 11, 1018, 85, 1699, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestVerifyRequestForMethod(t *testing.T) { req := GivenRequest("GET") plugin := New() revision, _, _, proceed, err := plugin.Extract(buildConfig, "secret100", "", req) if err == nil || !strings.Contains(err.Error(), "unsupported HTTP method") { t.Errorf("Expected unsupported HTTP method, got %v", err) } if proceed { t.Error("Expected 'proceed' return value to be 'false'") } if revision != nil { t.Error("Expected the 'revision' return value to be nil") } }
explode_data.jsonl/26507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 167 }
[ 2830, 3393, 32627, 1900, 2461, 3523, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 16246, 1900, 445, 3806, 1138, 197, 9138, 1669, 1532, 741, 197, 28342, 11, 8358, 8358, 10354, 11, 1848, 1669, 9006, 5121, 2144, 43333, 2648, 11, 330, 20474, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLocateKeyFromArrayBounds(t *testing.T) { tree, err := simpleyaml.NewYaml([]byte(data)) key := "spec.containers[0].ports[1].name" if err != nil { t.Fatalf("Could not parse YAML: %s", err.Error()) } tree, err = locate(key, tree) if err != nil { t.Errorf("Could not parse key: %s", err.Error()) } if tree == nil { t.Errorf("Error: could not find key '%s'", key) } }
explode_data.jsonl/80707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 9152, 349, 1592, 74679, 11394, 1155, 353, 8840, 836, 8, 341, 51968, 11, 1848, 1669, 4285, 41466, 7121, 56, 9467, 10556, 3782, 2592, 4390, 23634, 1669, 330, 9535, 5345, 20568, 58, 15, 936, 3394, 58, 16, 936, 606, 1837, 743,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSliceIndexAccess(t *testing.T) { v.SetConfigType("yaml") r := strings.NewReader(string(yamlDeepNestedSlices)) err := v.unmarshalReader(r, v.config) require.NoError(t, err) assert.Equal(t, "The expanse", v.GetString("tv.0.title")) assert.Equal(t, "February 1, 2017", v.GetString("tv.0.seasons.1.first_released")) assert.Equal(t, "Static", v.GetString("tv.0.seasons.1.episodes.2.title")) assert.Equal(t, "December 15, 2015", v.GetString("tv.0.seasons.0.episodes.1.air_date")) // Test for index out of bounds assert.Equal(t, "", v.GetString("tv.0.seasons.2.first_released")) // Accessing multidimensional arrays assert.Equal(t, "Static", v.GetString("tv.0.episodes.1.2")) }
explode_data.jsonl/9928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 280 }
[ 2830, 3393, 33236, 1552, 6054, 1155, 353, 8840, 836, 8, 341, 5195, 4202, 2648, 929, 445, 41466, 1138, 7000, 1669, 9069, 68587, 3609, 7021, 9467, 33464, 71986, 50, 37414, 4390, 9859, 1669, 348, 6307, 27121, 5062, 2601, 11, 348, 5423, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMatBitwiseOperations(t *testing.T) { mat1 := NewMatWithSize(101, 102, MatTypeCV8U) mat2 := NewMatWithSize(101, 102, MatTypeCV8U) mat3 := NewMat() BitwiseAnd(mat1, mat2, &mat3) if mat3.Empty() { t.Error("TestMatBitwiseAnd dest mat3 should not be empty.") } BitwiseOr(mat1, mat2, &mat3) if mat3.Empty() { t.Error("TestMatBitwiseOr dest mat3 should not be empty.") } BitwiseXor(mat1, mat2, &mat3) if mat3.Empty() { t.Error("TestMatBitwiseXor dest mat3 should not be empty.") } BitwiseNot(mat1, &mat3) if mat3.Empty() { t.Error("TestMatBitwiseNot dest mat3 should not be empty.") } }
explode_data.jsonl/81707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 257 }
[ 2830, 3393, 11575, 8344, 4482, 35120, 1155, 353, 8840, 836, 8, 341, 59874, 16, 1669, 1532, 11575, 2354, 1695, 7, 16, 15, 16, 11, 220, 16, 15, 17, 11, 6867, 929, 19589, 23, 52, 340, 59874, 17, 1669, 1532, 11575, 2354, 1695, 7, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLegacyReplicaCalcEmptyCPURequest(t *testing.T) { tc := legacyReplicaCalcTestCase{ currentReplicas: 1, expectedError: fmt.Errorf("missing request for"), resource: &resourceInfo{ name: v1.ResourceCPU, requests: []resource.Quantity{}, levels: makePodMetricLevels(200), targetUtilization: 100, }, } tc.runTest(t) }
explode_data.jsonl/26691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 77415, 18327, 15317, 47168, 3522, 31615, 1900, 1155, 353, 8840, 836, 8, 341, 78255, 1669, 19588, 18327, 15317, 47168, 16458, 515, 197, 20121, 18327, 52210, 25, 220, 16, 345, 197, 42400, 1454, 25, 256, 8879, 13080, 445, 30616, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDownloadWithoutFlags(t *testing.T) { v := viper.New() v.Set("token", "abc123") v.Set("workspace", "/home/username") v.Set("apibaseurl", "http://example.com") cfg := config.Config{ UserViperConfig: v, } flags := pflag.NewFlagSet("fake", pflag.PanicOnError) setupDownloadFlags(flags) err := runDownload(cfg, flags, []string{}) if assert.Error(t, err) { assert.Regexp(t, "need an --exercise name or a solution --uuid", err.Error()) } }
explode_data.jsonl/15068
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 11377, 26040, 9195, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 95132, 7121, 741, 5195, 4202, 445, 5839, 497, 330, 13683, 16, 17, 18, 1138, 5195, 4202, 445, 42909, 497, 3521, 5117, 14, 5113, 1138, 5195, 4202, 445, 391, 98746...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDialectIdentityWriting(t *testing.T) { for _, td := range []testDialect{ {lex.Token{V: "name"}, "name"}, {lex.Token{Quote: '`', V: "has.period"}, "`has.period`"}, {lex.Token{Quote: '`', V: "has`.`period"}, "has.period"}, {lex.Token{V: "has space"}, "`has space`"}, } { dw := NewDefaultWriter() in := NewIdentityNode(&td.t) in.WriteDialect(dw) assert.Equal(t, td.expect, dw.String()) } for _, td := range []testDialect{ {lex.Token{V: "name"}, "name"}, {lex.Token{Quote: '`', V: "has.period"}, "'has.period'"}, {lex.Token{V: "has space"}, "'has space'"}, } { dw := NewDialectWriter('"', '\'') in := NewIdentityNode(&td.t) in.WriteDialect(dw) assert.Equal(t, td.expect, dw.String()) } for _, td := range []testDialect{ {lex.Token{V: "name"}, "name"}, {lex.Token{Quote: '`', V: "has.period"}, "[has.period]"}, {lex.Token{V: "has space"}, "[has space]"}, } { dw := NewDialectWriter('"', '[') in := NewIdentityNode(&td.t) in.WriteDialect(dw) assert.Equal(t, td.expect, dw.String()) } // strip Namespaces for _, td := range []testDialect{ {lex.Token{V: "name"}, "name"}, {lex.Token{Quote: '`', V: "table_name`.`fieldname"}, "fieldname"}, {lex.Token{V: "has space"}, "`has space`"}, } { dw := NewDefaultNoNamspaceWriter() in := NewIdentityNode(&td.t) in.WriteDialect(dw) assert.Equal(t, td.expect, dw.String()) } }
explode_data.jsonl/77115
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 614 }
[ 2830, 3393, 35, 55056, 18558, 39313, 1155, 353, 8840, 836, 8, 1476, 2023, 8358, 17941, 1669, 2088, 3056, 1944, 35, 55056, 515, 197, 197, 90, 2571, 32277, 90, 53, 25, 330, 606, 14345, 330, 606, 7115, 197, 197, 90, 2571, 32277, 90, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestVersionedTreeProofs(t *testing.T) { require := require.New(t) tree := NewMutableTree(db.NewMemDB(), 0) tree.Set([]byte("k1"), []byte("v1")) tree.Set([]byte("k2"), []byte("v1")) tree.Set([]byte("k3"), []byte("v1")) tree.SaveVersion() // fmt.Println("TREE VERSION 1") // printNode(tree.ndb, tree.root, 0) // fmt.Println("TREE VERSION 1 END") root1 := tree.Hash() tree.Set([]byte("k2"), []byte("v2")) tree.Set([]byte("k4"), []byte("v2")) tree.SaveVersion() // fmt.Println("TREE VERSION 2") // printNode(tree.ndb, tree.root, 0) // fmt.Println("TREE VERSION END") root2 := tree.Hash() require.NotEqual(root1, root2) tree.Remove([]byte("k2")) tree.SaveVersion() // fmt.Println("TREE VERSION 3") // printNode(tree.ndb, tree.root, 0) // fmt.Println("TREE VERSION END") root3 := tree.Hash() require.NotEqual(root2, root3) val, proof, err := tree.GetVersionedWithProof([]byte("k2"), 1) require.NoError(err) require.EqualValues(val, []byte("v1")) require.NoError(proof.Verify(root1), proof.String()) require.NoError(proof.VerifyItem([]byte("k2"), val)) val, proof, err = tree.GetVersionedWithProof([]byte("k4"), 1) require.NoError(err) require.Nil(val) require.NoError(proof.Verify(root1)) require.NoError(proof.VerifyAbsence([]byte("k4"))) val, proof, err = tree.GetVersionedWithProof([]byte("k2"), 2) require.NoError(err) require.EqualValues(val, []byte("v2")) require.NoError(proof.Verify(root2), proof.String()) require.NoError(proof.VerifyItem([]byte("k2"), val)) val, proof, err = tree.GetVersionedWithProof([]byte("k1"), 2) require.NoError(err) require.EqualValues(val, []byte("v1")) require.NoError(proof.Verify(root2)) require.NoError(proof.VerifyItem([]byte("k1"), val)) val, proof, err = tree.GetVersionedWithProof([]byte("k2"), 3) require.NoError(err) require.Nil(val) require.NoError(proof.Verify(root3)) require.NoError(proof.VerifyAbsence([]byte("k2"))) require.Error(proof.Verify(root1)) require.Error(proof.Verify(root2)) }
explode_data.jsonl/25135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 805 }
[ 2830, 3393, 5637, 291, 6533, 31076, 82, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 51968, 1669, 1532, 11217, 6533, 9791, 7121, 18816, 3506, 1507, 220, 15, 692, 51968, 4202, 10556, 3782, 445, 74, 16, 3975, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSHA3FromBytes(t *testing.T) { tests := []struct { name string arg []byte want []byte }{ {"SHA3-256 From Bytes", []byte{}, []byte{167, 255, 198, 248, 191, 30, 215, 102, 81, 193, 71, 86, 160, 97, 214, 98, 245, 128, 255, 77, 228, 59, 73, 250, 130, 216, 10, 75, 128, 248, 67, 74}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := crypto.SHA3FromBytes(tt.arg); !reflect.DeepEqual(got, tt.want) { t.Errorf("SHA3FromBytes() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/6956
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 237 }
[ 2830, 3393, 33145, 18, 3830, 7078, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 47903, 220, 3056, 3782, 198, 197, 50780, 3056, 3782, 198, 197, 59403, 197, 197, 4913, 33145, 18, 12, 17, 20, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUnmarshalString(t *testing.T) { input := []byte(`{ "S": "Hello"}`) var av DynamoDBAttributeValue err := json.Unmarshal(input, &av) assert.Nil(t, err) assert.Equal(t, DataTypeString, av.DataType()) assert.Equal(t, "Hello", av.String()) }
explode_data.jsonl/61704
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 1806, 27121, 703, 1155, 353, 8840, 836, 8, 341, 22427, 1669, 3056, 3782, 5809, 90, 330, 50, 788, 330, 9707, 1, 5541, 692, 2405, 1822, 71813, 3506, 78554, 198, 9859, 1669, 2951, 38097, 5384, 11, 609, 402, 692, 6948, 59678, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsCertificateWithinExpiry(t *testing.T) { now := time.Now() threeDays := time.Hour * 24 * 3 // Test: Within expiry case (happy case) x509Cert, err := createCertWithTimeRange(now.Add(-threeDays), now.Add(threeDays), "ecdsa") if err != nil { fmt.Printf("Parse ERROR %s \n", err.Error()) t.Fatal(fmt.Sprintf("Parse ERROR %s \n", err.Error())) } err = isCertificateWithinExpiry(x509Cert) require.NoError(t, err) // Test: Expired cert case x509Cert, err = createCertWithTimeRange(now.Add(-3*threeDays), now.Add(-2*threeDays), "ecdsa") if err != nil { fmt.Printf("Parse ERROR %s \n", err.Error()) t.Fatal(fmt.Sprintf("Parse ERROR %s \n", err.Error())) } err = isCertificateWithinExpiry(x509Cert) require.EqualError(t, err, fmt.Sprintf("Cert is invalid")) // Test: Not valid yet case x509Cert, err = createCertWithTimeRange(now.Add(3*threeDays), now.Add(4*threeDays), "ecdsa") if err != nil { fmt.Printf("Parse ERROR %s \n", err.Error()) t.Fatal(fmt.Sprintf("Parse ERROR %s \n", err.Error())) } err = isCertificateWithinExpiry(x509Cert) require.EqualError(t, err, fmt.Sprintf("Cert is invalid")) }
explode_data.jsonl/48209
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 452 }
[ 2830, 3393, 3872, 33202, 41961, 840, 48209, 1155, 353, 8840, 836, 8, 341, 80922, 1669, 882, 13244, 741, 197, 27856, 20557, 1669, 882, 73550, 353, 220, 17, 19, 353, 220, 18, 271, 197, 322, 3393, 25, 24236, 49921, 1142, 320, 56521, 1142...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_skipEvent(t *testing.T) { tests := []struct { config string event string want bool }{ { config: "kind: pipeline\ntrigger: { }", event: "push", want: false, }, { config: "kind: pipeline\ntrigger: { event: [ push ] }", event: "push", want: false, }, { config: "kind: pipeline\ntrigger: { event: [ push ] }", event: "pull_request", want: true, }, } for i, test := range tests { manifest, err := yaml.ParseString(test.config) if err != nil { t.Error(err) } pipeline := manifest.Resources[0].(*yaml.Pipeline) got, want := skipEvent(pipeline, test.event), test.want if got != want { t.Errorf("Want test %d to return %v", i, want) } } }
explode_data.jsonl/14347
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 326 }
[ 2830, 3393, 44830, 1556, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 25873, 914, 198, 197, 28302, 220, 914, 198, 197, 50780, 256, 1807, 198, 197, 59403, 197, 197, 515, 298, 25873, 25, 330, 15314, 25, 15301, 1699, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPoolReuseDecoder(t *testing.T) { assert := assert.New(t) pool := NewDecoderPool(1) decoder := pool.Borrow("application/msgpack") pool.Release(decoder) anotherDecoder := pool.Borrow("application/msgpack") assert.Equal(anotherDecoder, decoder) }
explode_data.jsonl/35765
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 10551, 38081, 20732, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 85273, 1669, 1532, 20732, 10551, 7, 16, 340, 197, 48110, 1669, 7314, 1785, 7768, 445, 5132, 80765, 4748, 1138, 85273, 58693, 35712, 4316, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIotaGetTransactions(t *testing.T) { InitTest() Transfers, err := Lin.API.GetTransfers(CreationSeed, api.GetTransfersOptions{Start: 3}) if err != nil { fmt.Println("GetBalances err ", err.Error()) } else { fmt.Println("GetBalances success ") fmt.Printf("%+v \n", Transfers) for k, v := range Transfers { fmt.Println("No.", k) fmt.Println(">>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>") for vk, vv := range v { fmt.Println("vk No.", vk) fmt.Println("Bundle", vv.Bundle) fmt.Println("Address", vv.Address) fmt.Println("Value", vv.Value) fmt.Println("Hash", vv.Hash) fmt.Println("Confirmed", vv.Confirmed) fmt.Println("---------------------------------") } } } //Transactions, err := Lin.API.FindTransactionObjects(api.FindTransactionsQuery{ // Addresses: []trinary.Hash{ // "OLCHRMEFITACDJWFTSIQWPOUWZIPCTEFUPEXTMDELUCAXENBDWIINONPFXHRTIUMVAU99VNERAVVQYYXC", // }, //}) //if err != nil { // fmt.Println("GetBalances err ", err.Error()) //} else { // for k, v := range Transactions{ // fmt.Println("No.", k) // fmt.Println("Address", v.Address) // fmt.Println("Value", v.Value) // fmt.Println("Hash", v.Hash) // fmt.Println("Confirmed", v.Confirmed) // fmt.Println("---------------------------------") // } //} //TxHash := "VTCPXLFHNPABBYHTPICNFKDHHYJZHYGNCXUAJIFRCILHLZQJGSZTKXYPDSFGODQVA9LJPNVZHJFFIFRTC" //TraverseBundle, err := Lin.API.TraverseBundle(TxHash, bundle.Bundle{}) //if err != nil { // fmt.Println("GetBalances err ", err.Error()) //} else { // for k, v := range TraverseBundle{ // fmt.Println("No.", k) // fmt.Println("Address", v.Address) // fmt.Println("Value", v.Value) // fmt.Println("Hash", v.Hash) // fmt.Println("Confirmed", v.Confirmed) // fmt.Println("---------------------------------") // } //} //Hash := "DRQJXFIRMKJFQUUMHLETZMKKVTSJZRQRGSOBTHLAOKQBRVQQMPDAIATDIAGZQVGHHPJKYFASUVVQFX999" //Tx, err := Lin.API.GetTrytes(Hash) //if err != nil { // fmt.Println("GetBalances err ", err.Error()) //} else { // for k, v := range Tx{ // fmt.Println("No.", k,"Tx", v) // } //} //Addr 
:= "VTCPXLFHNPABBYHTPICNFKDHHYJZHYGNCXUAJIFRCILHLZQJGSZTKXYPDSFGODQVA9LJPNVZHJFFIFRTC" //Tx, err := Lin.API.FindTransactions(api.FindTransactionsQuery{ // Addresses: trinary.Hashes{Addr}, //}) //if err != nil { // fmt.Println("FindTransactions err ", err.Error()) //} else { // for k, v := range Tx{ // fmt.Println("No.", k,"Tx", v) // } //} }
explode_data.jsonl/15331
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1142 }
[ 2830, 3393, 40, 6089, 1949, 48761, 1155, 353, 8840, 836, 8, 341, 98762, 2271, 741, 197, 3167, 49793, 11, 1848, 1669, 8564, 24922, 2234, 3167, 49793, 3025, 26453, 41471, 11, 6330, 2234, 3167, 49793, 3798, 90, 3479, 25, 220, 18, 3518, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStream_hasNBytesToRead(t *testing.T) { t.Run("test", func(t *testing.T) { assert := base.NewAssert(t) stream := NewStream() stream.SetWritePos(3 * streamBlockSize) for i := streamPosBody; i < 2*streamBlockSize; i++ { assert(stream.SetReadPos(i)).IsTrue() for n := 0; n < 3*streamBlockSize; n++ { assert(stream.hasNBytesToRead(n)). Equals(i+n <= 3*streamBlockSize) } } stream.Release() }) }
explode_data.jsonl/21193
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 185 }
[ 2830, 3393, 3027, 21778, 45, 7078, 1249, 4418, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1944, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 6948, 1669, 2331, 7121, 8534, 1155, 340, 197, 44440, 1669, 1532, 3027, 741, 197, 44440,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestSerializeTransactionsSameRow_RequestCanceled verifies that a request
// queued behind a hot-row lock unblocks itself when its context is canceled,
// and that the remaining requests still serialize and complete.
func TestSerializeTransactionsSameRow_RequestCanceled(t *testing.T) {
	// This test is similar to TestSerializeTransactionsSameRow, but tests only
	// that a queued request unblocks itself when its context is done.
	//
	// tx1 and tx2 run against the same row.
	// tx2 is blocked on tx1. Eventually, tx2 is canceled and its request fails.
	// Only after that tx1 commits and finishes.
	config := tabletenv.NewDefaultConfig()
	// MaxConcurrency = 1 forces strict serialization on the hot row so that
	// tx2 and tx3 must queue behind tx1.
	config.HotRowProtection.Mode = tabletenv.Enable
	config.HotRowProtection.MaxConcurrency = 1
	db, tsv := setupTabletServerTestCustom(t, config, "")
	defer tsv.StopService()
	defer db.Close()

	target := querypb.Target{TabletType: topodatapb.TabletType_PRIMARY}
	// Baseline for the TxSerializer wait counter, compared at the end.
	countStart := tsv.stats.WaitTimings.Counts()["TabletServerTest.TxSerializer"]

	// Fake data.
	q1 := "update test_table set name_string = 'tx1' where pk = :pk and `name` = :name"
	q2 := "update test_table set name_string = 'tx2' where pk = :pk and `name` = :name"
	q3 := "update test_table set name_string = 'tx3' where pk = :pk and `name` = :name"
	// Every request needs their own bind variables to avoid data races.
	bvTx1 := map[string]*querypb.BindVariable{
		"pk":   sqltypes.Int64BindVariable(1),
		"name": sqltypes.Int64BindVariable(1),
	}
	bvTx2 := map[string]*querypb.BindVariable{
		"pk":   sqltypes.Int64BindVariable(1),
		"name": sqltypes.Int64BindVariable(1),
	}
	bvTx3 := map[string]*querypb.BindVariable{
		"pk":   sqltypes.Int64BindVariable(1),
		"name": sqltypes.Int64BindVariable(1),
	}

	// Make sure that tx2 starts only after tx1 is running its Execute().
	tx1Started := make(chan struct{})
	// Signal when tx2 is done.
	tx2Done := make(chan struct{})

	// The fake DB blocks tx1's UPDATE until tx2 has been canceled, keeping
	// the hot-row queue occupied for the duration of the test.
	db.SetBeforeFunc("update test_table set name_string = 'tx1' where pk = 1 and `name` = 1 limit 10001", func() {
		close(tx1Started)
		// Keep blocking until tx2 was canceled.
		<-tx2Done
	})

	// Run the two transactions.
	wg := sync.WaitGroup{}

	// tx1.
	wg.Add(1)
	go func() {
		defer wg.Done()

		_, tx1, _, err := tsv.BeginExecute(ctx, &target, nil, q1, bvTx1, 0, nil)
		if err != nil {
			t.Errorf("failed to execute query: %s: %s", q1, err)
		}
		if _, err := tsv.Commit(ctx, &target, tx1); err != nil {
			t.Errorf("call TabletServer.Commit failed: %v", err)
		}
	}()

	// tx2.
	ctxTx2, cancelTx2 := context.WithCancel(ctx)
	wg.Add(1)
	go func() {
		defer wg.Done()
		defer close(tx2Done)

		// Wait until tx1 has started to make the test deterministic.
		<-tx1Started

		_, _, _, err := tsv.BeginExecute(ctxTx2, &target, nil, q2, bvTx2, 0, nil)
		if err == nil || vterrors.Code(err) != vtrpcpb.Code_CANCELED || err.Error() != "context canceled" {
			t.Errorf("tx2 should have failed because the context was canceled: %v", err)
		}
		// No commit necessary because the Begin failed.
	}()

	// tx3.
	wg.Add(1)
	go func() {
		defer wg.Done()

		// Wait until tx1 and tx2 are pending to make the test deterministic.
		if err := waitForTxSerializationPendingQueries(tsv, "test_table where pk = 1 and `name` = 1", 2); err != nil {
			t.Error(err)
		}

		_, tx3, _, err := tsv.BeginExecute(ctx, &target, nil, q3, bvTx3, 0, nil)
		if err != nil {
			t.Errorf("failed to execute query: %s: %s", q3, err)
		}
		if _, err := tsv.Commit(ctx, &target, tx3); err != nil {
			t.Errorf("call TabletServer.Commit failed: %v", err)
		}
	}()

	// Wait until tx1, 2 and 3 are pending.
	err := waitForTxSerializationPendingQueries(tsv, "test_table where pk = 1 and `name` = 1", 3)
	require.NoError(t, err)
	// Now unblock tx2 and cancel it.
	cancelTx2()

	wg.Wait()

	// tx2 and tx3 both waited behind tx1, so the serializer wait counter
	// must have advanced by exactly 2.
	got, ok := tsv.stats.WaitTimings.Counts()["TabletServerTest.TxSerializer"]
	want := countStart + 2
	if got != want {
		t.Fatalf("tx2 and tx3 should have been serialized: ok? %v got: %v want: %v", ok, got, want)
	}
}
explode_data.jsonl/80007
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1467 }
[ 2830, 3393, 15680, 48761, 19198, 3102, 44024, 63263, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 1273, 374, 4428, 311, 3393, 15680, 48761, 19198, 3102, 11, 714, 7032, 1172, 198, 197, 322, 429, 264, 57163, 1681, 650, 21928, 5086, 979, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWorkerPoolStop(t *testing.T) { require := require.New(t) pool := concurrency.NewWorkerPool(5) count := int32(0) for i := 0; i < 5; i++ { pool.Do(func() { time.Sleep(1 * time.Millisecond) atomic.AddInt32(&count, 1) }) } var wg sync.WaitGroup wg.Add(1) pool.Do(func() { defer wg.Done() atomic.AddInt32(&count, 1) pool.Stop() }) wg.Wait() // Some future tasks will be executed after stop is called. for i := 6; i < 100; i++ { pool.Do(func() { time.Sleep(1 * time.Millisecond) atomic.AddInt32(&count, 1) }) } pool.Wait() require.True(count >= 6) }
explode_data.jsonl/418
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 268 }
[ 2830, 3393, 21936, 10551, 10674, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 85273, 1669, 78010, 7121, 21936, 10551, 7, 20, 340, 18032, 1669, 526, 18, 17, 7, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUtilStringPatternMatch(t *testing.T) { goodJsons := []map[string]interface{}{ {"pattern": "*", "name": "tom", "result": true}, {"pattern": "aa", "name": "tom", "result": false}, {"pattern": "", "name": "aa", "result": false}, {"pattern": "test*", "name": "123testabc", "result": false}, {"pattern": "test*", "name": "testabc", "result": true}, {"pattern": "*test*", "name": "test1", "result": true}, {"pattern": "*test", "name": "123testabc", "result": false}, {"pattern": "*test", "name": "123test", "result": true}, } for _, value := range goodJsons { if stringPatternMatch(value["pattern"].(string), value["name"].(string)) != value["result"].(bool) { t.Fatalf("Failed in %s match %s\n", value["pattern"].(string), value["name"].(string)) } } }
explode_data.jsonl/9660
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 2742, 703, 15760, 8331, 1155, 353, 8840, 836, 8, 341, 3174, 1386, 5014, 82, 1669, 3056, 2186, 14032, 31344, 67066, 197, 197, 4913, 14339, 788, 79936, 330, 606, 788, 330, 37401, 497, 330, 1382, 788, 830, 1583, 197, 197, 491...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAcquireBuildLock_expired(t *testing.T) { // acquire a lock when the previous lock has expired client := buildLock_Client(t) buildLock_Lock(t, client, "my-namespace", "my-owner", "my-repository", "my-branch", "42", 42, time.Duration(-1)*time.Minute) pod := buildLock_Pod(t, client, "my-owner", "my-repository", "my-branch", "13") clean, channel := buildLock_AcquireFromPod(t, client, "my-namespace", pod, false) defer clean() callback := <-channel require.NotNil(t, callback, "timeout") buildLock_AssertLockFromPod(t, client, "my-namespace", pod) callback() buildLock_AssertNoLock(t, client, "my-namespace") }
explode_data.jsonl/28226
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 11654, 984, 11066, 11989, 80221, 1155, 353, 8840, 836, 8, 341, 197, 322, 21256, 264, 5296, 979, 279, 3681, 5296, 702, 26391, 198, 25291, 1669, 1936, 11989, 46102, 1155, 340, 69371, 11989, 2351, 1176, 1155, 11, 2943, 11, 330,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddImageEmpty(t *testing.T) { t.Parallel() // arrange p := podcast.New("title", "link", "description", nil, nil) // act p.AddImage("") // assert assert.Nil(t, p.Image) assert.Nil(t, p.IImage) }
explode_data.jsonl/73072
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 2212, 1906, 3522, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 322, 30893, 198, 3223, 1669, 17711, 7121, 445, 2102, 497, 330, 2080, 497, 330, 4684, 497, 2092, 11, 2092, 692, 197, 322, 1160, 198, 3223, 1904, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMergeSecurityGroupId(t *testing.T) { for _, tt := range securityGroupIdTests { d := Driver{SecurityGroupId: tt.groupId, SecurityGroupIds: tt.groupIds} assert.Equal(t, tt.expected, d.securityGroupIds()) } }
explode_data.jsonl/7423
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 52096, 15352, 48410, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 4763, 48410, 18200, 341, 197, 2698, 1669, 14577, 90, 15352, 48410, 25, 17853, 6175, 764, 11, 8234, 2808, 12701, 25, 17853, 6175, 12701, 532, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestCreateArchive(t *testing.T) { common.SetupConfig("./test") mockConfig := config.Mock() mockConfig.Set("confd_path", "./test/confd") mockConfig.Set("log_file", "./test/logs/agent.log") zipFilePath := getArchivePath() filePath, err := createArchive(zipFilePath, true, SearchPaths{}, "") assert.Nil(t, err) assert.Equal(t, zipFilePath, filePath) if _, err := os.Stat(zipFilePath); os.IsNotExist(err) { assert.Fail(t, "The Zip File was not created") } else { os.Remove(zipFilePath) } }
explode_data.jsonl/18210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 4021, 42502, 1155, 353, 8840, 836, 8, 341, 83825, 39820, 2648, 13988, 1944, 1138, 77333, 2648, 1669, 2193, 24664, 741, 77333, 2648, 4202, 445, 6135, 67, 2638, 497, 5924, 1944, 59241, 67, 1138, 77333, 2648, 4202, 445, 839, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUnitAuthenticatePasscode(t *testing.T) { var err error sr := &snowflakeRestful{ FuncPostAuth: postAuthCheckPasscode, } sc := getDefaultSnowflakeConn() sc.cfg.Passcode = "987654321" sc.rest = sr _, err = authenticate(sc, []byte{}, []byte{}) if err != nil { t.Fatalf("failed to run. err: %v", err) } sr.FuncPostAuth = postAuthCheckPasscodeInPassword sc.rest = sr sc.cfg.PasscodeInPassword = true _, err = authenticate(sc, []byte{}, []byte{}) if err != nil { t.Fatalf("failed to run. err: %v", err) } }
explode_data.jsonl/54344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 4562, 99087, 12187, 1851, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 1903, 81, 1669, 609, 74478, 63456, 12416, 1262, 515, 197, 197, 9626, 4133, 5087, 25, 1736, 5087, 3973, 12187, 1851, 345, 197, 532, 29928, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMultipleSections(t *testing.T) { src := ` s1 -- s2 -- s3 --` expected := ` Doc Section[s1] Section[s2] Section[s3] ` assertParse(t, expected, src) }
explode_data.jsonl/21254
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 32089, 38122, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 22074, 274, 16, 198, 39514, 274, 17, 198, 39514, 274, 18, 198, 1177, 3989, 42400, 1669, 22074, 9550, 198, 197, 9620, 11880, 16, 921, 197, 9620, 11880, 17, 921, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNotEmpty(t *testing.T) { cases := []struct { args []string expect bool }{ { args: []string{"a", "b", "c"}, expect: true, }, { args: []string{"a", "", "c"}, expect: false, }, { args: []string{"a"}, expect: true, }, { args: []string{""}, expect: false, }, { args: []string{}, expect: true, }, } for _, each := range cases { t.Run(path.Join(each.args...), func(t *testing.T) { assert.Equal(t, each.expect, NotEmpty(each.args...)) }) } }
explode_data.jsonl/76719
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 27416, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 31215, 256, 3056, 917, 198, 197, 24952, 1807, 198, 197, 59403, 197, 197, 515, 298, 31215, 25, 256, 3056, 917, 4913, 64, 497, 330, 65, 497, 330,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDiffGetFail(t *testing.T) { s := newScaffold(t) defer s.reset() err := s.executeCommand("diff", "dev", "--parallel=1") require.NotNil(t, err) a := assert.New(t) a.Contains(err.Error(), "not implemented") }
explode_data.jsonl/72086
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 21751, 1949, 19524, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 50, 27864, 1155, 340, 16867, 274, 13857, 741, 9859, 1669, 274, 7769, 4062, 445, 13490, 497, 330, 3583, 497, 14482, 46103, 28, 16, 1138, 17957, 93882, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestTransform(t *testing.T) { src := newImage(2, 2, red, green, blue, yellow) buf := new(bytes.Buffer) png.Encode(buf, src) tests := []struct { name string encode func(io.Writer, image.Image) exactOutput bool // whether input and output should match exactly }{ {"bmp", func(w io.Writer, m image.Image) { bmp.Encode(w, m) }, true}, {"gif", func(w io.Writer, m image.Image) { gif.Encode(w, m, nil) }, true}, {"jpeg", func(w io.Writer, m image.Image) { jpeg.Encode(w, m, nil) }, false}, {"png", func(w io.Writer, m image.Image) { png.Encode(w, m) }, true}, } for _, tt := range tests { buf := new(bytes.Buffer) tt.encode(buf, src) in := buf.Bytes() out, err := Transform(in, emptyOptions) if err != nil { t.Errorf("Transform with encoder %s returned unexpected error: %v", tt.name, err) } if !reflect.DeepEqual(in, out) { t.Errorf("Transform with with encoder %s with empty options returned modified result", tt.name) } out, err = Transform(in, Options{Width: -1, Height: -1}) if err != nil { t.Errorf("Transform with encoder %s returned unexpected error: %v", tt.name, err) } if len(out) == 0 { t.Errorf("Transform with encoder %s returned empty bytes", tt.name) } if tt.exactOutput && !reflect.DeepEqual(in, out) { t.Errorf("Transform with encoder %s with noop Options returned modified result", tt.name) } } if _, err := Transform([]byte{}, Options{Width: 1}); err == nil { t.Errorf("Transform with invalid image input did not return expected err") } }
explode_data.jsonl/11691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 598 }
[ 2830, 3393, 8963, 1155, 353, 8840, 836, 8, 341, 41144, 1669, 501, 1906, 7, 17, 11, 220, 17, 11, 2518, 11, 6176, 11, 6303, 11, 13753, 692, 26398, 1669, 501, 23158, 22622, 340, 3223, 968, 50217, 10731, 11, 2286, 692, 78216, 1669, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCreateReplica verifies that RealPodControl.CreateReplica POSTs a pod
// built from the replication controller's pod template to the apiserver.
func TestCreateReplica(t *testing.T) {
	ns := api.NamespaceDefault
	// Canned apiserver response; the fake handler also records the request
	// body for later validation.
	body := runtime.EncodeOrDie(testapi.Codec(), &api.Pod{ObjectMeta: api.ObjectMeta{Name: "empty_pod"}})
	fakeHandler := util.FakeHandler{
		StatusCode:   200,
		ResponseBody: string(body),
	}
	testServer := httptest.NewServer(&fakeHandler)
	defer testServer.Close()
	client := client.NewOrDie(&client.Config{Host: testServer.URL, Version: testapi.Version()})

	podControl := RealPodControl{
		KubeClient: client,
		Recorder:   &record.FakeRecorder{},
	}

	controllerSpec := newReplicationController(1)

	// Make sure createReplica sends a POST to the apiserver with a pod from the controllers pod template
	podControl.CreateReplica(ns, controllerSpec)

	// The expected pod carries the template's labels and a GenerateName
	// derived from the controller's name.
	expectedPod := api.Pod{
		ObjectMeta: api.ObjectMeta{
			Labels:       controllerSpec.Spec.Template.Labels,
			GenerateName: fmt.Sprintf("%s-", controllerSpec.Name),
		},
		Spec: controllerSpec.Spec.Template.Spec,
	}
	fakeHandler.ValidateRequest(t, testapi.ResourcePath("pods", api.NamespaceDefault, ""), "POST", nil)
	actualPod, err := client.Codec.Decode([]byte(fakeHandler.RequestBody))
	if err != nil {
		t.Errorf("Unexpected error: %#v", err)
	}
	// DeepDerivative compares only the fields set in expectedPod, ignoring
	// server-side defaults in the actual pod.
	if !api.Semantic.DeepDerivative(&expectedPod, actualPod) {
		t.Logf("Body: %s", fakeHandler.RequestBody)
		t.Errorf("Unexpected mismatch. Expected\n %#v,\n Got:\n %#v", &expectedPod, actualPod)
	}
}
explode_data.jsonl/36057
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 487 }
[ 2830, 3393, 4021, 18327, 15317, 1155, 353, 8840, 836, 8, 341, 84041, 1669, 6330, 46011, 3675, 198, 35402, 1669, 15592, 50217, 2195, 18175, 8623, 2068, 20274, 66, 1507, 609, 2068, 88823, 90, 1190, 12175, 25, 6330, 80222, 63121, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestOutboundListenerConfig_WithSidecar builds services with overlapping
// creation times and mixed protocols, then runs the sidecar-scoped outbound
// listener test variants against them.
func TestOutboundListenerConfig_WithSidecar(t *testing.T) {
	// Add a service and verify it's config
	services := []*model.Service{
		buildService("test1.com", wildcardIP, protocol.HTTP, tnow.Add(1*time.Second)),
		buildService("test2.com", wildcardIP, protocol.TCP, tnow),
		buildService("test3.com", wildcardIP, protocol.HTTP, tnow.Add(2*time.Second))}

	// service4 is constructed by hand rather than via buildService —
	// presumably because it needs the Passthrough resolution set below;
	// NOTE(review): confirm against buildService's definition.
	service4 := &model.Service{
		CreationTime: tnow.Add(1 * time.Second),
		Hostname:     host.Name("test4.com"),
		Address:      wildcardIP,
		ClusterVIPs:  make(map[string]string),
		Ports: model.PortList{
			&model.Port{
				Name:     "default",
				Port:     9090,
				Protocol: protocol.HTTP,
			},
		},
		Resolution: model.Passthrough,
		Attributes: model.ServiceAttributes{
			Namespace: "default",
		},
	}
	testOutboundListenerConfigWithSidecar(t, services...)
	// The capture-mode-none and use-remote-address variants additionally
	// include the passthrough service.
	services = append(services, service4)
	testOutboundListenerConfigWithSidecarWithCaptureModeNone(t, services...)
	testOutboundListenerConfigWithSidecarWithUseRemoteAddress(t, services...)
}
explode_data.jsonl/61270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 377 }
[ 2830, 3393, 2662, 10891, 2743, 2648, 62, 2354, 16384, 6918, 1155, 353, 8840, 836, 8, 341, 197, 322, 2691, 264, 2473, 323, 10146, 432, 594, 2193, 198, 1903, 2161, 1669, 29838, 2528, 13860, 515, 197, 69371, 1860, 445, 1944, 16, 905, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxnEventsPayloadsUnderLimit(t *testing.T) { events := newTxnEvents(10) for i := 0; i < 4; i++ { events.AddTxnEvent(&TxnEvent{}, Priority(float32(i)/10.0)) } ps := events.payloads(5) if len(ps) != 1 { t.Error(ps) } if data, err := ps[0].Data("agentRunID", time.Now()); data == nil || err != nil { t.Error(data, err) } }
explode_data.jsonl/37464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 31584, 77, 7900, 29683, 82, 16250, 16527, 1155, 353, 8840, 836, 8, 341, 90873, 1669, 501, 31584, 77, 7900, 7, 16, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 19, 26, 600, 1027, 341, 197, 90873, 1904, 31584, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestVarBytesWireErrors exercises the error paths of WriteVarBytes and
// ReadVarBytes by using fixed-size writers/readers that run out of space
// partway through the varint-prefixed byte array.
func TestVarBytesWireErrors(t *testing.T) {
	pver := ProtocolVersion

	// bytes256 is a byte array that takes a 2-byte varint to encode.
	bytes256 := bytes.Repeat([]byte{0x01}, 256)

	tests := []struct {
		in       []byte // Byte Array to write
		buf      []byte // Wire encoding
		pver     uint32 // Protocol version for wire encoding
		max      int    // Max size of fixed buffer to induce errors
		writeErr error  // Expected write error
		readErr  error  // Expected read error
	}{
		// Latest protocol version with intentional read/write errors.
		// Force errors on empty byte array.
		{[]byte{}, []byte{0x00}, pver, 0, io.ErrShortWrite, io.EOF},
		// Force error on single byte varint + byte array.
		{[]byte{0x01, 0x02, 0x03}, []byte{0x04}, pver, 2, io.ErrShortWrite, io.ErrUnexpectedEOF},
		// Force errors on 2-byte varint + byte array.
		{bytes256, []byte{0xfd}, pver, 2, io.ErrShortWrite, io.ErrUnexpectedEOF},
	}

	t.Logf("Running %d tests", len(tests))
	for i, test := range tests {
		// Encode to wire format. The fixed writer fails with ErrShortWrite
		// once test.max bytes have been written.
		w := newFixedWriter(test.max)
		err := WriteVarBytes(w, test.pver, test.in)
		if err != test.writeErr {
			t.Errorf("WriteVarBytes #%d wrong error got: %v, want: %v", i, err, test.writeErr)
			continue
		}

		// Decode from wire format. The fixed reader exposes only test.max
		// bytes of the (deliberately truncated) encoding.
		r := newFixedReader(test.max, test.buf)
		_, err = ReadVarBytes(r, test.pver, MaxMessagePayload, "test payload")
		if err != test.readErr {
			t.Errorf("ReadVarBytes #%d wrong error got: %v, want: %v", i, err, test.readErr)
			continue
		}
	}
}
explode_data.jsonl/15304
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 598 }
[ 2830, 3393, 3962, 7078, 37845, 13877, 1155, 353, 8840, 836, 8, 341, 3223, 423, 1669, 24572, 5637, 271, 197, 322, 5820, 17, 20, 21, 374, 264, 4922, 1334, 429, 4990, 264, 220, 17, 53952, 762, 396, 311, 16164, 624, 70326, 17, 20, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEs6Destructuring(t *testing.T) { if Version.Major < 5 { t.Skip("V8 versions before 5.* don't support destructuring.") } t.Parallel() ctx := NewIsolate().NewContext() bar, err := ctx.Eval(` const f = (n) => ({foo:n, bar:n+1}); var {foo, bar} = f(5); bar `, "test.js") if err != nil { t.Fatal(err) } if num := bar.Int64(); num != 6 { t.Errorf("Expected 6, got %v (%v)", num, bar) } }
explode_data.jsonl/81573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 185 }
[ 2830, 3393, 17360, 21, 35, 29294, 1677, 1155, 353, 8840, 836, 8, 341, 743, 6079, 1321, 3035, 366, 220, 20, 341, 197, 3244, 57776, 445, 53, 23, 10795, 1573, 220, 20, 4908, 1513, 944, 1824, 20780, 1677, 13053, 197, 630, 3244, 41288, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestDeletePipelineVersion_FileError checks that when the object store
// fails during version deletion, the version row remains in "deleting"
// status instead of being removed.
func TestDeletePipelineVersion_FileError(t *testing.T) {
	store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch())
	defer store.Close()
	manager := NewResourceManager(store)

	// Create a pipeline.
	_, err := manager.CreatePipeline("pipeline", "", []byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow"))
	assert.Nil(t, err)

	// Create a version under the above pipeline.
	pipelineStore, ok := store.pipelineStore.(*storage.PipelineStore)
	assert.True(t, ok)
	// Pin the generated UUID so the version can be addressed as FakeUUIDOne.
	pipelineStore.SetUUIDGenerator(util.NewFakeUUIDGeneratorOrFatal(FakeUUIDOne, nil))
	_, err = manager.CreatePipelineVersion(
		&api.PipelineVersion{
			Name: "pipeline_version",
			ResourceReferences: []*api.ResourceReference{
				&api.ResourceReference{
					Key: &api.ResourceKey{
						Id:   DefaultFakeUUID,
						Type: api.ResourceType_PIPELINE,
					},
					Relationship: api.Relationship_OWNER,
				},
			},
		},
		[]byte("apiVersion: argoproj.io/v1alpha1\nkind: Workflow"), true)

	// Switch to a bad object store
	manager.objectStore = &FakeBadObjectStore{}

	// Delete the above pipeline_version.
	err = manager.DeletePipelineVersion(FakeUUIDOne)
	assert.NotNil(t, err)

	// Verify the version in deleting status.
	version, err := manager.pipelineStore.GetPipelineVersionWithStatus(FakeUUIDOne, model.PipelineVersionDeleting)
	assert.Nil(t, err)
	assert.NotNil(t, version)
}
explode_data.jsonl/77076
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 508 }
[ 2830, 3393, 6435, 34656, 5637, 34061, 1454, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 52317, 2959, 2043, 2195, 62396, 67811, 7121, 52317, 1462, 2461, 44338, 2398, 16867, 3553, 10421, 741, 92272, 1669, 1532, 32498, 31200, 692, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetExpiringWithData(t *testing.T) { var numTimesInitialized int32 var numTimesFinalized int32 cache := NewWithData("Example_Expiring_Cache", func(key Key, data interface{}) (interface{}, error) { atomic.AddInt32(&numTimesInitialized, 1) return fmt.Sprintf("Value_for_key_%s_[%d]", key, data.(int)), nil }, lazyref.WithAbsoluteExpiration(25*time.Millisecond), lazyref.WithFinalizer(func(expiredValue interface{}) { atomic.AddInt32(&numTimesFinalized, 1) }), ) defer cache.Close() numTimesIndexChanged := 0 prevIndex := 0 for i := 0; i < 10; i++ { time.Sleep(10 * time.Millisecond) value, err := cache.Get(NewStringKey("Key"), i) require.NoError(t, err) strValue := value.(string) i := strings.Index(strValue, "[") assert.Truef(t, i > 0, "expecting to find [ in value") j := strings.Index(strValue, "]") assert.Truef(t, j > 0, "expecting to find ] in value") index, err := strconv.Atoi(strValue[i+1 : j]) require.NoError(t, err) assert.Truef(t, index <= i, "expecting index to be less than or equal to i") if index != prevIndex { numTimesIndexChanged++ prevIndex = index } } assert.Truef(t, numTimesIndexChanged > 2, "expecting that the index would change at least 2 times but it changed %d tim(s)", numTimesIndexChanged) }
explode_data.jsonl/2257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 489 }
[ 2830, 3393, 1949, 840, 78763, 80356, 1155, 353, 8840, 836, 8, 341, 2405, 1629, 18889, 22495, 526, 18, 17, 198, 2405, 1629, 18889, 19357, 1506, 526, 18, 17, 271, 52680, 1669, 1532, 80356, 445, 13314, 62531, 78763, 920, 1777, 756, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestContext2Plan_preventDestroy_destroyPlan checks that planning a destroy
// fails when a resource in state has lifecycle.prevent_destroy set.
func TestContext2Plan_preventDestroy_destroyPlan(t *testing.T) {
	m := testModule(t, "plan-prevent-destroy-good")
	p := testProvider("aws")
	p.DiffFn = testDiffFn
	ctx := testContext2(t, &ContextOpts{
		Config: m,
		ProviderResolver: providers.ResolverFixed(
			map[string]providers.Factory{
				"aws": testProviderFuncFixed(p),
			},
		),
		// Seed state with one aws_instance so the destroy plan has
		// something to destroy.
		State: MustShimLegacyState(&State{
			Modules: []*ModuleState{
				&ModuleState{
					Path: rootModulePath,
					Resources: map[string]*ResourceState{
						"aws_instance.foo": &ResourceState{
							Type: "aws_instance",
							Primary: &InstanceState{
								ID: "i-abc123",
							},
						},
					},
				},
			},
		}),
		Destroy: true,
	})

	plan, diags := ctx.Plan()

	// The plan must fail with a prevent_destroy diagnostic.
	expectedErr := "aws_instance.foo has lifecycle.prevent_destroy"
	if !strings.Contains(fmt.Sprintf("%s", diags.Err()), expectedErr) {
		if plan != nil {
			t.Logf(legacyDiffComparisonString(plan.Changes))
		}
		t.Fatalf("expected err would contain %q\nerr: %s", expectedErr, diags.Err())
	}
}
explode_data.jsonl/28665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 441 }
[ 2830, 3393, 1972, 17, 20485, 10442, 684, 14245, 18066, 20485, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 21334, 684, 12, 15518, 71700, 1138, 3223, 1669, 1273, 5179, 445, 8635, 1138, 3223, 98063, 24911, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSummaryDataPoint_LabelsMap(t *testing.T) { ms := NewSummaryDataPoint() ms.InitEmpty() assert.EqualValues(t, NewStringMap(), ms.LabelsMap()) fillTestStringMap(ms.LabelsMap()) testValLabelsMap := generateTestStringMap() assert.EqualValues(t, testValLabelsMap, ms.LabelsMap()) }
explode_data.jsonl/19575
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 19237, 1043, 2609, 53557, 82, 2227, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 19237, 1043, 2609, 741, 47691, 26849, 3522, 741, 6948, 12808, 6227, 1155, 11, 1532, 703, 2227, 1507, 9829, 4679, 82, 2227, 2398, 65848, 227...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInactiveDownloader62(t *testing.T) { t.Parallel() tester := newTester() defer tester.terminate() // Check that neither block headers nor bodies are accepted if err := tester.downloader.DeliverHeaders("bad peer", []*types.Header{}); err != errNoSyncActive { t.Errorf("error mismatch: have %v, want %v", err, errNoSyncActive) } if err := tester.downloader.DeliverBodies("bad peer", [][]*types.Transaction{}, [][]*types.Header{}); err != errNoSyncActive { t.Errorf("error mismatch: have %v, want %v", err, errNoSyncActive) } }
explode_data.jsonl/33354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 190 }
[ 2830, 3393, 72214, 92698, 21, 17, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18185, 261, 1669, 501, 58699, 741, 16867, 37111, 98942, 2822, 197, 322, 4248, 429, 13866, 2504, 7102, 6329, 12866, 525, 11666, 198, 743, 1848, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestValidateUpgradeWithPoliciesFail seeds a fake lscc state with a chaincode
// whose instantiation policy is RejectAll, then builds an UPGRADE transaction
// for that chaincode and asserts Validate fails with the expected
// "instantiation policy violated" error. NOTE(review): relies on package
// fixtures (mspid, createCCDataRWset, getSignedByMSPMemberPolicy) defined
// elsewhere in this test file.
func TestValidateUpgradeWithPoliciesFail(t *testing.T) { ccname := "mycc" ccver := "upgradewithpoliciesfail" state := make(map[string]map[string][]byte) state["lscc"] = make(map[string][]byte) v := newValidationInstance(state) // create lscc record with reject all instantiation policy ipbytes, err := proto.Marshal(cauthdsl.RejectAllPolicy) if err != nil { t.Fatalf("Failed to marshal RejectAllPolicy: %s", err) } cd := &ccprovider.ChaincodeData{ InstantiationPolicy: ipbytes, Version: ccver, } cdbytes, err := proto.Marshal(cd) if err != nil { t.Fatalf("Failed to marshal ChaincodeData: %s", err) } state["lscc"][ccname] = cdbytes ccver = "2" simresres, err := createCCDataRWset(ccname, ccname, ccver, nil) assert.NoError(t, err) tx, err := createLSCCTx(ccname, ccver, lscc.UPGRADE, simresres) if err != nil { t.Fatalf("createTx returned err %s", err) } envBytes, err := protoutil.GetBytesEnvelope(tx) if err != nil { t.Fatalf("GetBytesEnvelope returned err %s", err) } // good path: signed by the right MSP policy, err := getSignedByMSPMemberPolicy(mspid) if err != nil { t.Fatalf("failed getting policy, err %s", err) } bl := &common.Block{Data: &common.BlockData{Data: [][]byte{envBytes}}} err = v.Validate(bl, "lscc", 0, 0, policy) assert.EqualError(t, err, "chaincode instantiation policy violated, error signature set did not satisfy policy") }
explode_data.jsonl/42512
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 546 }
[ 2830, 3393, 17926, 43861, 2354, 47, 42038, 19524, 1155, 353, 8840, 836, 8, 341, 63517, 606, 1669, 330, 2408, 638, 698, 63517, 423, 1669, 330, 454, 6759, 97809, 79, 42038, 18403, 1837, 24291, 1669, 1281, 9147, 14032, 60, 2186, 14032, 457...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestMapSetNil(t *testing.T) { m := make(map[string]int) vm := ValueOf(&m) vm.Elem().Set(Zero(vm.Elem().Type())) if m != nil { t.Errorf("got non-nil (%p), want nil", m) } }
explode_data.jsonl/29527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 2227, 1649, 19064, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1281, 9147, 14032, 63025, 340, 54879, 1669, 5162, 2124, 2099, 76, 340, 54879, 5142, 3433, 1005, 1649, 7, 17999, 31723, 5142, 3433, 1005, 929, 12145, 743, 296, 961,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestNoConnectionError(t *testing.T) { cli, err := clientv3.NewFromURL("http://no.server.here:999") if err != nil { t.Fatal(err) } v, err := OpenVariable(cli, "variable-name", nil, nil) if err != nil { t.Fatal(err) } // Watch will block for quite a while trying to connect, // so use a short timeout. ctx, cancel := context.WithTimeout(context.Background(), 1*time.Millisecond) defer cancel() _, err = v.Watch(ctx) if err == nil { t.Error("got nil want error") } }
explode_data.jsonl/45338
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 185 }
[ 2830, 3393, 2753, 4526, 1454, 1155, 353, 8840, 836, 8, 341, 86448, 11, 1848, 1669, 2943, 85, 18, 7121, 3830, 3144, 445, 1254, 1110, 2152, 12638, 860, 485, 25, 24, 24, 24, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPortForwardNoSuchPod(t *testing.T) { testKubelet := newTestKubelet(t) kubelet := testKubelet.kubelet fakeRuntime := testKubelet.fakeRuntime fakeRuntime.PodList = []*kubecontainer.Pod{} fakeCommandRunner := fakeContainerCommandRunner{} kubelet.runner = &fakeCommandRunner podName := "podFoo" podNamespace := "nsFoo" var port uint16 = 5000 err := kubelet.PortForward( kubecontainer.GetPodFullName(&api.Pod{ObjectMeta: api.ObjectMeta{Name: podName, Namespace: podNamespace}}), "", port, nil, ) if err == nil { t.Fatal("unexpected non-error") } if !fakeCommandRunner.ID.IsEmpty() { t.Fatal("unexpected invocation of runner.PortForward") } }
explode_data.jsonl/43322
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 7084, 25925, 65531, 23527, 1155, 353, 8840, 836, 8, 341, 18185, 42, 3760, 1149, 1669, 501, 2271, 42, 3760, 1149, 1155, 340, 16463, 3760, 1149, 1669, 1273, 42, 3760, 1149, 5202, 3760, 1149, 198, 1166, 726, 15123, 1669, 1273, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestPrefetch is a table-driven test of layer prefetching: for each case it
// builds an eStargz blob (optionally with prioritized files), verifies the
// layer, triggers Prefetch, and then checks (1) the prefetch offset/size
// passed to the blob, (2) the number of chunks landed in the memory cache,
// and (3) that reading the prioritized files afterwards hits the cache only
// (blob.readCalled stays false). landmarkPosition resolves the expected
// prefetch size from the estargz prefetch landmark entry.
func TestPrefetch(t *testing.T) { defaultPrefetchSize := int64(10000) landmarkPosition := func(t *testing.T, l *layer) int64 { if l.r == nil { t.Fatalf("layer hasn't been verified yet") } if e, ok := l.r.Lookup(estargz.PrefetchLandmark); ok { return e.Offset } return defaultPrefetchSize } tests := []struct { name string in []testutil.TarEntry wantNum int // number of chunks wanted in the cache wants []string // filenames to compare prefetchSize func(*testing.T, *layer) int64 prioritizedFiles []string }{ { name: "no_prefetch", in: []testutil.TarEntry{ testutil.File("foo.txt", sampleData1), }, wantNum: 0, prioritizedFiles: nil, }, { name: "prefetch", in: []testutil.TarEntry{ testutil.File("foo.txt", sampleData1), testutil.File("bar.txt", sampleData2), }, wantNum: chunkNum(sampleData1), wants: []string{"foo.txt"}, prefetchSize: landmarkPosition, prioritizedFiles: []string{"foo.txt"}, }, { name: "with_dir", in: []testutil.TarEntry{ testutil.Dir("foo/"), testutil.File("foo/bar.txt", sampleData1), testutil.Dir("buz/"), testutil.File("buz/buzbuz.txt", sampleData2), }, wantNum: chunkNum(sampleData1), wants: []string{"foo/bar.txt"}, prefetchSize: landmarkPosition, prioritizedFiles: []string{"foo/", "foo/bar.txt"}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { sr, dgst, err := testutil.BuildEStargz(tt.in, testutil.WithEStargzOptions( estargz.WithChunkSize(sampleChunkSize), estargz.WithPrioritizedFiles(tt.prioritizedFiles), )) if err != nil { t.Fatalf("failed to build eStargz: %v", err) } blob := newBlob(sr) mcache := cache.NewMemoryCache() vr, err := reader.NewReader(sr, mcache) if err != nil { t.Fatalf("failed to make stargz reader: %v", err) } l := newLayer( &Resolver{ prefetchTimeout: time.Second, }, ocispec.Descriptor{Digest: testStateLayerDigest}, &blobRef{blob, func() {}}, vr, ) if err := l.Verify(dgst); err != nil { t.Errorf("failed to verify reader: %v", err) return } prefetchSize := int64(0) if tt.prefetchSize != nil { prefetchSize = 
tt.prefetchSize(t, l) } if err := l.Prefetch(defaultPrefetchSize); err != nil { t.Errorf("failed to prefetch: %v", err) return } if blob.calledPrefetchOffset != 0 { t.Errorf("invalid prefetch offset %d; want %d", blob.calledPrefetchOffset, 0) } if blob.calledPrefetchSize != prefetchSize { t.Errorf("invalid prefetch size %d; want %d", blob.calledPrefetchSize, prefetchSize) } if cLen := len(mcache.(*cache.MemoryCache).Membuf); tt.wantNum != cLen { t.Errorf("number of chunks in the cache %d; want %d: %v", cLen, tt.wantNum, err) return } lr := l.r if lr == nil { t.Fatalf("failed to get reader from layer: %v", err) } for _, file := range tt.wants { e, ok := lr.Lookup(file) if !ok { t.Fatalf("failed to lookup %q", file) } wantFile, err := lr.OpenFile(file) if err != nil { t.Fatalf("failed to open file %q", file) } blob.readCalled = false if _, err := io.Copy(ioutil.Discard, io.NewSectionReader(wantFile, 0, e.Size)); err != nil { t.Fatalf("failed to read file %q", file) } if blob.readCalled { t.Errorf("chunks of file %q aren't cached", file) return } } }) } }
explode_data.jsonl/42293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1687 }
[ 2830, 3393, 29978, 2995, 1155, 353, 8840, 836, 8, 341, 11940, 29978, 2995, 1695, 1669, 526, 21, 19, 7, 16, 15, 15, 15, 15, 340, 197, 1933, 3987, 3812, 1669, 2915, 1155, 353, 8840, 836, 11, 326, 353, 10333, 8, 526, 21, 19, 341, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOptionsValidate(t *testing.T) { cases := []struct { options *Options err string }{ { options: &Options{ SnapshotReader: &bytes.Buffer{}, }, }, { options: &Options{ SnapshotFile: "test.bkp", }, }, { options: &Options{ SnapshotFile: "test bkp", SnapshotReader: &bytes.Buffer{}, }, err: "only one of SnapshotFile and SnapshotReader must be set", }, } for _, c := range cases { err := c.options.validate() if err == nil { if c.err != "" { t.Errorf("expected error containing %q but got none", c.err) } continue } if err != nil && c.err == "" { t.Errorf("unexpected error %q", err) continue } if !strings.Contains(err.Error(), c.err) { t.Errorf("expected error to contain %q but got %q", c.err, err) } } }
explode_data.jsonl/2683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 3798, 17926, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 35500, 353, 3798, 198, 197, 9859, 257, 914, 198, 197, 59403, 197, 197, 515, 298, 35500, 25, 609, 3798, 515, 571, 7568, 9601, 5062, 25, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestSchedulerExtender is an integration test that starts an API master,
// spins up two HTTP scheduler-extender servers (each with its own predicate
// and prioritizer set), builds a scheduler Policy pointing at both, starts
// the scheduler from that config, and finally delegates pod-scheduling
// assertions to DoTestPodScheduling. The extender servers are torn down via
// defers; NOTE(review): extender behavior itself lives in the Extender type
// defined elsewhere in this file.
func TestSchedulerExtender(t *testing.T) { _, s := framework.RunAMaster(nil) defer s.Close() ns := framework.CreateTestingNamespace("scheduler-extender", s, t) defer framework.DeleteTestingNamespace(ns, s, t) clientSet := clientset.NewForConfigOrDie(&restclient.Config{Host: s.URL, ContentConfig: restclient.ContentConfig{GroupVersion: &registered.GroupOrDie(v1.GroupName).GroupVersion}}) extender1 := &Extender{ name: "extender1", predicates: []fitPredicate{machine_1_2_3_Predicate}, prioritizers: []priorityConfig{{machine_2_Prioritizer, 1}}, } es1 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { extender1.serveHTTP(t, w, req) })) defer es1.Close() extender2 := &Extender{ name: "extender2", predicates: []fitPredicate{machine_2_3_5_Predicate}, prioritizers: []priorityConfig{{machine_3_Prioritizer, 1}}, } es2 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) { extender2.serveHTTP(t, w, req) })) defer es2.Close() policy := schedulerapi.Policy{ ExtenderConfigs: []schedulerapi.ExtenderConfig{ { URLPrefix: es1.URL, FilterVerb: filter, PrioritizeVerb: prioritize, Weight: 3, EnableHttps: false, }, { URLPrefix: es2.URL, FilterVerb: filter, PrioritizeVerb: prioritize, Weight: 4, EnableHttps: false, }, }, } policy.APIVersion = registered.GroupOrDie(v1.GroupName).GroupVersion.String() schedulerConfigFactory := factory.NewConfigFactory(clientSet, v1.DefaultSchedulerName, v1.DefaultHardPodAffinitySymmetricWeight, v1.DefaultFailureDomains) schedulerConfig, err := schedulerConfigFactory.CreateFromConfig(policy) if err != nil { t.Fatalf("Couldn't create scheduler config: %v", err) } eventBroadcaster := record.NewBroadcaster() schedulerConfig.Recorder = eventBroadcaster.NewRecorder(v1.EventSource{Component: v1.DefaultSchedulerName}) eventBroadcaster.StartRecordingToSink(&v1core.EventSinkImpl{Interface: clientSet.Core().Events("")}) scheduler.New(schedulerConfig).Run() defer close(schedulerConfig.StopEverything) 
DoTestPodScheduling(ns, t, clientSet) }
explode_data.jsonl/5993
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 877 }
[ 2830, 3393, 38878, 6756, 1659, 1155, 353, 8840, 836, 8, 341, 197, 6878, 274, 1669, 12626, 16708, 1402, 2300, 27907, 340, 16867, 274, 10421, 2822, 84041, 1669, 12626, 7251, 16451, 22699, 445, 63122, 66103, 1659, 497, 274, 11, 259, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExitSignalAndStatus(t *testing.T) { conn := dial(exitSignalAndStatusHandler, t) defer conn.Close() session, err := conn.NewSession() if err != nil { t.Fatalf("Unable to request new session: %v", err) } defer session.Close() if err := session.Shell(); err != nil { t.Fatalf("Unable to execute command: %v", err) } err = session.Wait() if err == nil { t.Fatalf("expected command to fail but it didn't") } e, ok := err.(*ExitError) if !ok { t.Fatalf("expected *ExitError but got %T", err) } if e.Signal() != "TERM" || e.ExitStatus() != 15 { t.Fatalf("expected command to exit with signal TERM and status 15 but got signal %s and status %v", e.Signal(), e.ExitStatus()) } }
explode_data.jsonl/34798
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 263 }
[ 2830, 3393, 15339, 26810, 3036, 2522, 1155, 353, 8840, 836, 8, 341, 32917, 1669, 27860, 88622, 26810, 3036, 2522, 3050, 11, 259, 340, 16867, 4534, 10421, 741, 25054, 11, 1848, 1669, 4534, 7121, 5283, 741, 743, 1848, 961, 2092, 341, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestPackIndex exercises the in-memory DB API end to end:
// InsertPackIndex (happy path and empty-index error), ChunksExist (mixed
// hits/misses and nil payload), GetChunkSize (hit and ErrNotFound miss), and
// InsertFile (valid file, empty name, zero timestamp, zero chunks, and a
// chunk that does not exist). Each InsertFile call uses a distinct file
// checksum via sum.Compute so the inserts are independent.
func TestPackIndex(t *testing.T) { db, err := EmptyInMemory() if err != nil { t.Fatal(err) } // InsertPackIndex test createdAt := time.Now().UTC() assert.NoError(t, db.InsertPackIndex(index, createdAt)) // InsertPackIndex empty -- should get error err = db.InsertPackIndex(object.PackIndex{}, createdAt) assert.Error(t, err) // ChunkExist test sums := []sum.Sum{block0.Sum, block1.Sum, {}} exists, err := db.ChunksExist(sums) assert.NoError(t, err) assert.Equal(t, []bool{true, true, false}, exists) // ChunksExist empty payload exists, err = db.ChunksExist(nil) assert.NoError(t, err) assert.Empty(t, exists) // GetChunkSize test size, err := db.GetChunkSize(block0.Sum) assert.NoError(t, err) assert.Equal(t, block0.ChunkSize, size) // GetChunkSize not found size, err = db.GetChunkSize(sum.Sum{}) assert.Equal(t, ErrNotFound, err) assert.Zero(t, size) // InsertFile test chunks := []object.Chunk{ {Sequence: 0, Size: block0.ChunkSize, Sum: block0.Sum}, {Sequence: 1, Size: block1.ChunkSize, Sum: block1.Sum}, } file := object.File{ Name: "test.txt", CreatedAt: time.Now(), Chunks: chunks, Versioned: true, } fs0 := sum.Compute([]byte{0}) err = db.InsertFile(file, fs0) assert.NoError(t, err) // InsertFile -- error if name is empty file = object.File{ Name: "", CreatedAt: time.Now(), Chunks: chunks, } fs1 := sum.Compute([]byte{1}) err = db.InsertFile(file, fs1) assert.Error(t, err) // InsertFile -- error if time is zero file = object.File{ Name: "test.txt", CreatedAt: time.Time{}, Chunks: chunks, } fs2 := sum.Compute([]byte{2}) err = db.InsertFile(file, fs2) assert.Error(t, err) // InsertFile -- no chunks is fine file = object.File{ Name: "test.txt", CreatedAt: time.Now(), Chunks: []object.Chunk{}, } fs3 := sum.Compute([]byte{3}) err = db.InsertFile(file, fs3) assert.NoError(t, err) // InsertFile -- error if chunk does not exist file = object.File{ Name: "test.txt", CreatedAt: time.Now(), Chunks: []object.Chunk{{Sequence: 0, Size: 100, Sum: sum.Sum{}}}, } err = db.InsertFile(file, 
sum.Compute([]byte{4})) assert.Error(t, err) }
explode_data.jsonl/24260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 924 }
[ 2830, 3393, 30684, 1552, 1155, 353, 8840, 836, 8, 341, 20939, 11, 1848, 1669, 22228, 641, 10642, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 197, 322, 17101, 30684, 1552, 1273, 198, 197, 42765, 1669, 882, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestValidateDatabaseSpanContextInstance runs span validation with every
// database span context field set to a 1025-character string (one past a
// presumed 1024 limit — confirm against the validator's truncation rules).
func TestValidateDatabaseSpanContextInstance(t *testing.T) {
	overLong := strings.Repeat("x", 1025)
	validateSpan(t, func(s *apm.Span) {
		s.Context.SetDatabase(apm.DatabaseSpanContext{
			Instance:  overLong,
			Statement: overLong,
			Type:      overLong,
			User:      overLong,
		})
	})
}
explode_data.jsonl/788
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 17926, 5988, 12485, 1972, 2523, 1155, 353, 8840, 836, 8, 341, 197, 7067, 12485, 1155, 11, 2915, 1141, 353, 391, 76, 85309, 8, 341, 197, 1903, 9328, 4202, 5988, 34420, 76, 25008, 12485, 1972, 515, 298, 197, 2523, 25, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMakeLogicSigMulti covers LogicSig construction and verification under a
// multisig account: a lsig signed by only one of the multisig keys fails
// verification, appending a non-member key errors, appending the second
// member key makes verification pass, a tampered program fails, mixing a
// plain Sig with an Msig fails until the Sig is cleared, and the final lsig
// round-trips through msgpack unchanged.
func TestMakeLogicSigMulti(t *testing.T) { var program []byte var args [][]byte var sk ed25519.PrivateKey var pk MultisigAccount ma, sk1, sk2, _ := makeTestMultisigAccount(t) program = []byte{1, 32, 1, 1, 34} sender, err := ma.Address() require.NoError(t, err) acc := GenerateAccount() sk = acc.PrivateKey lsig, err := MakeLogicSig(program, args, sk1, ma) require.NoError(t, err) require.Equal(t, program, lsig.Logic) require.Equal(t, args, lsig.Args) require.Equal(t, types.Signature{}, lsig.Sig) require.False(t, lsig.Msig.Blank()) verified := VerifyLogicSig(lsig, sender) require.False(t, verified) // not enough signatures err = AppendMultisigToLogicSig(&lsig, sk) require.Error(t, err) // sk not part of multisig err = AppendMultisigToLogicSig(&lsig, sk2) require.NoError(t, err) verified = VerifyLogicSig(lsig, sender) require.True(t, verified) // check that a modified program fails verification modProgram := make([]byte, len(program)) copy(modProgram, program) lsigModified, err := MakeLogicSig(modProgram, args, sk1, ma) require.NoError(t, err) modProgram[3] = 2 verified = VerifyLogicSig(lsigModified, sender) require.False(t, verified) // combine sig and multisig, ensure it fails lsigf, err := MakeLogicSig(program, args, sk, pk) require.NoError(t, err) lsig.Sig = lsigf.Sig verified = VerifyLogicSig(lsig, sender) require.False(t, verified) // sig + msig // remove sig and ensure things are good lsig.Sig = types.Signature{} verified = VerifyLogicSig(lsig, sender) require.True(t, verified) // check serialization var lsig1 types.LogicSig encoded := msgpack.Encode(lsig) err = msgpack.Decode(encoded, &lsig1) require.NoError(t, err) require.Equal(t, lsig, lsig1) }
explode_data.jsonl/2160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 677 }
[ 2830, 3393, 8078, 26751, 47246, 20358, 1155, 353, 8840, 836, 8, 341, 2405, 2025, 3056, 3782, 198, 2405, 2827, 52931, 3782, 198, 2405, 1901, 1578, 17, 20, 20, 16, 24, 87738, 1592, 198, 2405, 22458, 22162, 285, 343, 7365, 271, 197, 1728...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEchoCommand(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() Client := th.Client channel1 := th.BasicChannel echoTestString := "/echo test" r1 := Client.Must(Client.ExecuteCommand(channel1.Id, echoTestString)).(*model.CommandResponse) require.NotNil(t, r1, "Echo command failed to execute") r1 = Client.Must(Client.ExecuteCommand(channel1.Id, "/echo ")).(*model.CommandResponse) require.NotNil(t, r1, "Echo command failed to execute") time.Sleep(100 * time.Millisecond) p1 := Client.Must(Client.GetPostsForChannel(channel1.Id, 0, 2, "")).(*model.PostList) require.Len(t, p1.Order, 2, "Echo command failed to send") }
explode_data.jsonl/26333
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 74994, 4062, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 71724, 1669, 270, 11716, 198, 71550, 16, 1669, 270, 48868, 9629, 271, 5346, 2271, 703, 1669, 3521,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSerializeIdentities(t *testing.T) { id, err := localMsp.GetDefaultSigningIdentity() if err != nil { t.Fatalf("GetSigningIdentity should have succeeded, got err %s", err) return } serializedID, err := id.Serialize() if err != nil { t.Fatalf("Serialize should have succeeded, got err %s", err) return } idBack, err := localMsp.DeserializeIdentity(serializedID) if err != nil { t.Fatalf("DeserializeIdentity should have succeeded, got err %s", err) return } err = localMsp.Validate(idBack) if err != nil { t.Fatalf("The identity should be valid, got err %s", err) return } if !reflect.DeepEqual(id.GetPublicVersion(), idBack) { t.Fatalf("Identities should be equal (%s) (%s)", id, idBack) return } }
explode_data.jsonl/38893
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 278 }
[ 2830, 3393, 15680, 764, 10499, 1155, 353, 8840, 836, 8, 341, 15710, 11, 1848, 1669, 2205, 83816, 2234, 3675, 93358, 18558, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1949, 93358, 18558, 1265, 614, 25331, 11, 2684, 1848, 1018...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCombinationGenerator(t *testing.T) { for n := 0; n <= 10; n++ { for k := 1; k <= n; k++ { combinations := Combinations(n, k) cg := NewCombinationGenerator(n, k) genCombs := make([][]int, 0, len(combinations)) for cg.Next() { genCombs = append(genCombs, cg.Combination(nil)) } if !intSosMatch(combinations, genCombs) { t.Errorf("Combinations and generated combinations do not match. n = %v, k = %v", n, k) } } } }
explode_data.jsonl/41296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 36192, 2554, 12561, 1155, 353, 8840, 836, 8, 341, 2023, 308, 1669, 220, 15, 26, 308, 2651, 220, 16, 15, 26, 308, 1027, 341, 197, 2023, 595, 1669, 220, 16, 26, 595, 2651, 308, 26, 595, 1027, 341, 298, 32810, 73629, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestTCPResetsReceivedIncrement(t *testing.T) { c := context.New(t, defaultMTU) defer c.Cleanup() stats := c.Stack().Stats() want := stats.TCP.ResetsReceived.Value() + 1 iss := seqnum.Value(context.TestInitialSequenceNumber) rcvWnd := seqnum.Size(30000) c.CreateConnected(iss, rcvWnd, -1 /* epRcvBuf */) c.SendPacket(nil, &context.Headers{ SrcPort: context.TestPort, DstPort: c.Port, SeqNum: iss.Add(1), AckNum: c.IRS.Add(1), RcvWnd: rcvWnd, Flags: header.TCPFlagRst, }) if got := stats.TCP.ResetsReceived.Value(); got != want { t.Errorf("got stats.TCP.ResetsReceived.Value() = %d, want = %d", got, want) } }
explode_data.jsonl/75925
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 49896, 1061, 1415, 23260, 38311, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2266, 7121, 1155, 11, 1638, 8505, 52, 340, 16867, 272, 727, 60639, 2822, 79659, 1669, 272, 58646, 1005, 16635, 741, 50780, 1669, 10472, 836, 7123, 83...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestOrderingServiceFirstOperationFailure drives the shared first-operation
// failure scenario for both deliverer consumption styles (Recv- and
// Send-based), then waits on the package-level connWG so all connections
// spawned by the scenario wind down before the test returns.
func TestOrderingServiceFirstOperationFailure(t *testing.T) { testOrderingServiceFirstOperationFailure(t, blockDelivererConsumerWithRecv) testOrderingServiceFirstOperationFailure(t, blockDelivererConsumerWithSend) connWG.Wait() }
explode_data.jsonl/60873
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 4431, 287, 1860, 5338, 8432, 17507, 1155, 353, 8840, 836, 8, 341, 18185, 4431, 287, 1860, 5338, 8432, 17507, 1155, 11, 2504, 16532, 1524, 261, 29968, 2354, 63483, 340, 18185, 4431, 287, 1860, 5338, 8432, 17507, 1155, 11, 250...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSaveToDeckAndNewDeckFromFile(t *testing.T) { os.Remove("_decktesting") d := newDeck() d.saveToFile("_decktesting") loadedFile := newDeckFromFile("_decktesting") if len(loadedFile) != 16 { t.Errorf("Expected 16 cards from the deck, but got %d", len(loadedFile)) } os.Remove("_decktesting") }
explode_data.jsonl/25185
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 8784, 1249, 39368, 3036, 3564, 39368, 43633, 1155, 353, 8840, 836, 8, 341, 25078, 13270, 16975, 33425, 8840, 1138, 2698, 1669, 501, 39368, 741, 2698, 5681, 41550, 16975, 33425, 8840, 1138, 197, 15589, 1703, 1669, 501, 39368, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMalformedConsent(t *testing.T) { perms := permissionsImpl{ cfg: config.GDPR{ HostVendorID: 2, }, fetchVendorList: map[uint8]func(ctx context.Context, id uint16) (vendorlist.VendorList, error){ tcf2SpecVersion: listFetcher(nil), }, } sync, err := perms.HostCookiesAllowed(context.Background(), SignalYes, "BON") assertErr(t, err, true) assertBoolsEqual(t, false, sync) }
explode_data.jsonl/31092
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 29600, 10155, 15220, 306, 1155, 353, 8840, 836, 8, 341, 197, 87772, 1669, 8541, 9673, 515, 197, 50286, 25, 2193, 1224, 35, 6480, 515, 298, 197, 9296, 44691, 915, 25, 220, 17, 345, 197, 197, 1583, 197, 1166, 2995, 44691, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1