text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestBadFilters(t *testing.T) { c, err := New( zap.NewNop(), k8sconfig.APIConfig{}, ExtractionRules{}, Filters{Fields: []FieldFilter{{Op: selection.Exists}}}, []Association{}, Excludes{}, newFakeAPIClientset, NewFakeInformer, NewFakeNamespaceInformer, ) assert.Error(t, err) assert.Nil(t, c) }
explode_data.jsonl/56838
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 17082, 28351, 1155, 353, 8840, 836, 8, 341, 1444, 11, 1848, 1669, 1532, 1006, 197, 20832, 391, 7121, 45, 453, 3148, 197, 16463, 23, 82, 1676, 24922, 2648, 38837, 197, 197, 840, 26425, 26008, 38837, 197, 12727, 8612, 90, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidatePeerConnectionParams(t *testing.T) { defer resetFlags() defer viper.Reset() assert := assert.New(t) cleanup := configtest.SetDevFabricConfigPath(t) defer cleanup() // TLS disabled viper.Set("peer.tls.enabled", false) // failure - more than one peer and TLS root cert - not invoke resetFlags() peerAddresses = []string{"peer0", "peer1"} tlsRootCertFiles = []string{"cert0", "cert1"} err := validatePeerConnectionParameters("query") assert.Error(err) assert.Contains(err.Error(), "command can only be executed against one peer") // success - peer provided and no TLS root certs // TLS disabled resetFlags() peerAddresses = []string{"peer0"} err = validatePeerConnectionParameters("query") assert.NoError(err) assert.Nil(tlsRootCertFiles) // success - more TLS root certs than peers // TLS disabled resetFlags() peerAddresses = []string{"peer0"} tlsRootCertFiles = []string{"cert0", "cert1"} err = validatePeerConnectionParameters("invoke") assert.NoError(err) assert.Nil(tlsRootCertFiles) // success - multiple peers and no TLS root certs - invoke // TLS disabled resetFlags() peerAddresses = []string{"peer0", "peer1"} err = validatePeerConnectionParameters("invoke") assert.NoError(err) assert.Nil(tlsRootCertFiles) // TLS enabled viper.Set("peer.tls.enabled", true) // failure - uneven number of peers and TLS root certs - invoke // TLS enabled resetFlags() peerAddresses = []string{"peer0", "peer1"} tlsRootCertFiles = []string{"cert0"} err = validatePeerConnectionParameters("invoke") assert.Error(err) assert.Contains(err.Error(), fmt.Sprintf("number of peer addresses (%d) does not match the number of TLS root cert files (%d)", len(peerAddresses), len(tlsRootCertFiles))) // success - more than one peer and TLS root certs - invoke // TLS enabled resetFlags() peerAddresses = []string{"peer0", "peer1"} tlsRootCertFiles = []string{"cert0", "cert1"} err = validatePeerConnectionParameters("invoke") assert.NoError(err) // failure - connection profile doesn't exist resetFlags() 
connectionProfile = "blah" err = validatePeerConnectionParameters("invoke") assert.Error(err) assert.Contains(err.Error(), "error reading connection profile") // failure - connection profile has peer defined in channel config but // not in peer config resetFlags() channelID = "mychannel" connectionProfile = "testdata/connectionprofile-uneven.yaml" err = validatePeerConnectionParameters("invoke") assert.Error(err) assert.Contains(err.Error(), "defined in the channel config but doesn't have associated peer config") // success - connection profile exists resetFlags() channelID = "mychannel" connectionProfile = "testdata/connectionprofile.yaml" err = validatePeerConnectionParameters("invoke") assert.NoError(err) }
explode_data.jsonl/46375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 856 }
[ 2830, 3393, 17926, 30888, 4526, 4870, 1155, 353, 8840, 836, 8, 341, 16867, 7585, 9195, 741, 16867, 95132, 36660, 741, 6948, 1669, 2060, 7121, 1155, 340, 1444, 60639, 1669, 2193, 1944, 4202, 14592, 81731, 2648, 1820, 1155, 340, 16867, 2129...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShell(t *testing.T) { b := newBuilderWithMockBackend() sb := newDispatchRequest(b, '`', nil, newBuildArgs(make(map[string]*string)), newStagesBuildResults()) shellCmd := "powershell" cmd := &instructions.ShellCommand{Shell: strslice.StrSlice{shellCmd}} err := dispatch(sb, cmd) require.NoError(t, err) expectedShell := strslice.StrSlice([]string{shellCmd}) assert.Equal(t, expectedShell, sb.state.runConfig.Shell) }
explode_data.jsonl/34878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 25287, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 501, 3297, 2354, 11571, 29699, 741, 24842, 1669, 501, 11283, 1900, 1883, 11, 55995, 516, 2092, 11, 501, 11066, 4117, 36944, 9147, 14032, 8465, 917, 5731, 501, 623, 1134, 11066...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestElasticsearch(t *testing.T) { f := NewElasticFetcher(getTestClient()) license, err := f.Fetch() if !assert.NoError(t, err) { return } assert.Equal(t, Basic, license.Type) assert.Equal(t, Active, license.Status) assert.NotEmpty(t, license.UUID) }
explode_data.jsonl/75316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 36, 51179, 1836, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 36, 51179, 97492, 5433, 2271, 2959, 2398, 197, 13266, 11, 1848, 1669, 282, 78506, 741, 743, 753, 2207, 35699, 1155, 11, 1848, 8, 341, 197, 853, 198, 197, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInstrumentationLibraryMetricsSlice_MoveAndAppendTo(t *testing.T) { // Test MoveAndAppendTo to empty expectedSlice := generateTestInstrumentationLibraryMetricsSlice() dest := NewInstrumentationLibraryMetricsSlice() src := generateTestInstrumentationLibraryMetricsSlice() src.MoveAndAppendTo(dest) assert.EqualValues(t, generateTestInstrumentationLibraryMetricsSlice(), dest) assert.EqualValues(t, 0, src.Len()) assert.EqualValues(t, expectedSlice.Len(), dest.Len()) // Test MoveAndAppendTo empty slice src.MoveAndAppendTo(dest) assert.EqualValues(t, generateTestInstrumentationLibraryMetricsSlice(), dest) assert.EqualValues(t, 0, src.Len()) assert.EqualValues(t, expectedSlice.Len(), dest.Len()) // Test MoveAndAppendTo not empty slice generateTestInstrumentationLibraryMetricsSlice().MoveAndAppendTo(dest) assert.EqualValues(t, 2*expectedSlice.Len(), dest.Len()) for i := 0; i < expectedSlice.Len(); i++ { assert.EqualValues(t, expectedSlice.At(i), dest.At(i)) assert.EqualValues(t, expectedSlice.At(i), dest.At(i+expectedSlice.Len())) } }
explode_data.jsonl/19498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 370 }
[ 2830, 3393, 56324, 367, 16915, 27328, 33236, 66352, 3036, 23877, 1249, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 14561, 3036, 23877, 1249, 311, 4287, 198, 42400, 33236, 1669, 6923, 2271, 56324, 367, 16915, 27328, 33236, 741, 49616, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAccessibleReposEnv_MirrorRepos(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) org := AssertExistsAndLoadBean(t, &User{ID: 3}).(*User) testSuccess := func(userID int64, expectedRepoIDs []int64) { env, err := org.AccessibleReposEnv(userID) assert.NoError(t, err) repos, err := env.MirrorRepos() assert.NoError(t, err) expectedRepos := make([]*Repository, len(expectedRepoIDs)) for i, repoID := range expectedRepoIDs { expectedRepos[i] = AssertExistsAndLoadBean(t, &Repository{ID: repoID}).(*Repository) } assert.Equal(t, expectedRepos, repos) } testSuccess(2, []int64{5}) testSuccess(4, []int64{}) }
explode_data.jsonl/71076
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 55390, 693, 966, 14359, 1245, 28812, 693, 966, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 2398, 87625, 1669, 5319, 15575, 3036, 5879, 10437, 1155, 11, 609, 1474, 90, 915, 25, 220, 18, 16630, 40...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMarathonSDHandleError(t *testing.T) { var errTesting = errors.New("testing failure") ch, md := newTestDiscovery(func(url string) (*AppList, error) { return nil, errTesting }) go func() { select { case tg := <-ch: t.Fatalf("Got group: %s", tg) default: } }() err := md.updateServices(context.Background(), ch) if err != errTesting { t.Fatalf("Expected error: %s", err) } }
explode_data.jsonl/65402
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 12061, 23941, 5491, 6999, 1454, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 16451, 284, 5975, 7121, 445, 8840, 7901, 1138, 23049, 11, 10688, 1669, 501, 2271, 67400, 18552, 6522, 914, 8, 4609, 2164, 852, 11, 1465, 8, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSearchPostsFromUser(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() Client := th.Client th.LoginTeamAdmin() user := th.CreateUser() th.LinkUserToTeam(user, th.BasicTeam) th.App.AddUserToChannel(user, th.BasicChannel) th.App.AddUserToChannel(user, th.BasicChannel2) message := "sgtitlereview with space" _ = th.CreateMessagePost(message) Client.Logout() th.LoginBasic2() message = "sgtitlereview\n with return" _ = th.CreateMessagePostWithClient(Client, th.BasicChannel2, message) posts, _ := Client.SearchPosts(th.BasicTeam.Id, "from: "+th.TeamAdminUser.Username, false) require.Lenf(t, posts.Order, 2, "wrong number of posts for search 'from: %v'", th.TeamAdminUser.Username) posts, _ = Client.SearchPosts(th.BasicTeam.Id, "from: "+th.BasicUser2.Username, false) require.Lenf(t, posts.Order, 1, "wrong number of posts for search 'from: %v", th.BasicUser2.Username) posts, _ = Client.SearchPosts(th.BasicTeam.Id, "from: "+th.BasicUser2.Username+" sgtitlereview", false) require.Lenf(t, posts.Order, 1, "wrong number of posts for search 'from: %v'", th.BasicUser2.Username) message = "hullo" _ = th.CreateMessagePost(message) posts, _ = Client.SearchPosts(th.BasicTeam.Id, "from: "+th.BasicUser2.Username+" in:"+th.BasicChannel.Name, false) require.Len(t, posts.Order, 1, "wrong number of posts for search 'from: %v in:", th.BasicUser2.Username, th.BasicChannel.Name) Client.Login(user.Email, user.Password) // wait for the join/leave messages to be created for user3 since they're done asynchronously time.Sleep(100 * time.Millisecond) posts, _ = Client.SearchPosts(th.BasicTeam.Id, "from: "+th.BasicUser2.Username, false) require.Lenf(t, posts.Order, 2, "wrong number of posts for search 'from: %v'", th.BasicUser2.Username) posts, _ = Client.SearchPosts(th.BasicTeam.Id, "from: "+th.BasicUser2.Username+" from: "+user.Username, false) require.Lenf(t, posts.Order, 2, "wrong number of posts for search 'from: %v from: %v'", th.BasicUser2.Username, user.Username) posts, _ = 
Client.SearchPosts(th.BasicTeam.Id, "from: "+th.BasicUser2.Username+" from: "+user.Username+" in:"+th.BasicChannel2.Name, false) require.Len(t, posts.Order, 1, "wrong number of posts") message = "coconut" _ = th.CreateMessagePostWithClient(Client, th.BasicChannel2, message) posts, _ = Client.SearchPosts(th.BasicTeam.Id, "from: "+th.BasicUser2.Username+" from: "+user.Username+" in:"+th.BasicChannel2.Name+" coconut", false) require.Len(t, posts.Order, 1, "wrong number of posts") }
explode_data.jsonl/5260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 908 }
[ 2830, 3393, 5890, 19631, 3830, 1474, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 741, 71724, 1669, 270, 11716, 271, 70479, 32499, 14597, 7210, 741, 19060, 1669, 270, 7251, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCaptivePrepareRange_ErrGettingRootHAS(t *testing.T) { ctx := context.Background() mockArchive := &historyarchive.MockArchive{} mockArchive. On("GetRootHAS"). Return(historyarchive.HistoryArchiveState{}, errors.New("transient error")) captiveBackend := CaptiveStellarCore{ archive: mockArchive, } err := captiveBackend.PrepareRange(ctx, BoundedRange(100, 200)) assert.EqualError(t, err, "error starting prepare range: opening subprocess: error getting latest checkpoint sequence: error getting root HAS: transient error") err = captiveBackend.PrepareRange(ctx, UnboundedRange(100)) assert.EqualError(t, err, "error starting prepare range: opening subprocess: error getting latest checkpoint sequence: error getting root HAS: transient error") mockArchive.AssertExpectations(t) }
explode_data.jsonl/7316
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 244 }
[ 2830, 3393, 34, 27781, 50590, 6046, 93623, 28655, 8439, 78230, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 77333, 42502, 1669, 609, 18844, 16019, 24664, 42502, 16094, 77333, 42502, 624, 197, 86391, 445, 1949, 8439, 78230, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateLoggerLevel(t *testing.T) { var settings *Settings settings, err := initLogger() if err != nil { t.Fatal(err) } // Logrus levels in int // panic: 0 // fatal: 1 // error: 2 // warn: 3 // info: 4 // debug: 5 // trace: 6 settings.level = "DEBUG" log, err := createLogger(settings) if err != nil { t.Fatal(err) } if log.Level != 5 { t.Errorf("Log level doesn't match, returned [%d], expected [%d].", log.Level, 4) } }
explode_data.jsonl/62512
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 206 }
[ 2830, 3393, 4021, 7395, 4449, 1155, 353, 8840, 836, 8, 341, 2405, 5003, 353, 6086, 271, 62930, 11, 1848, 1669, 2930, 7395, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 197, 322, 2835, 20341, 5866, 304, 526, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInitializeAndGetASMSecretResource(t *testing.T) { secret := apicontainer.Secret{ Provider: "asm", Name: "secret", Region: "us-west-2", ValueFrom: "/test/secretName", } container := &apicontainer.Container{ Name: "myName", Image: "image:tag", Secrets: []apicontainer.Secret{secret}, TransitionDependenciesMap: make(map[apicontainerstatus.ContainerStatus]apicontainer.TransitionDependencySet), } container1 := &apicontainer.Container{ Name: "myName", Image: "image:tag", Secrets: nil, TransitionDependenciesMap: make(map[apicontainerstatus.ContainerStatus]apicontainer.TransitionDependencySet), } task := &Task{ Arn: "test", ResourcesMapUnsafe: make(map[string][]taskresource.TaskResource), Containers: []*apicontainer.Container{container, container1}, } ctrl := gomock.NewController(t) defer ctrl.Finish() credentialsManager := mock_credentials.NewMockManager(ctrl) asmClientCreator := mock_factory.NewMockClientCreator(ctrl) resFields := &taskresource.ResourceFields{ ResourceFieldsCommon: &taskresource.ResourceFieldsCommon{ ASMClientCreator: asmClientCreator, CredentialsManager: credentialsManager, }, } task.initializeASMSecretResource(credentialsManager, resFields) resourceDep := apicontainer.ResourceDependency{ Name: asmsecret.ResourceName, RequiredStatus: resourcestatus.ResourceStatus(asmsecret.ASMSecretCreated), } assert.Equal(t, resourceDep, task.Containers[0].TransitionDependenciesMap[apicontainerstatus.ContainerCreated].ResourceDependencies[0]) assert.Equal(t, 0, len(task.Containers[1].TransitionDependenciesMap)) _, ok := task.getASMSecretsResource() assert.True(t, ok) }
explode_data.jsonl/37247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 757 }
[ 2830, 3393, 9928, 97726, 1911, 4826, 50856, 4783, 1155, 353, 8840, 836, 8, 341, 197, 20474, 1669, 1443, 51160, 1743, 74779, 515, 197, 197, 5179, 25, 220, 330, 10530, 756, 197, 21297, 25, 414, 330, 20474, 756, 197, 197, 14091, 25, 262,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshalYamlMap_merge_map(t *testing.T) { // editorconfig-checker-disable const input = ` other: &other a: b c: foo: bar: foo <<: *other ` // editorconfig-checker-enable var out interface{} assert.NoError(t, yaml.Unmarshal([]byte(input), &out)) // assert.IsType(t, map[string]interface{}{}, out) n := new(yaml.Node) assert.NoError(t, yaml.Unmarshal([]byte(input), n)) n = prepareYamlNode(n) assert.EqualValues(t, 8, len(n.Content)) pairs, err := unmarshalYamlMap(n.Content) assert.NoError(t, err) assert.EqualValues(t, len(pairs), 5) assert.Equal(t, "other", pairs[0][0].Value) assert.Equal(t, "", pairs[0][1].Value) assert.Equal(t, yaml.MappingNode, pairs[0][1].Kind) assert.Equal(t, "foo", pairs[1][0].Value) assert.Equal(t, "", pairs[1][1].Value) assert.Equal(t, yaml.ScalarNode, pairs[1][1].Kind) assert.Equal(t, "bar", pairs[2][0].Value) assert.Equal(t, "foo", pairs[2][1].Value) assert.Equal(t, "a", pairs[3][0].Value) assert.Equal(t, "b", pairs[3][1].Value) assert.Equal(t, "c", pairs[4][0].Value) assert.Equal(t, "", pairs[4][1].Value) assert.Equal(t, yaml.ScalarNode, pairs[4][1].Kind) }
explode_data.jsonl/30451
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 512 }
[ 2830, 3393, 1806, 27121, 56, 9467, 2227, 20888, 5376, 1155, 353, 8840, 836, 8, 341, 197, 322, 6440, 1676, 15934, 261, 21400, 198, 4777, 1946, 284, 22074, 1575, 25, 609, 1575, 198, 220, 264, 25, 293, 198, 220, 272, 1447, 7975, 510, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Run_Positive_OneDesiredVolumeAttachThenDetachWithUnmountedVolume(t *testing.T) { // Arrange volumePluginMgr, fakePlugin := volumetesting.GetTestVolumePluginMgr(t) dsw := cache.NewDesiredStateOfWorld(volumePluginMgr) asw := cache.NewActualStateOfWorld(volumePluginMgr) fakeKubeClient := controllervolumetesting.CreateTestClient() fakeRecorder := &record.FakeRecorder{} ad := operationexecutor.NewOperationExecutor(operationexecutor.NewOperationGenerator(fakeKubeClient, volumePluginMgr, fakeRecorder, false /* checkNodeCapabilitiesBeforeMount */)) nsu := statusupdater.NewFakeNodeStatusUpdater(false /* returnError */) reconciler := NewReconciler( reconcilerLoopPeriod, maxWaitForUnmountDuration, syncLoopPeriod, false, dsw, asw, ad, nsu) podName := "pod-uid" volumeName := v1.UniqueVolumeName("volume-name") volumeSpec := controllervolumetesting.GetTestVolumeSpec(string(volumeName), volumeName) nodeName := k8stypes.NodeName("node-name") dsw.AddNode(nodeName) volumeExists := dsw.VolumeExists(volumeName, nodeName) if volumeExists { t.Fatalf( "Volume %q/node %q should not exist, but it does.", volumeName, nodeName) } generatedVolumeName, podAddErr := dsw.AddPod(types.UniquePodName(podName), controllervolumetesting.NewPod(podName, podName), volumeSpec, nodeName) if podAddErr != nil { t.Fatalf("AddPod failed. 
Expected: <no error> Actual: <%v>", podAddErr) } // Act ch := make(chan struct{}) go reconciler.Run(ch) defer close(ch) // Assert waitForNewAttacherCallCount(t, 1 /* expectedCallCount */, fakePlugin) verifyNewAttacherCallCount(t, false /* expectZeroNewAttacherCallCount */, fakePlugin) waitForAttachCallCount(t, 1 /* expectedAttachCallCount */, fakePlugin) verifyNewDetacherCallCount(t, true /* expectZeroNewDetacherCallCount */, fakePlugin) waitForDetachCallCount(t, 0 /* expectedDetachCallCount */, fakePlugin) // Act dsw.DeletePod(types.UniquePodName(podName), generatedVolumeName, nodeName) volumeExists = dsw.VolumeExists(generatedVolumeName, nodeName) if volumeExists { t.Fatalf( "Deleted pod %q from volume %q/node %q. Volume should also be deleted but it still exists.", podName, generatedVolumeName, nodeName) } asw.SetVolumeMountedByNode(generatedVolumeName, nodeName, true /* mounted */) asw.SetVolumeMountedByNode(generatedVolumeName, nodeName, false /* mounted */) // Assert waitForNewDetacherCallCount(t, 1 /* expectedCallCount */, fakePlugin) verifyNewAttacherCallCount(t, false /* expectZeroNewAttacherCallCount */, fakePlugin) waitForAttachCallCount(t, 1 /* expectedAttachCallCount */, fakePlugin) verifyNewDetacherCallCount(t, false /* expectZeroNewDetacherCallCount */, fakePlugin) waitForDetachCallCount(t, 1 /* expectedDetachCallCount */, fakePlugin) }
explode_data.jsonl/39430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 936 }
[ 2830, 3393, 84158, 44246, 3404, 68201, 4896, 2690, 18902, 30485, 12209, 89306, 2354, 59539, 291, 18902, 1155, 353, 8840, 836, 8, 341, 197, 322, 40580, 198, 5195, 4661, 11546, 25567, 11, 12418, 11546, 1669, 62820, 57824, 287, 2234, 2271, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestContextPathParam(t *testing.T) { e := New() req := test.NewRequest(GET, "/", nil) c := e.NewContext(req, nil) // ParamNames c.SetParamNames("uid", "fid") assert.EqualValues(t, []string{"uid", "fid"}, c.ParamNames()) // ParamValues c.SetParamValues("101", "501") assert.EqualValues(t, []string{"101", "501"}, c.ParamValues()) // P assert.Equal(t, "101", c.P(0)) // Param assert.Equal(t, "501", c.Param("fid")) }
explode_data.jsonl/35872
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 1972, 93492, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 1532, 741, 24395, 1669, 1273, 75274, 62918, 11, 64657, 2092, 340, 1444, 1669, 384, 7121, 1972, 6881, 11, 2092, 692, 197, 322, 6991, 7980, 198, 1444, 4202, 2001, 7980, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSelectors(t *testing.T) { s1 := Selector{Value: "text.comment.line.py"} s2 := Selector{Value: "text.comment"} s3 := Selector{Value: "text.comment.line.rb"} assert.Equal(t, []string{"text", "comment", "line", "py"}, s1.Sections()) assert.False(t, s2.Has("py")) for _, part := range s1.Sections() { assert.True(t, s1.Has(part)) } assert.True(t, s3.Contains(s3)) assert.True(t, s1.Contains(s2)) assert.False(t, s1.Contains(s3)) assert.True(t, s2.Equal(s2)) assert.False(t, s2.Equal(s1)) }
explode_data.jsonl/21110
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 96995, 1155, 353, 8840, 836, 8, 341, 1903, 16, 1669, 49610, 90, 1130, 25, 330, 1318, 30970, 10932, 7197, 16707, 1903, 17, 1669, 49610, 90, 1130, 25, 330, 1318, 30970, 16707, 1903, 18, 1669, 49610, 90, 1130, 25, 330, 1318, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdatesExistingConfigMap(t *testing.T) { t.Parallel() oldData, err := yaml.Marshal(orch_meta.ServiceProperties{ ResourceOwner: "old owner", BusinessUnit: "old unit", }) require.NoError(t, err) oldRelData, err := yaml.Marshal(releases.ResolvedRelease{ ServiceName: "svc", Label: "", ResolvedData: map[string]map[string]interface{}{}, }) require.NoError(t, err) existingNS := &core_v1.Namespace{ TypeMeta: meta_v1.TypeMeta{ Kind: k8s.NamespaceKind, APIVersion: core_v1.SchemeGroupVersion.String(), }, ObjectMeta: meta_v1.ObjectMeta{ Name: namespaceName, Labels: map[string]string{ voyager.ServiceNameLabel: serviceName, }, }, } existingCM := &core_v1.ConfigMap{ ObjectMeta: meta_v1.ObjectMeta{ Name: apisynchronization.DefaultServiceMetadataConfigMapName, Namespace: existingNS.GetName(), UID: "some-uid", ResourceVersion: "some-resource-version", }, Data: map[string]string{ orch_meta.ConfigMapConfigKey: string(oldData), }, } existingRelCM := &core_v1.ConfigMap{ ObjectMeta: meta_v1.ObjectMeta{ Name: releases.DefaultReleaseMetadataConfigMapName, Namespace: existingNS.GetName(), UID: "some-rel-uid", ResourceVersion: "some-resource-rel-version", }, Data: map[string]string{ releases.DataKey: string(oldRelData), }, } tc := testCase{ serviceName: serviceNameVoy, mainClientObjects: []runtime.Object{existingNS, existingCM, existingRelCM, existingDefaultDockerSecret()}, ns: existingNS, test: func(t *testing.T, cntrlr *Controller, ctx *ctrl.ProcessContext, tc *testCase) { service := &creator_v1.Service{ ObjectMeta: meta_v1.ObjectMeta{ Name: serviceName, }, Spec: creator_v1.ServiceSpec{ ResourceOwner: "somebody", BusinessUnit: "new unit", Metadata: creator_v1.ServiceMetadata{ PagerDuty: &creator_v1.PagerDutyMetadata{}, }, }, } expected := basicServiceProperties(service, voyager.EnvTypeDev) tc.scFake.On("GetService", mock.Anything, auth.NoUser(), serviceNameSc).Return(service, nil) _, err := cntrlr.Process(ctx) require.NoError(t, err) actions := tc.mainFake.Actions() // Verifying 
service metadata config map has been updated cm, _ := findUpdatedConfigMap(actions, existingNS.GetName(), apisynchronization.DefaultServiceMetadataConfigMapName) require.NotNil(t, cm) require.Equal(t, cm.Name, apisynchronization.DefaultServiceMetadataConfigMapName) assert.Equal(t, existingCM.GetUID(), cm.GetUID()) assert.Equal(t, existingCM.GetResourceVersion(), cm.GetResourceVersion()) assert.Contains(t, cm.Data, orch_meta.ConfigMapConfigKey) data := cm.Data[orch_meta.ConfigMapConfigKey] var actual orch_meta.ServiceProperties err = yaml.Unmarshal([]byte(data), &actual) require.NoError(t, err) assert.Equal(t, expected, actual) // Verifying releases config map has been updated relCM, _ := findUpdatedConfigMap(actions, existingNS.GetName(), releases.DefaultReleaseMetadataConfigMapName) require.NotNil(t, relCM) require.Equal(t, relCM.Name, releases.DefaultReleaseMetadataConfigMapName) assert.Equal(t, existingRelCM.GetUID(), relCM.GetUID()) assert.Equal(t, existingRelCM.GetResourceVersion(), relCM.GetResourceVersion()) assert.Contains(t, relCM.Data, releases.DataKey) data = relCM.Data[releases.DataKey] var actualRelResponse releases.ResolvedReleaseData err = yaml.Unmarshal([]byte(data), &actualRelResponse) require.NoError(t, err) assert.Equal(t, defaultReleaseResolveResponse(serviceNameVoy).ResolvedData, actualRelResponse) assert.Equal(t, resolveParams(tc.clusterLocation, serviceNameVoy), tc.releasesFake.calledParams) }, } tc.run(t) }
explode_data.jsonl/4270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1516 }
[ 2830, 3393, 37091, 53067, 2648, 2227, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 61828, 1043, 11, 1848, 1669, 32246, 37271, 7, 21584, 13381, 13860, 7903, 515, 197, 79487, 13801, 25, 330, 813, 6372, 756, 197, 12791, 2063, 456...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWithFloat32_SetsTheBody(t *testing.T) { r, err := Prepare(&http.Request{}, WithFloat32(42.0)) if err != nil { t.Fatalf("autorest: WithFloat32 failed with error (%v)", err) } s, err := ioutil.ReadAll(r.Body) if err != nil { t.Fatalf("autorest: WithFloat32 failed with error (%v)", err) } if r.ContentLength != int64(len(fmt.Sprintf("%v", 42.0))) { t.Fatalf("autorest: WithFloat32 set Content-Length to %v, expected %v", r.ContentLength, int64(len(fmt.Sprintf("%v", 42.0)))) } v, err := strconv.ParseFloat(string(s), 32) if err != nil || float32(v) != float32(42.0) { t.Fatalf("autorest: WithFloat32 incorrectly encoded the boolean as %v", s) } }
explode_data.jsonl/20966
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 2354, 5442, 18, 17, 1098, 1415, 785, 5444, 1155, 353, 8840, 836, 8, 972, 7000, 11, 1848, 1669, 31166, 2099, 1254, 9659, 6257, 1871, 197, 197, 2354, 5442, 18, 17, 7, 19, 17, 13, 15, 5784, 743, 1848, 961, 2092, 972, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCreateJob_NullWorkflowSpec(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) defer store.Close() manager := NewResourceManager(store) job := &api.Job{ Name: "pp 1", Enabled: true, PipelineSpec: &api.PipelineSpec{ WorkflowManifest: string("null"), // this situation occurs for real when the manifest file disappears from object store in some way due to retention policy or manual deletion. Parameters: []*api.Parameter{ {Name: "param1", Value: "world"}, }, }, } _, err := manager.CreateJob(job) assert.NotNil(t, err) assert.Contains(t, err.Error(), "Failed to fetch workflow spec manifest.: ResourceNotFoundError: WorkflowSpecManifest pp 1 not found.") }
explode_data.jsonl/77040
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 4021, 12245, 55534, 62768, 8327, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 52317, 2959, 2043, 2195, 62396, 67811, 7121, 52317, 1462, 2461, 44338, 2398, 16867, 3553, 10421, 741, 92272, 1669, 1532, 32498, 31200, 340, 68577,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthority(t *testing.T) { authority := smarthome.Authority{ ClientID: "clientID", ClientSecret: "clientSecret", RestrictedUsers: []string{"somebody@mail.com"}, } assert.Equal(t, "clientID", authority.GetClientID()) assert.Equal(t, "clientSecret", authority.GetClientSecret()) assert.Nil(t, authority.AcceptGrant("somebody@mail.com", "", nil)) assert.Errorf(t, authority.AcceptGrant("anybody@mail.com", "", nil), "Restricted users only") }
explode_data.jsonl/1248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 45532, 1155, 353, 8840, 836, 8, 341, 197, 84454, 1669, 1525, 46501, 635, 33858, 487, 515, 197, 71724, 915, 25, 286, 330, 2972, 915, 756, 197, 71724, 19773, 25, 262, 330, 2972, 19773, 756, 197, 11143, 15111, 12770, 7137, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintOnError(t *testing.T) { t.Run("PrintOnError", func(t *testing.T) { result := captureStdout(func(w io.Writer) { pterm.PrintOnError(errors.New("hello world")) }) testza.AssertContains(t, result, "hello world") }) }
explode_data.jsonl/49142
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 8994, 74945, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 8994, 74945, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 9559, 1669, 12322, 22748, 411, 18552, 3622, 6399, 47838, 8, 341, 298, 60796, 4195, 7918, 74945, 38881, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApp01maVendorHndlrRowNext(t *testing.T) { var td *TestData_App01maVendor var rcd App01maVendor.App01maVendor t.Logf("TestVendor.RowNext()...\n") td = &TestData_App01maVendor{} td.Setup(t) // Build and execute a URL. rcd.TestData(0) // "A" keys := rcd.KeysToValue() t.Logf("\tSetting up for next with keys of (%d)\"%s\"\n", len(keys), keys) urlStr := fmt.Sprintf("/Vendor/next?%s", keys) td.GetReq(urlStr, "") t.Logf("TestVendor.RowNext() - End of Test\n\n\n") }
explode_data.jsonl/78277
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 270 }
[ 2830, 3393, 2164, 15, 16, 1728, 44691, 39, 303, 19018, 3102, 5847, 1155, 353, 8840, 836, 8, 341, 262, 762, 17941, 688, 353, 83920, 36117, 15, 16, 1728, 44691, 198, 262, 762, 435, 4385, 260, 1845, 15, 16, 1728, 44691, 5105, 15, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInterceptor_Process_ShouldContinue verifies that the Bitbucket
// interceptor lets each request through (res.Continue == true) for every
// accepted combination of secret/signature validation and event-type
// filtering, including a nil body.
func TestInterceptor_Process_ShouldContinue(t *testing.T) {
	var (
		emptyJSONBody = json.RawMessage(`{}`)
		secretToken   = "secret"
	)
	// HMAC signature of the empty JSON body under secretToken; used by the
	// cases that validate the X-Hub-Signature header.
	emptyBodyHMACSignature := test.HMACHeader(t, secretToken, emptyJSONBody)
	tests := []struct {
		name              string
		interceptorParams *triggersv1.BitbucketInterceptor
		payload           []byte
		secret            *corev1.Secret
		signature         string
		eventType         string
	}{{
		// No secret configured: signature is not checked at all.
		name:              "no secret",
		interceptorParams: &triggersv1.BitbucketInterceptor{},
		payload:           json.RawMessage(`{}`),
		signature:         "foo",
	}, {
		name: "valid header for secret",
		interceptorParams: &triggersv1.BitbucketInterceptor{
			SecretRef: &triggersv1.SecretRef{
				SecretName: "mysecret",
				SecretKey:  "token",
			},
		},
		signature: emptyBodyHMACSignature,
		secret: &corev1.Secret{
			ObjectMeta: metav1.ObjectMeta{
				Name: "mysecret",
			},
			Data: map[string][]byte{
				"token": []byte(secretToken),
			},
		},
		payload: json.RawMessage(`{}`),
	}, {
		name: "matching event",
		interceptorParams: &triggersv1.BitbucketInterceptor{
			EventTypes: []string{"pr:opened", "repo:refs_changed"},
		},
		payload:   json.RawMessage(`{}`),
		eventType: "repo:refs_changed",
	}, {
		name: "valid header for secret and matching event",
		interceptorParams: &triggersv1.BitbucketInterceptor{
			SecretRef: &triggersv1.SecretRef{
				SecretName: "mysecret",
				SecretKey:  "token",
			},
			EventTypes: []string{"pr:opened", "repo:refs_changed"},
		},
		signature: emptyBodyHMACSignature,
		secret: &corev1.Secret{
			ObjectMeta: metav1.ObjectMeta{
				Name: "mysecret",
			},
			Data: map[string][]byte{
				"token": []byte(secretToken),
			},
		},
		eventType: "repo:refs_changed",
		payload:   json.RawMessage(`{}`),
	}, {
		name:              "nil body does not panic",
		interceptorParams: &triggersv1.BitbucketInterceptor{},
		payload:           nil,
		signature:         "foo",
	}}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ctx, _ := rtesting.SetupFakeContext(t)
			logger := zaptest.NewLogger(t)
			kubeClient := fakekubeclient.Get(ctx)
			w := &Interceptor{
				KubeClientSet: kubeClient,
				Logger:        logger.Sugar(),
			}
			req := &triggersv1.InterceptorRequest{
				Body: string(tt.payload),
				Header: http.Header{
					"Content-Type": []string{"application/json"},
				},
				InterceptorParams: map[string]interface{}{
					"eventTypes": tt.interceptorParams.EventTypes,
					"secretRef":  tt.interceptorParams.SecretRef,
				},
				Context: &triggersv1.TriggerContext{
					EventURL:  "https://testing.example.com",
					EventID:   "abcde",
					TriggerID: "namespaces/default/triggers/example-trigger",
				},
			}
			// Bitbucket delivers the event type and HMAC via these headers.
			if tt.eventType != "" {
				req.Header["X-Event-Key"] = []string{tt.eventType}
			}
			if tt.signature != "" {
				req.Header["X-Hub-Signature"] = []string{tt.signature}
			}
			if tt.secret != nil {
				// Seed the fake kube client with the referenced secret so the
				// interceptor can resolve SecretRef.
				if _, err := kubeClient.CoreV1().Secrets(metav1.NamespaceDefault).Create(ctx, tt.secret, metav1.CreateOptions{}); err != nil {
					t.Error(err)
				}
			}
			res := w.Process(ctx, req)
			if !res.Continue {
				t.Fatalf("Interceptor.Process() expected res.Continue to be true but got %t. \nStatus.Err(): %v", res.Continue, res.Status.Err())
			}
		})
	}
}
explode_data.jsonl/61184
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1494 }
[ 2830, 3393, 32786, 70241, 36578, 616, 23526, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 197, 3194, 5370, 5444, 284, 2951, 50575, 2052, 5809, 90, 27085, 197, 197, 20474, 3323, 256, 284, 330, 20474, 698, 197, 340, 197, 3194, 5444, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestListDocksWithFilter exercises fc.FilterAndSort over DockSpec lists:
// filtering by driverName, sorting by name in both directions, and
// limit/offset pagination.
func TestListDocksWithFilter(t *testing.T) {
	type test struct {
		input    []*model.DockSpec
		param    map[string][]string
		expected []*model.DockSpec
	}
	tests := []test{
		// select by storage type
		{
			input: []*model.DockSpec{
				&SampleMultiDocks[0],
				&SampleMultiDocks[1],
				&SampleMultiDocks[2],
			},
			param: map[string][]string{
				"driverName": {"cinder"},
			},
			expected: []*model.DockSpec{
				&SampleMultiDocks[1],
			},
		},
		// sort by name asc
		{
			input: []*model.DockSpec{
				&SampleMultiDocks[0],
				&SampleMultiDocks[1],
				&SampleMultiDocks[2],
			},
			param: map[string][]string{
				"sortKey": {"name"},
				"sortDir": {"asc"},
			},
			expected: []*model.DockSpec{
				&SampleMultiDocks[0],
				&SampleMultiDocks[2],
				&SampleMultiDocks[1],
			},
		},
		// sort by name desc
		{
			input: []*model.DockSpec{
				&SampleMultiDocks[0],
				&SampleMultiDocks[1],
				&SampleMultiDocks[2],
			},
			param: map[string][]string{
				"sortKey": {"name"},
				"sortDir": {"desc"},
			},
			expected: []*model.DockSpec{
				&SampleMultiDocks[1],
				&SampleMultiDocks[2],
				&SampleMultiDocks[0],
			},
		},
		// limit is 1
		{
			input: []*model.DockSpec{
				&SampleMultiDocks[0],
				&SampleMultiDocks[1]},
			param: map[string][]string{
				"limit":  {"1"},
				"offset": {"1"},
			},
			expected: []*model.DockSpec{
				&SampleMultiDocks[1],
			},
		},
	}
	for _, testcase := range tests {
		// NOTE(review): sortableKeysMap is indexed with typeFileShareSnapshots
		// even though the data are DockSpecs — looks like a copy-paste from a
		// file-share test; confirm the intended key set.
		ret := fc.FilterAndSort(testcase.input, testcase.param, sortableKeysMap[typeFileShareSnapshots])
		var res = []*model.DockSpec{}
		for _, data := range ret.([]interface{}) {
			res = append(res, data.(*model.DockSpec))
		}
		if !reflect.DeepEqual(res, testcase.expected) {
			// Dereference for readable %+v output on mismatch.
			var expected []model.DockSpec
			for _, value := range testcase.expected {
				expected = append(expected, *value)
			}
			var got []model.DockSpec
			for _, value := range res {
				got = append(got, *value)
			}
			t.Errorf("Expected %+v\n", expected)
			t.Errorf("Got %+v\n", got)
		}
	}
}
explode_data.jsonl/50715
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 938 }
[ 2830, 3393, 852, 41468, 16056, 5632, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 2036, 341, 197, 22427, 262, 29838, 2528, 17238, 8327, 198, 197, 36037, 262, 2415, 14032, 45725, 917, 198, 197, 42400, 29838, 2528, 17238, 8327, 198, 197, 53...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTeamsService_CreateOrUpdateIDPGroupConnectionsByID_empty(t *testing.T) { client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/organizations/1/team/1/team-sync/group-mappings", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "PATCH") fmt.Fprint(w, `{"groups": []}`) }) input := IDPGroupList{ Groups: []*IDPGroup{}, } ctx := context.Background() groups, _, err := client.Teams.CreateOrUpdateIDPGroupConnectionsByID(ctx, 1, 1, input) if err != nil { t.Errorf("Teams.CreateOrUpdateIDPGroupConnectionsByID returned error: %v", err) } want := &IDPGroupList{ Groups: []*IDPGroup{}, } if !cmp.Equal(groups, want) { t.Errorf("Teams.CreateOrUpdateIDPGroupConnectionsByID returned %+v. want %+v", groups, want) } }
explode_data.jsonl/4558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 312 }
[ 2830, 3393, 60669, 1860, 34325, 56059, 915, 47, 2808, 54751, 60572, 15124, 1155, 353, 8840, 836, 8, 341, 25291, 11, 59807, 11, 8358, 49304, 1669, 6505, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 69253, 14, 16, 78015, 14, 16, 7801...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContextTagsSetOnSimpleTelemetry(t *testing.T) { assert := assert.New(t) now := time.Now().UTC() transmitter := new(mocks.Transmitter) transmitter.On("Track", mock.Anything) m, err := metric.New( "SimpleMetric", map[string]string{"kubernetes_container_name": "atcsvc", "kubernetes_pod_name": "bunkie17554"}, map[string]interface{}{"value": 23.0}, now, ) assert.NoError(err) ai := ApplicationInsights{ transmitter: transmitter, InstrumentationKey: "1234", // Fake, but necessary to enable tracking ContextTagSources: map[string]string{ "ai.cloud.role": "kubernetes_container_name", "ai.cloud.roleInstance": "kubernetes_pod_name", "ai.user.id": "nonexistent", }, } err = ai.Connect() assert.NoError(err) mSet := []telegraf.Metric{m} ai.Write(mSet) transmitter.AssertNumberOfCalls(t, "Track", 1) metricTelemetry := transmitter.Calls[0].Arguments.Get(0).(*appinsights.MetricTelemetry) cloudTags := metricTelemetry.Tags.Cloud() assert.Equal("atcsvc", cloudTags.GetRole()) assert.Equal("bunkie17554", cloudTags.GetRoleInstance()) }
explode_data.jsonl/50944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 480 }
[ 2830, 3393, 1972, 15930, 1649, 1925, 16374, 6639, 35958, 1155, 353, 8840, 836, 8, 972, 6948, 1669, 2060, 7121, 1155, 1218, 80922, 1669, 882, 13244, 1005, 21183, 18005, 72453, 16126, 1669, 501, 1255, 25183, 11815, 16126, 1218, 72453, 16126, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContextHandlerName(t *testing.T) { c, _ := CreateTestContext(httptest.NewRecorder()) c.handlers = HandlersChain{func(c *Context) {}, handlerNameTest} assert.Regexp(t, "^(.*/vendor/)?github.com/gin-gonic/gin.handlerNameTest$", c.HandlerName()) }
explode_data.jsonl/26758
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 1972, 3050, 675, 1155, 353, 8840, 836, 8, 341, 1444, 11, 716, 1669, 4230, 2271, 1972, 73392, 83, 70334, 7121, 47023, 2398, 1444, 55001, 284, 95563, 18837, 90, 2830, 1337, 353, 1972, 8, 16452, 7013, 675, 2271, 630, 6948, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChannelArbitratorRemoteForceClose checks that a remote unilateral
// (force) close event drives the arbitrator from StateDefault through
// StateContractClosed to StateFullyResolved and marks the channel resolved.
func TestChannelArbitratorRemoteForceClose(t *testing.T) {
	log := &mockArbitratorLog{
		state:     StateDefault,
		newStates: make(chan ArbitratorState, 5),
	}

	chanArbCtx, err := createTestChannelArbitrator(t, log)
	if err != nil {
		t.Fatalf("unable to create ChannelArbitrator: %v", err)
	}
	chanArb := chanArbCtx.chanArb
	if err := chanArb.Start(); err != nil {
		t.Fatalf("unable to start ChannelArbitrator: %v", err)
	}
	defer chanArb.Stop()

	// It should start out in the default state.
	chanArbCtx.AssertState(StateDefault)

	// Send a remote force close event.
	commitSpend := &chainntnfs.SpendDetail{
		SpenderTxHash: &chainhash.Hash{},
	}

	uniClose := &lnwallet.UnilateralCloseSummary{
		SpendDetail:     commitSpend,
		HtlcResolutions: &lnwallet.HtlcResolutions{},
	}
	// Deliver the event the way the chain watcher would: over the
	// arbitrator's ChainEvents channel.
	chanArb.cfg.ChainEvents.RemoteUnilateralClosure <- &RemoteUnilateralCloseInfo{
		UnilateralCloseSummary: uniClose,
		CommitSet: CommitSet{
			ConfCommitKey: &RemoteHtlcSet,
			HtlcSets:      make(map[HtlcSetKey][]channeldb.HTLC),
		},
	}

	// It should transition StateDefault -> StateContractClosed ->
	// StateFullyResolved.
	chanArbCtx.AssertStateTransitions(
		StateContractClosed, StateFullyResolved,
	)

	// It should also mark the channel as resolved.
	select {
	case <-chanArbCtx.resolvedChan:
		// Expected.
	case <-time.After(defaultTimeout):
		t.Fatalf("contract was not resolved")
	}
}
explode_data.jsonl/3691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 527 }
[ 2830, 3393, 9629, 6953, 4489, 81, 850, 24703, 18573, 7925, 1155, 353, 8840, 836, 8, 341, 6725, 1669, 609, 16712, 6953, 4489, 81, 850, 2201, 515, 197, 24291, 25, 257, 3234, 3675, 345, 197, 8638, 23256, 25, 1281, 35190, 58795, 81, 850, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestIsCSISnapshotReady(t *testing.T) { tests := []struct { state string expectedResp bool }{ { state: "Succeeded", expectedResp: true, }, { state: "succeeded", expectedResp: true, }, { state: "fail", expectedResp: false, }, } for _, test := range tests { flag, err := isCSISnapshotReady(test.state) if flag != test.expectedResp { t.Errorf("testdesc: %v \n expected result:%t \n actual result:%t", test.state, test.expectedResp, flag) } assert.Nil(t, err) } }
explode_data.jsonl/59382
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 254 }
[ 2830, 3393, 3872, 6412, 1637, 9601, 19202, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 24291, 286, 914, 198, 197, 42400, 36555, 1807, 198, 197, 59403, 197, 197, 515, 298, 24291, 25, 286, 330, 50, 43805, 756, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestInMemoryKeyManagement walks the in-memory keyring through its full
// lifecycle: empty listing, mnemonic creation, lookup by name and address,
// deletion, and offline (pubkey-only) entries. The steps are strictly
// order-dependent; each section builds on the previous one's state.
func TestInMemoryKeyManagement(t *testing.T) {
	// make the storage with reasonable defaults
	cstore := NewInMemory()

	algo := hd.Secp256k1
	n1, n2, n3 := "personal", "business", "other"

	// Check empty state
	l, err := cstore.List()
	require.Nil(t, err)
	require.Empty(t, l)

	// An unsupported signing algorithm must be rejected up front.
	_, _, err = cstore.NewMnemonic(n1, English, sdk.FullFundraiserPath, notSupportedAlgo{})
	require.Error(t, err, "ed25519 keys are currently not supported by keybase")

	// create some keys
	_, err = cstore.Key(n1)
	require.Error(t, err)
	i, _, err := cstore.NewMnemonic(n1, English, sdk.FullFundraiserPath, algo)

	require.NoError(t, err)
	require.Equal(t, n1, i.GetName())
	_, _, err = cstore.NewMnemonic(n2, English, sdk.FullFundraiserPath, algo)
	require.NoError(t, err)

	// we can get these keys
	i2, err := cstore.Key(n2)
	require.NoError(t, err)
	_, err = cstore.Key(n3)
	require.NotNil(t, err)
	_, err = cstore.KeyByAddress(accAddr(i2))
	require.NoError(t, err)
	// A well-formed address that is not in the store must not resolve.
	addr, err := sdk.AccAddressFromBech32("cosmos1yq8lgssgxlx9smjhes6ryjasmqmd3ts2559g0t")
	require.NoError(t, err)
	_, err = cstore.KeyByAddress(addr)
	require.NotNil(t, err)

	// list shows them in order
	keyS, err := cstore.List()
	require.NoError(t, err)
	require.Equal(t, 2, len(keyS))
	// note these are in alphabetical order
	require.Equal(t, n2, keyS[0].GetName())
	require.Equal(t, n1, keyS[1].GetName())
	require.Equal(t, i2.GetPubKey(), keyS[0].GetPubKey())

	// deleting a key removes it
	err = cstore.Delete("bad name")
	require.NotNil(t, err)
	err = cstore.Delete(n1)
	require.NoError(t, err)

	keyS, err = cstore.List()
	require.NoError(t, err)
	require.Equal(t, 1, len(keyS))
	_, err = cstore.Key(n1)
	require.Error(t, err)

	// create an offline key
	o1 := "offline"
	priv1 := ed25519.GenPrivKey()
	pub1 := priv1.PubKey()
	i, err = cstore.SavePubKey(o1, pub1, hd.Ed25519Type)
	require.Nil(t, err)
	require.Equal(t, pub1, i.GetPubKey())
	require.Equal(t, o1, i.GetName())
	iOffline := i.(*offlineInfo)
	require.Equal(t, hd.Ed25519Type, iOffline.GetAlgo())

	keyS, err = cstore.List()
	require.NoError(t, err)
	require.Equal(t, 2, len(keyS))

	// delete the offline key
	err = cstore.Delete(o1)
	require.NoError(t, err)
	keyS, err = cstore.List()
	require.NoError(t, err)
	require.Equal(t, 1, len(keyS))

	// addr cache gets nuked - and test skip flag
	err = cstore.Delete(n2)
	require.NoError(t, err)
}
explode_data.jsonl/73444
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1008 }
[ 2830, 3393, 641, 10642, 1592, 22237, 1155, 353, 8840, 836, 8, 341, 197, 322, 1281, 279, 5819, 448, 13276, 16674, 198, 1444, 4314, 1669, 1532, 641, 10642, 2822, 69571, 3346, 1669, 17907, 808, 757, 79, 17, 20, 21, 74, 16, 198, 9038, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSummaryValueAtPercentileSlice(t *testing.T) { es := NewSummaryValueAtPercentileSlice() assert.EqualValues(t, 0, es.Len()) es = newSummaryValueAtPercentileSlice(&[]*otlpmetrics.SummaryDataPoint_ValueAtPercentile{}) assert.EqualValues(t, 0, es.Len()) es.Resize(7) emptyVal := NewSummaryValueAtPercentile() emptyVal.InitEmpty() testVal := generateTestSummaryValueAtPercentile() assert.EqualValues(t, 7, es.Len()) for i := 0; i < es.Len(); i++ { assert.EqualValues(t, emptyVal, es.At(i)) fillTestSummaryValueAtPercentile(es.At(i)) assert.EqualValues(t, testVal, es.At(i)) } }
explode_data.jsonl/19581
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 19237, 1130, 1655, 32010, 457, 33236, 1155, 353, 8840, 836, 8, 341, 78966, 1669, 1532, 19237, 1130, 1655, 32010, 457, 33236, 741, 6948, 12808, 6227, 1155, 11, 220, 15, 11, 1531, 65819, 2398, 78966, 284, 501, 19237, 1130, 165...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_l4(t *testing.T) { var cID uint32 = 100 currentStateMux.Lock() defer currentStateMux.Unlock() currentState = &state{streams: map[uint32]types.StreamContext{cID: &l4Context{}}} ctx, ok := currentState.streams[cID].(*l4Context) require.True(t, ok) proxyOnNewConnection(cID) require.True(t, ctx.onNewConnection) proxyOnDownstreamData(cID, 0, false) require.True(t, ctx.onDownstreamData) proxyOnDownstreamConnectionClose(cID, 0) require.True(t, ctx.onDownstreamClose) proxyOnUpstreamData(cID, 0, false) require.True(t, ctx.onUpstreamData) proxyOnUpstreamConnectionClose(cID, 0) require.True(t, ctx.onUpstreamStreamClose) }
explode_data.jsonl/64049
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 253 }
[ 2830, 3393, 907, 19, 1155, 353, 8840, 836, 8, 341, 2405, 272, 915, 2622, 18, 17, 284, 220, 16, 15, 15, 198, 20121, 1397, 44, 2200, 31403, 741, 16867, 57878, 44, 2200, 39188, 2822, 20121, 1397, 284, 609, 2454, 90, 60975, 25, 2415, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestNoneSingleSQLTemplate runs a pass-through rule over the demo stream and
// checks that the sink's dataTemplate renders each record as an HTML snippet,
// alongside the expected per-operator metrics.
func TestNoneSingleSQLTemplate(t *testing.T) {
	//Reset
	streamList := []string{"demo"}
	handleStream(false, streamList, t)
	//Data setup
	var tests = []ruleTest{
		{
			name: `TestNoneSingleSQLTemplateRule1`,
			sql:  `SELECT * FROM demo`,
			// One rendered HTML line per demo record, in arrival order.
			r: [][]byte{
				[]byte("<div>results</div><ul><li>red - 3</li></ul>"),
				[]byte("<div>results</div><ul><li>blue - 6</li></ul>"),
				[]byte("<div>results</div><ul><li>blue - 2</li></ul>"),
				[]byte("<div>results</div><ul><li>yellow - 4</li></ul>"),
				[]byte("<div>results</div><ul><li>red - 1</li></ul>"),
			},
			// Expected operator/sink/source metrics: 5 records through every
			// stage, no exceptions.
			m: map[string]interface{}{
				"op_preprocessor_demo_0_exceptions_total":   int64(0),
				"op_preprocessor_demo_0_process_latency_ms": int64(0),
				"op_preprocessor_demo_0_records_in_total":   int64(5),
				"op_preprocessor_demo_0_records_out_total":  int64(5),

				"op_project_0_exceptions_total":   int64(0),
				"op_project_0_process_latency_ms": int64(0),
				"op_project_0_records_in_total":   int64(5),
				"op_project_0_records_out_total":  int64(5),

				"sink_mockSink_0_exceptions_total":  int64(0),
				"sink_mockSink_0_records_in_total":  int64(5),
				"sink_mockSink_0_records_out_total": int64(5),

				"source_demo_0_exceptions_total":  int64(0),
				"source_demo_0_records_in_total":  int64(5),
				"source_demo_0_records_out_total": int64(5),
			},
		},
	}
	handleStream(true, streamList, t)
	doRuleTestBySinkProps(t, tests, 0, &api.RuleOption{
		BufferLength: 100,
	}, map[string]interface{}{
		"dataTemplate": `<div>results</div><ul>{{range .}}<li>{{.color}} - {{.size}}</li>{{end}}</ul>`,
	}, func(result [][]byte) interface{} {
		return result
	})
}
explode_data.jsonl/76174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 763 }
[ 2830, 3393, 4064, 10888, 6688, 7275, 1155, 353, 8840, 836, 8, 341, 197, 322, 14828, 198, 44440, 852, 1669, 3056, 917, 4913, 25762, 16707, 53822, 3027, 3576, 11, 4269, 852, 11, 259, 340, 197, 322, 1043, 6505, 198, 2405, 7032, 284, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBinance_FormatExchangeKlineInterval(t *testing.T) { testCases := []struct { name string interval kline.Interval output string }{ { "OneMin", kline.OneMin, "1m", }, { "OneDay", kline.OneDay, "1d", }, { "OneWeek", kline.OneWeek, "1w", }, { "OneMonth", kline.OneMonth, "1M", }, } for x := range testCases { test := testCases[x] t.Run(test.name, func(t *testing.T) { ret := b.FormatExchangeKlineInterval(test.interval) if ret != test.output { t.Fatalf("unexpected result return expected: %v received: %v", test.output, ret) } }) } }
explode_data.jsonl/76694
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 33, 24387, 72999, 31564, 42, 1056, 10256, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 2084, 6152, 595, 1056, 70855, 198, 197, 21170, 256, 914, 198, 197, 59403, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEachNode48(t *testing.T) { tree := NewArtTree() for i := 48; i > 0; i-- { tree.Insert([]byte{byte(i)}, []byte{byte(i)}) } traversal := []*ArtNode{} tree.Each(func(node *ArtNode) { traversal = append(traversal, node) }) // Order should be Node48, then the rest of the keys in sorted order if traversal[0] != tree.root || traversal[0].nodeType != NODE48 { t.Error("Unexpected node at begining of traversal") } for i := 1; i < 48; i++ { if bytes.Compare(traversal[i].key, append([]byte{byte(i)}, 0)) != 0 || traversal[i].nodeType != LEAF { t.Error("Unexpected node at second element of traversal") } } }
explode_data.jsonl/43966
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 4854, 1955, 19, 23, 1155, 353, 8840, 836, 8, 341, 51968, 1669, 1532, 9286, 6533, 2822, 2023, 600, 1669, 220, 19, 23, 26, 600, 861, 220, 15, 26, 600, 313, 341, 197, 51968, 23142, 10556, 3782, 90, 3782, 1956, 42419, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValues(t *testing.T) { values := &Values{} assert.Equal(t, "", values.Encode()) assert.True(t, values.Empty()) values = &Values{} values.Add("foo", "bar") assert.Equal(t, "foo=bar", values.Encode()) assert.False(t, values.Empty()) assert.Equal(t, []string{"bar"}, values.Get("foo")) values = &Values{} values.Add("foo", "bar") values.Add("foo", "bar") values.Add("baz", "bar") assert.Equal(t, "foo=bar&foo=bar&baz=bar", values.Encode()) assert.Equal(t, []string{"bar", "bar"}, values.Get("foo")) assert.Equal(t, []string{"bar"}, values.Get("baz")) values.Set("foo", "firstbar") assert.Equal(t, "foo=firstbar&foo=bar&baz=bar", values.Encode()) assert.Equal(t, []string{"firstbar", "bar"}, values.Get("foo")) assert.Equal(t, []string{"bar"}, values.Get("baz")) values.Set("new", "appended") assert.Equal(t, "foo=firstbar&foo=bar&baz=bar&new=appended", values.Encode()) assert.Equal(t, url.Values{ "baz": {"bar"}, "foo": {"firstbar", "bar"}, "new": {"appended"}, }, values.ToValues()) assert.Equal(t, []string{"appended"}, values.Get("new")) assert.Nil(t, values.Get("boguskey")) }
explode_data.jsonl/30328
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 458 }
[ 2830, 3393, 6227, 1155, 353, 8840, 836, 8, 341, 45939, 1669, 609, 6227, 31483, 6948, 12808, 1155, 11, 7342, 2750, 50217, 2398, 6948, 32443, 1155, 11, 2750, 11180, 12367, 45939, 284, 609, 6227, 16094, 45939, 1904, 445, 7975, 497, 330, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPartialLocationParseRegression(t *testing.T) { gbk := Read("../../data/sample.gbk") for _, feature := range gbk.Features { if feature.GbkLocationString == "687..3158>" && (feature.SequenceLocation.Start != 686 || feature.SequenceLocation.End != 3158) { t.Errorf("Partial location for three prime location parsing has failed. Parsing the output of Build() does not produce the same output as parsing the original file read with Read()") } else if feature.GbkLocationString == "<1..206" && (feature.SequenceLocation.Start != 0 || feature.SequenceLocation.End != 206) { t.Errorf("Partial location for five prime location parsing has failed. Parsing the output of Build() does not produce the same output as parsing the original file read with Read().") } } }
explode_data.jsonl/74811
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 37314, 4707, 14463, 45200, 1155, 353, 8840, 836, 8, 341, 3174, 40029, 1669, 4457, 36800, 691, 69851, 1302, 40029, 5130, 2023, 8358, 4565, 1669, 2088, 342, 40029, 73257, 341, 197, 743, 4565, 1224, 40029, 4707, 703, 621, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestNasTypeGetSetNotificationResponseMessageIdentityMessageType(t *testing.T) { a := nasType.NewNotificationResponseMessageIdentity() for _, table := range nasTypeNotificationResponseMessageIdentityMessageTypeTable { a.SetMessageType(table.in) assert.Equal(t, table.out, a.GetMessageType()) } }
explode_data.jsonl/15915
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 45, 300, 929, 1949, 1649, 11196, 2582, 2052, 18558, 82107, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 17141, 929, 7121, 11196, 2582, 2052, 18558, 741, 2023, 8358, 1965, 1669, 2088, 17141, 929, 11196, 2582, 2052, 18558, 82107, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestResourcePermissionsRead_some_error verifies that a 400 API error from
// the permissions endpoint is propagated as an error from the Read operation.
func TestResourcePermissionsRead_some_error(t *testing.T) {
	_, err := qa.ResourceFixture{
		Fixtures: []qa.HTTPFixture{
			{
				Method:   http.MethodGet,
				Resource: "/api/2.0/permissions/clusters/abc",
				// Simulated server-side failure payload.
				Response: common.APIErrorBody{
					ErrorCode: "INVALID_REQUEST",
					Message:   "Internal error happened",
				},
				Status: 400,
			},
		},
		Resource: ResourcePermissions(),
		Read:     true,
		ID:       "/clusters/abc",
	}.Apply(t)
	assert.Error(t, err)
}
explode_data.jsonl/50871
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 4783, 23851, 4418, 61855, 4096, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 88496, 20766, 18930, 515, 197, 12727, 941, 18513, 25, 3056, 15445, 27358, 18930, 515, 298, 197, 515, 571, 84589, 25, 256, 1758, 20798, 1949...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckMountPoint_Not_Exist(t *testing.T) { node, err := ioutil.TempDir("", "dir") // get unique folder name if err != nil { t.Fatalf("Failed to create folder: %s", err.Error()) } os.RemoveAll(node) // make sure folder does not exist defer os.RemoveAll(node) // if folder was created in function if err = CheckMountPoint(node); err != nil { t.Errorf("Function returned error: %s", err.Error()) } else if _, err := os.Stat(node); os.IsNotExist(err) { t.Error("Directory was not created") } }
explode_data.jsonl/18656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 3973, 16284, 2609, 60816, 62, 25613, 1155, 353, 8840, 836, 8, 341, 20831, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 3741, 899, 442, 633, 4911, 8527, 829, 198, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestServiceListingFindWithTag verifies that FindWithTag returns only the
// entries whose tag slice contains the requested tag, keeping each matching
// entry's full tag slice intact. The tag's position in the slice is irrelevant.
func TestServiceListingFindWithTag(t *testing.T) {
	tag := "tag"
	in := serviceListing{
		"svca": []string{"a", "b", tag, "c"}, // tag in the middle -> kept
		"svcb": []string{"a", "b", "c"},      // no tag -> filtered out
		"svcc": []string{"a", "b", "c"},      // no tag -> filtered out
		"svcd": []string{"a", "b", "c", tag}, // tag at the end -> kept
		"svce": []string{tag, "a", "b", "c"}, // tag at the start -> kept
	}
	got := in.FindWithTag(tag)
	assert.DeepEqual(t, got, serviceListing{
		"svca": []string{"a", "b", tag, "c"},
		"svcd": []string{"a", "b", "c", tag},
		"svce": []string{tag, "a", "b", "c"},
	})
}
explode_data.jsonl/71893
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 1860, 52564, 9885, 43131, 1155, 353, 8840, 836, 8, 341, 60439, 1669, 330, 4578, 698, 17430, 1669, 2473, 52564, 515, 197, 197, 1, 3492, 924, 788, 3056, 917, 4913, 64, 497, 330, 65, 497, 4772, 11, 330, 66, 7115, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestNewEncoderRec round-trips a slice of *Rec through the DBF encoder into a
// fresh file and compares the produced bytes against a golden file. A nil
// record in the slice exercises the encoder's nil handling, and the Cyrillic
// name exercises code page 866 encoding.
func TestNewEncoderRec(t *testing.T) {
	type args struct {
		w  Writer
		iw io.ReadWriteSeeker
		in []*Rec
	}
	tests := []struct {
		name    string
		args    args
		wantErr assert.ErrorAssertionFunc
	}{
		{
			name: "rec3-file",
			args: args{
				// Recreate the output file from scratch for a clean comparison.
				iw: func() io.ReadWriteSeeker {
					os.Remove("./testdata/test-rec3.dbf")
					fs, err := os.Create("./testdata/test-rec3.dbf")
					assert.NoError(t, err)
					return fs
				}(),
				in: []*Rec{
					&Rec{Name: "Abc", Flag: true, Count: 123, Price: 123.45, Date: time.Date(2021, 2, 12, 0, 0, 0, 0, time.UTC)},
					nil, // encoder must tolerate nil entries
					&Rec{Name: "Мышь", Flag: false, Count: -321, Price: -54.32, Date: time.Date(2021, 2, 12, 0, 0, 0, 0, time.UTC)},
				},
			},
			// This case expects no error from Encode.
			wantErr: func(t assert.TestingT, err error, i ...interface{}) bool { return false },
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			xb, err := New(tt.args.iw)
			assert.NoError(t, err)
			// Code page 866 (DOS Cyrillic) — needed for the "Мышь" record.
			xb.SetCodePage(866)
			enc := NewEncoder(xb)
			err = enc.Encode(tt.args.in)
			if tt.wantErr(t, err, tt.args.in) {
				assert.Error(t, err)
			} else {
				assert.NoError(t, err)
			}
			//xb.Flush()
			xb.Close()
			// Byte-for-byte comparison against the golden DBF file.
			if tt.name == "rec3-file" {
				wantBytes := readFile("./testdata/rec3.dbf")
				gotBytes := readFile("./testdata/test-rec3.dbf")
				assert.Equal(t, wantBytes, gotBytes)
			}
		})
	}
}
explode_data.jsonl/6193
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 657 }
[ 2830, 3393, 3564, 19921, 3820, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 6692, 220, 29404, 198, 197, 8230, 86, 6399, 6503, 7985, 39350, 261, 198, 197, 17430, 29838, 3820, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIssue11504 is a regression test: parsing this specific hex-encoded
// message must produce a read error (not a crash or a silent success).
func TestIssue11504(t *testing.T) {
	testReadMessageError(t, "9303000130303030303030303030983002303030303030030000000130")
}
explode_data.jsonl/2293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 51 }
[ 2830, 3393, 42006, 16, 16, 20, 15, 19, 1155, 353, 8840, 836, 8, 341, 18185, 4418, 2052, 1454, 1155, 11, 330, 24, 18, 15, 18, 15, 15, 15, 16, 18, 15, 18, 15, 18, 15, 18, 15, 18, 15, 18, 15, 18, 15, 18, 15, 18, 15, 18, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContains(t *testing.T) { testCases := map[string]struct { a []api.ResourceName b api.ResourceName expected bool }{ "does-not-contain": { a: []api.ResourceName{api.ResourceMemory}, b: api.ResourceCPU, expected: false, }, "does-contain": { a: []api.ResourceName{api.ResourceMemory, api.ResourceCPU}, b: api.ResourceCPU, expected: true, }, } for testName, testCase := range testCases { if actual := Contains(testCase.a, testCase.b); actual != testCase.expected { t.Errorf("%s expected: %v, actual: %v", testName, testCase.expected, actual) } } }
explode_data.jsonl/59928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 280 }
[ 2830, 3393, 23805, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 11323, 286, 3056, 2068, 20766, 675, 198, 197, 2233, 286, 6330, 20766, 675, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 1, 2705...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// Test_DropIndex_Migration checks that generating a "drop index" (di)
// migration for three columns produces the expected JSON: "up" drops the
// index and "down" re-adds it, so the migration is reversible.
func Test_DropIndex_Migration(t *testing.T) {
	argsss := []string{"add", "di", "test123", "first_col", "second_col", "third_col"}
	fileName, mm, _ := generateMigration(argsss)
	// The migration id is derived from the generated file name.
	expectedString := `{"id":"` + getID(fileName) + `","up":{"dropIndex":[{"tableName":"test123","columns":[{"fieldname":"first_col"},{"fieldname":"second_col"},{"fieldname":"third_col"}]}]},"down":{"addIndex":[{"tableName":"test123","columns":[{"fieldname":"first_col"},{"fieldname":"second_col"},{"fieldname":"third_col"}]}]}}`
	content1, _ := json.Marshal(mm)
	checkError(t, expectedString, string(content1))
}
explode_data.jsonl/22637
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 1557, 887, 1552, 1245, 5033, 1155, 353, 8840, 836, 8, 341, 47903, 84160, 1669, 3056, 917, 4913, 718, 497, 330, 8579, 497, 330, 1944, 16, 17, 18, 497, 330, 3896, 10211, 497, 330, 5569, 10211, 497, 330, 31727, 10211, 16707, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcileProvisioner_reconcileClusterRoleBinding(t *testing.T) { g := gomega.NewGomegaWithT(t) ctrl := gomock.NewController(t) defer ctrl.Finish() // Setup the Manager and Controller. Wrap the Controller Reconcile function so it writes each request to a // channel when it is finished. mgr, err := manager.New(cfg, manager.Options{ MetricsBindAddress: "0", }) g.Expect(err).NotTo(gomega.HaveOccurred()) c, err = client.New(cfg, client.Options{ Scheme: mgr.GetScheme(), Mapper: mgr.GetRESTMapper(), }) c2, err := client.New(cfg2, client.Options{ Scheme: mgr.GetScheme(), Mapper: mgr.GetRESTMapper(), }) // Delete clusterrolebinding in target cluster if present targetClusterRoleBinding := &v1.ClusterRoleBinding{} err = c2.Get(context.TODO(), types.NamespacedName{Name: "provisioner-clusterrolebinding", Namespace: constants.InteroperatorNamespace}, targetClusterRoleBinding) if err == nil { c2.Delete(context.TODO(), targetClusterRoleBinding) } mockProvisioner := mock_provisioner.NewMockProvisioner(ctrl) mockClusterRegistry := mock_clusterRegistry.NewMockClusterRegistry(ctrl) r := &ReconcileProvisioner{ Client: c, Log: ctrlrun.Log.WithName("mcd").WithName("provisioner"), scheme: mgr.GetScheme(), clusterRegistry: mockClusterRegistry, provisioner: mockProvisioner, } type args struct { namespace string clusterID string targetClient client.Client } tests := []struct { name string args args wantErr bool }{ { name: "Create if clusterrolebinding does not exists", args: args{ namespace: constants.InteroperatorNamespace, clusterID: "2", targetClient: c2, }, wantErr: false, }, { name: "Update if clusterrolebinding already exists", args: args{ namespace: constants.InteroperatorNamespace, clusterID: "2", targetClient: c2, }, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if err := r.reconcileClusterRoleBinding(tt.args.namespace, tt.args.clusterID, tt.args.targetClient); (err != nil) != tt.wantErr { 
t.Errorf("ReconcileProvisioner.reconcileClusterRoleBinding() error = %v, wantErr %v", err, tt.wantErr) } }) targetClusterRoleBinding := &v1.ClusterRoleBinding{} g.Eventually(func() error { return c2.Get(context.TODO(), types.NamespacedName{Name: "provisioner-clusterrolebinding", Namespace: constants.InteroperatorNamespace}, targetClusterRoleBinding) }, timeout).Should(gomega.Succeed()) } }
explode_data.jsonl/58275
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1029 }
[ 2830, 3393, 693, 40446, 457, 1336, 13013, 261, 1288, 40446, 457, 28678, 9030, 15059, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestIntegration_RunAt starts an application with mocked Ethereum clients,
// creates a "run at" job from a fixture, and verifies the initiator type,
// its scheduled time, and that the resulting job run completes.
func TestIntegration_RunAt(t *testing.T) {
	t.Parallel()
	rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t)
	defer assertMocksCalled()
	app, cleanup := cltest.NewApplication(t,
		eth.NewClientWith(rpcClient, gethClient),
	)
	defer cleanup()
	// InstantClock makes the scheduled run fire immediately instead of
	// waiting for wall-clock time.
	app.InstantClock()
	require.NoError(t, app.Start())
	j := cltest.FixtureCreateJobViaWeb(t, app, "fixtures/web/run_at_job.json")
	initr := j.Initiators[0]
	assert.Equal(t, models.InitiatorRunAt, initr.Type)
	// Scheduled time comes from the fixture file.
	assert.Equal(t, "2018-01-08T18:12:01Z", utils.ISO8601UTC(initr.Time.Time))
	jrs := cltest.WaitForRuns(t, j, app.Store, 1)
	cltest.WaitForJobRunToComplete(t, app.Store, jrs[0])
}
explode_data.jsonl/75890
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 52464, 84158, 1655, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 7000, 3992, 2959, 11, 633, 71, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1155, 340, 16867, 2060, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMarshalBool(t *testing.T) { buf := make([]byte, 2) a := assert.New(t) total := marshalBool(buf, false) total += marshalBool(buf[total:], true) a.Equal([]byte{0, 1}, buf[:total]) }
explode_data.jsonl/72599
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 55438, 11233, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 1281, 10556, 3782, 11, 220, 17, 340, 11323, 1669, 2060, 7121, 1155, 692, 34493, 1669, 60771, 11233, 10731, 11, 895, 340, 34493, 1421, 60771, 11233, 10731, 58, 5035, 45...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_repeatedSubstringPattern(t *testing.T) { type args struct { s string } tests := []struct { name string args args want bool }{ { "[Test Case 1]", args{ "abcabcabcabc", }, true, }, { "[Test Case 2]", args{ "aabbaa", }, false, }, { "[Test Case 3]", args{ "a", }, false, }, { "[Test Case 4]", args{ "aabaaba", }, false, }, { "[Test Case 5]", args{ "bb", }, true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := repeatedSubstringPattern(tt.args.s); got != tt.want { t.Errorf("repeatedSubstringPattern() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/61512
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 1288, 41954, 59075, 15760, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 1807, 198, 197, 59403, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestParseErrBashConfirm cross-checks the parser's error behavior against a
// real bash 4.4 binary for every entry in shellTests. Skipped in -short mode
// and when a suitable bash is unavailable.
func TestParseErrBashConfirm(t *testing.T) {
	if testing.Short() {
		t.Skip("calling bash is slow.")
	}
	if !hasBash44 {
		t.Skip("bash 4.4 required to run")
	}
	i := 0
	for _, c := range shellTests {
		// Expected output resolution order: common, then the bsmk override,
		// then the bash-specific override (most specific wins).
		want := c.common
		if c.bsmk != nil {
			want = c.bsmk
		}
		if c.bash != nil {
			want = c.bash
		}
		if want == nil {
			continue
		}
		// The " #NOERR" marker in the expected string flags inputs that bash
		// accepts; everything else is expected to be a parse error.
		wantErr := !strings.Contains(want.(string), " #NOERR")
		t.Run(fmt.Sprintf("%03d", i), confirmParse(c.in, "bash", wantErr))
		i++
	}
}
explode_data.jsonl/31433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 14463, 7747, 33, 988, 16728, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 73726, 27023, 374, 6301, 13053, 197, 532, 743, 753, 4648, 33, 988, 19, 19, 341, 197, 3244, 57776, 445, 46216, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestSimpleConditionMatchPath(t *testing.T) { sc := NewSimpleCondition("/abc/s") res := sc.Match(context.Background(), httplib.Get("http://localhost:8080/abc/s")) assert.True(t, res) }
explode_data.jsonl/63450
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 16374, 10547, 8331, 1820, 1155, 353, 8840, 836, 8, 341, 29928, 1669, 1532, 16374, 10547, 4283, 13683, 2687, 1138, 10202, 1669, 1136, 36062, 5378, 19047, 1507, 54320, 81682, 2234, 445, 1254, 1110, 8301, 25, 23, 15, 23, 15, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestCacheMountSharedRefsDeadlock is a regression test: releasing a shared
// cache ref while another goroutine clones it must not deadlock. Test hijack
// hooks insert deliberate sleeps to widen the race window; a 10s timeout
// converts a deadlock into a test failure.
func TestCacheMountSharedRefsDeadlock(t *testing.T) {
	// not parallel
	ctx := namespaces.WithNamespace(context.Background(), "buildkit-test")
	tmpdir, err := ioutil.TempDir("", "cachemanager")
	require.NoError(t, err)
	defer os.RemoveAll(tmpdir)
	snapshotter, err := native.NewSnapshotter(filepath.Join(tmpdir, "snapshots"))
	require.NoError(t, err)
	co, cleanup, err := newCacheManager(ctx, cmOpt{
		snapshotter:     snapshotter,
		snapshotterName: "native",
	})
	require.NoError(t, err)
	defer cleanup()
	// Two ref getters sharing the same cacheRefs map — the contended resource.
	var sharedCacheRefs = &cacheRefs{}
	g1 := newRefGetter(co.manager, co.md, sharedCacheRefs)
	g2 := newRefGetter(co.manager, co.md, sharedCacheRefs)
	ref, err := g1.getRefCacheDir(ctx, nil, "foo", pb.CacheSharingOpt_SHARED)
	require.NoError(t, err)
	// Sleeps widen the race window between Release and the concurrent clone.
	cacheRefReleaseHijack = func() {
		time.Sleep(200 * time.Millisecond)
	}
	cacheRefCloneHijack = func() {
		time.Sleep(400 * time.Millisecond)
	}
	defer func() {
		cacheRefReleaseHijack = nil
		cacheRefCloneHijack = nil
	}()
	eg, _ := errgroup.WithContext(context.TODO())
	eg.Go(func() error {
		return ref.Release(context.TODO())
	})
	eg.Go(func() error {
		_, err := g2.getRefCacheDir(ctx, nil, "foo", pb.CacheSharingOpt_SHARED)
		return err
	})
	done := make(chan struct{})
	go func() {
		err = eg.Wait()
		require.NoError(t, err)
		close(done)
	}()
	// If both goroutines don't finish within 10s, assume they deadlocked.
	select {
	case <-done:
	case <-time.After(10 * time.Second):
		require.FailNow(t, "deadlock on releasing while getting new ref")
	}
}
explode_data.jsonl/50742
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 579 }
[ 2830, 3393, 8233, 16284, 16997, 82807, 28320, 1023, 1155, 353, 8840, 836, 8, 341, 197, 322, 537, 15279, 198, 20985, 1669, 58091, 26124, 22699, 5378, 19047, 1507, 330, 5834, 8226, 16839, 5130, 20082, 3741, 11, 1848, 1669, 43144, 65009, 618...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateManifestsWithAppParameterFile(t *testing.T) { t.Run("Single global override", func(t *testing.T) { runWithTempTestdata(t, "single-global", func(t *testing.T, path string) { service := newService(".") manifests, err := service.GenerateManifest(context.Background(), &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, ApplicationSource: &argoappv1.ApplicationSource{ Path: path, }, }) require.NoError(t, err) resourceByKindName := make(map[string]*unstructured.Unstructured) for _, manifest := range manifests.Manifests { var un unstructured.Unstructured err := yaml.Unmarshal([]byte(manifest), &un) if !assert.NoError(t, err) { return } resourceByKindName[fmt.Sprintf("%s/%s", un.GetKind(), un.GetName())] = &un } deployment, ok := resourceByKindName["Deployment/guestbook-ui"] require.True(t, ok) containers, ok, _ := unstructured.NestedSlice(deployment.Object, "spec", "template", "spec", "containers") require.True(t, ok) image, ok, _ := unstructured.NestedString(containers[0].(map[string]interface{}), "image") require.True(t, ok) assert.Equal(t, "gcr.io/heptio-images/ks-guestbook-demo:0.2", image) }) }) t.Run("Application specific override", func(t *testing.T) { service := newService(".") runWithTempTestdata(t, "single-app-only", func(t *testing.T, path string) { manifests, err := service.GenerateManifest(context.Background(), &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, ApplicationSource: &argoappv1.ApplicationSource{ Path: path, }, AppName: "testapp", }) require.NoError(t, err) resourceByKindName := make(map[string]*unstructured.Unstructured) for _, manifest := range manifests.Manifests { var un unstructured.Unstructured err := yaml.Unmarshal([]byte(manifest), &un) if !assert.NoError(t, err) { return } resourceByKindName[fmt.Sprintf("%s/%s", un.GetKind(), un.GetName())] = &un } deployment, ok := resourceByKindName["Deployment/guestbook-ui"] require.True(t, ok) containers, ok, _ := unstructured.NestedSlice(deployment.Object, "spec", 
"template", "spec", "containers") require.True(t, ok) image, ok, _ := unstructured.NestedString(containers[0].(map[string]interface{}), "image") require.True(t, ok) assert.Equal(t, "gcr.io/heptio-images/ks-guestbook-demo:0.3", image) }) }) t.Run("Application specific override for other app", func(t *testing.T) { service := newService(".") runWithTempTestdata(t, "single-app-only", func(t *testing.T, path string) { manifests, err := service.GenerateManifest(context.Background(), &apiclient.ManifestRequest{ Repo: &argoappv1.Repository{}, ApplicationSource: &argoappv1.ApplicationSource{ Path: path, }, AppName: "testapp2", }) require.NoError(t, err) resourceByKindName := make(map[string]*unstructured.Unstructured) for _, manifest := range manifests.Manifests { var un unstructured.Unstructured err := yaml.Unmarshal([]byte(manifest), &un) if !assert.NoError(t, err) { return } resourceByKindName[fmt.Sprintf("%s/%s", un.GetKind(), un.GetName())] = &un } deployment, ok := resourceByKindName["Deployment/guestbook-ui"] require.True(t, ok) containers, ok, _ := unstructured.NestedSlice(deployment.Object, "spec", "template", "spec", "containers") require.True(t, ok) image, ok, _ := unstructured.NestedString(containers[0].(map[string]interface{}), "image") require.True(t, ok) assert.Equal(t, "gcr.io/heptio-images/ks-guestbook-demo:0.1", image) }) }) }
explode_data.jsonl/5698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1506 }
[ 2830, 3393, 31115, 38495, 16056, 2164, 4971, 1703, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 10888, 3644, 2812, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 56742, 2354, 12151, 2271, 691, 1155, 11, 330, 15338, 73319, 497, 2915, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestDetachedSignatureDSA loads a DSA test keyring from hex fixtures and
// checks that the detached binary signature verifies against signedInput
// with the expected key id.
func TestDetachedSignatureDSA(t *testing.T) {
	// ReadKeyRing's error is intentionally ignored: the fixture is static
	// and a bad keyring would fail the verification step anyway.
	kring, _ := ReadKeyRing(readerFromHex(dsaTestKeyHex))
	testDetachedSignature(t, kring, readerFromHex(detachedSignatureDSAHex), signedInput, "binary", testKey3KeyId)
}
explode_data.jsonl/2284
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 17076, 3854, 25088, 72638, 1155, 353, 8840, 836, 8, 341, 197, 9855, 287, 11, 716, 1669, 4457, 1592, 43466, 21987, 3830, 20335, 1500, 9081, 2271, 1592, 20335, 1171, 18185, 17076, 3854, 25088, 1155, 11, 595, 12640, 11, 6604, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSortColors2(t *testing.T) { nums := []int{1, 2, 0} sortColors2(nums) assert.Equal(t, []int{0, 1, 2}, nums) nums1 := []int{2, 2} sortColors2(nums1) assert.Equal(t, []int{2, 2}, nums1) nums2 := []int{2, 0, 1} sortColors2(nums2) assert.Equal(t, []int{0, 1, 2}, nums2) nums3 := []int{2, 0, 2, 1, 1, 0} sortColors2(nums3) assert.Equal(t, []int{0, 0, 1, 1, 2, 2}, nums3) }
explode_data.jsonl/64403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 10231, 13108, 17, 1155, 353, 8840, 836, 8, 341, 22431, 82, 1669, 3056, 396, 90, 16, 11, 220, 17, 11, 220, 15, 532, 39487, 13108, 17, 21096, 340, 6948, 12808, 1155, 11, 3056, 396, 90, 15, 11, 220, 16, 11, 220, 17, 213...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEmptyState initializes a fresh node ("hsnode-test") and then runs the
// export command, capturing its stdout via an os.Pipe to assert the exported
// genesis document contains the expected top-level fields.
func TestEmptyState(t *testing.T) {
	defer server.SetupViper(t)()
	defer setupClientHome(t)()
	logger := log.NewNopLogger()
	cfg, err := tcmd.ParseConfig()
	require.Nil(t, err)
	ctx := server.NewContext(cfg, logger)
	cdc := app.MakeLatestCodec()
	cmd := InitCmd(ctx, cdc)
	require.NoError(t, cmd.RunE(nil, []string{"hsnode-test"}))
	// Redirect stdout into a pipe so the export output can be inspected.
	old := os.Stdout
	r, w, _ := os.Pipe()
	os.Stdout = w
	cmd = server.ExportCmd(ctx, cdc, nil)
	err = cmd.RunE(nil, nil)
	require.NoError(t, err)
	// Drain the pipe on a goroutine to avoid blocking the writer.
	outC := make(chan string)
	go func() {
		var buf bytes.Buffer
		io.Copy(&buf, r)
		outC <- buf.String()
	}()
	// Close the write end first so the reader sees EOF, then restore stdout.
	w.Close()
	os.Stdout = old
	out := <-outC
	require.Contains(t, out, "genesis_time")
	require.Contains(t, out, "chain_id")
	require.Contains(t, out, "consensus_params")
	require.Contains(t, out, "validators")
	require.Contains(t, out, "app_hash")
}
explode_data.jsonl/17820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 360 }
[ 2830, 3393, 3522, 1397, 1155, 353, 8840, 836, 8, 341, 16867, 3538, 39820, 53, 12858, 1155, 8, 741, 16867, 6505, 2959, 7623, 1155, 8, 2822, 17060, 1669, 1487, 7121, 45, 453, 7395, 741, 50286, 11, 1848, 1669, 259, 8710, 8937, 2648, 741,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestUpdateMore verifies that Update replaces a token-bucket set's rates
// with a larger rate set: old buckets are dropped, the new buckets start
// full, and maxPeriod reflects the longest new period.
func TestUpdateMore(t *testing.T) {
	// Given
	rates := NewRateSet()
	require.NoError(t, rates.Add(1*time.Second, 10, 20))
	require.NoError(t, rates.Add(10*time.Second, 20, 50))
	require.NoError(t, rates.Add(20*time.Second, 45, 90))
	clock := testutils.GetClock()
	tbs := NewTokenBucketSet(rates, clock)
	// Consume 5 so the pre-update state is distinguishable from full buckets.
	_, err := tbs.Consume(5)
	require.NoError(t, err)
	assert.Equal(t, "{1s: 15}, {10s: 45}, {20s: 85}", tbs.debugState())
	rates = NewRateSet()
	require.NoError(t, rates.Add(10*time.Second, 30, 40))
	require.NoError(t, rates.Add(11*time.Second, 30, 40))
	require.NoError(t, rates.Add(12*time.Second, 30, 40))
	require.NoError(t, rates.Add(13*time.Second, 30, 40))
	// When
	tbs.Update(rates)
	// Then
	assert.Equal(t, "{10s: 40}, {11s: 40}, {12s: 40}, {13s: 40}", tbs.debugState())
	assert.Equal(t, 13*time.Second, tbs.maxPeriod)
}
explode_data.jsonl/25394
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 4289, 7661, 1155, 353, 8840, 836, 8, 341, 197, 322, 16246, 198, 7000, 973, 1669, 1532, 11564, 1649, 741, 17957, 35699, 1155, 11, 7813, 1904, 7, 16, 77053, 32435, 11, 220, 16, 15, 11, 220, 17, 15, 1171, 17957, 35699, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWithMaxConcurrentMessages checks that dialing with the
// WithMaxConcurrentMessages option leaves the client with a non-nil
// request channel.
func TestWithMaxConcurrentMessages(t *testing.T) {
	t.Parallel()
	Convey("Given an int and dialer", t, func() {
		m := 2
		dialer := &mockDialerStruct{}
		Convey("When Dial is called with max concurrent messages", func() {
			c, _ := mockDial(dialer, WithMaxConcurrentMessages(m))
			Convey("Then the client request channel should be set", func() {
				So(c.request, ShouldNotBeNil)
			})
		})
	})
}
explode_data.jsonl/53404
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 2354, 5974, 1109, 3231, 15820, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 93070, 5617, 445, 22043, 458, 526, 323, 27860, 261, 497, 259, 11, 2915, 368, 341, 197, 2109, 1669, 220, 17, 198, 197, 2698, 530, 261, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypeSystem_ObjectsCanOnlyImplementInterfaces_AcceptsAnObjectImplementingAnInterface(t *testing.T) { anotherInterfaceType := graphql.NewInterface(graphql.InterfaceConfig{ Name: "AnotherInterface", ResolveType: func(p graphql.ResolveTypeParams) *graphql.Object { return nil }, Fields: graphql.Fields{ "f": &graphql.Field{ Type: graphql.String, }, }, }) _, err := schemaWithObjectImplementingType(anotherInterfaceType) if err != nil { t.Fatalf(`unexpected error: %v"`, err) } }
explode_data.jsonl/79178
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 191 }
[ 2830, 3393, 929, 2320, 62, 11543, 6713, 7308, 62980, 41066, 1566, 66, 57771, 2082, 1190, 62980, 287, 2082, 5051, 1155, 353, 8840, 836, 8, 341, 197, 41963, 5051, 929, 1669, 48865, 7121, 5051, 24312, 1470, 41065, 2648, 515, 197, 21297, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStreamEncode(t *testing.T) { state := &State{} codec := NewCodec(0) codec.StreamEncodeInit(state) file, err := os.Open("testdata/moby_dick_plain.txt") defer file.Close() if err != nil { t.Errorf("%v\n", err) return } var encodedBytes []byte expectedEncodeBytes, err := ioutil.ReadFile("testdata/moby_dick_base64.txt") if err != nil { t.Errorf("%v\n", err) return } maxChunkSize := 10 encodedChunkSize := codec.EncodedLen(maxChunkSize) var outSize int readBuff := make([]byte, maxChunkSize) outBuff := make([]byte, encodedChunkSize) for { nread, err := file.Read(readBuff) if err != nil { if err == io.EOF { break } else { t.Errorf("%v\n", err) return } } codec.StreamEncode(state, readBuff, nread, outBuff, &outSize) encodedBytes = append(encodedBytes, outBuff[:outSize]...) } codec.StreamEncodeFinal(state, outBuff, &outSize) if outSize > 0 { // write trailer if any encodedBytes = append(encodedBytes, outBuff[:outSize]...) } if !bytes.Equal(expectedEncodeBytes, encodedBytes) { t.Error("not equal") } }
explode_data.jsonl/17241
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 458 }
[ 2830, 3393, 3027, 32535, 1155, 353, 8840, 836, 8, 341, 24291, 1669, 609, 1397, 16094, 43343, 66, 1669, 1532, 36913, 7, 15, 340, 43343, 66, 33308, 32535, 3803, 8390, 692, 17661, 11, 1848, 1669, 2643, 12953, 445, 92425, 3183, 27015, 814, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
// TestNetworkPolicyDelete verifies that deleting a network policy resource
// issues exactly the expected DROP NETWORK POLICY statement against the
// mocked database and returns no error.
func TestNetworkPolicyDelete(t *testing.T) {
	r := require.New(t)
	in := map[string]interface{}{
		"name":            "test-network-policy",
		"allowed_ip_list": []interface{}{"192.168.1.0/24"},
		"blocked_ip_list": []interface{}{"155.548.2.98"},
		"comment":         "great comment",
	}
	d := schema.TestResourceDataRaw(t, resources.NetworkPolicy().Schema, in)
	d.SetId("test-network-policy")
	r.NotNil(d)
	WithMockDb(t, func(db *sql.DB, mock sqlmock.Sqlmock) {
		// Anchored regex pins the exact SQL the provider must emit.
		mock.ExpectExec(`^DROP NETWORK POLICY "test-network-policy"$`).WillReturnResult(sqlmock.NewResult(1, 1))
		err := resources.DeleteNetworkPolicy(d, db)
		r.NoError(err)
	})
}
explode_data.jsonl/50826
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 12320, 13825, 6435, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 692, 17430, 1669, 2415, 14032, 31344, 67066, 197, 197, 31486, 788, 310, 330, 1944, 56732, 66420, 756, 197, 197, 1, 20967, 10385, 2019, 788, 3056...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPermsCreateAdmin checks that PermCreate with the admin flag POSTs to
// the admin perms endpoint of a fake server and prints the expected
// progress message.
func TestPermsCreateAdmin(t *testing.T) {
	t.Parallel()
	cf, server, err := testutil.NewTestServerAndClient()
	if err != nil {
		t.Fatal(err)
	}
	defer server.Close()
	var b bytes.Buffer
	cmdr := DeisCmd{WOut: &b, ConfigFile: cf}
	// Stub the admin perms endpoint; only response headers matter here.
	server.Mux.HandleFunc("/v2/admin/perms/", func(w http.ResponseWriter, r *http.Request) {
		testutil.SetHeaders(w)
	})
	err = cmdr.PermCreate("lorem-ipsum", "test-admin", true)
	assert.NoErr(t, err)
	// StripProgress removes spinner output so the comparison is stable.
	assert.Equal(t, testutil.StripProgress(b.String()), `Adding test-admin to system administrators... done
`, "output")
}
explode_data.jsonl/74903
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 3889, 1011, 4021, 7210, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 96989, 11, 3538, 11, 1848, 1669, 1273, 1314, 7121, 2271, 5475, 3036, 2959, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestScreenshot captures element screenshots selected via several query
// strategies (XPath, CSS, ID, JS path) and checks each PNG decodes to the
// expected square dimensions.
func TestScreenshot(t *testing.T) {
	t.Parallel()
	ctx, cancel := testAllocate(t, "image2.html")
	defer cancel()
	tests := []struct {
		sel  string
		by   QueryOption
		size int // expected width and height in pixels
	}{
		{`/html/body/img`, BySearch, 239},
		{`img`, ByQueryAll, 239},
		{`#icon-github`, ByID, 120},
		{`document.querySelector('#imagething').shadowRoot.querySelector('.container')`, ByJSPath, 190},
	}
	// a smaller viewport speeds up this test
	if err := Run(ctx, EmulateViewport(600, 400)); err != nil {
		t.Fatal(err)
	}
	for i, test := range tests {
		var buf []byte
		if err := Run(ctx, Screenshot(test.sel, &buf, test.by)); err != nil {
			t.Fatalf("test %d got error: %v", i, err)
		}
		if len(buf) == 0 {
			t.Fatalf("test %d failed to capture screenshot", i)
		}
		// Decode the PNG to validate both format and dimensions.
		img, err := png.Decode(bytes.NewReader(buf))
		if err != nil {
			t.Fatal(err)
		}
		size := img.Bounds().Size()
		if size.X != test.size || size.Y != test.size {
			t.Errorf("expected dimensions to be %d*%d, got %d*%d", test.size, test.size, size.X, size.Y)
		}
	}
}
explode_data.jsonl/59484
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 435 }
[ 2830, 3393, 62522, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 20985, 11, 9121, 1669, 1273, 75380, 1155, 11, 330, 1805, 17, 2564, 1138, 16867, 9121, 2822, 78216, 1669, 3056, 1235, 341, 197, 1903, 301, 220, 914, 198, 197, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestProbe_Name(t *testing.T) { tests := []struct { name string probe *probe }{ { name: "OK", probe: &probe{ name: "my-service", }, }, } for _, tc := range tests { t.Run(tc.name, func(t *testing.T) { assert.Equal(t, tc.probe.name, tc.probe.Name()) }) } }
explode_data.jsonl/15674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 81426, 19015, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 197, 52329, 353, 52329, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 3925, 756, 298, 197, 52329, 25, 609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceCreateFromFile(t *testing.T) { testWithServiceFiles(t, func(t *testing.T, tempFile string) { for _, testArgs := range [][]string{ { "service", "create", "foo", "--filename", tempFile}, { "service", "create", "--filename", tempFile}, } { action, created, _, err := fakeServiceCreate(testArgs, false) assert.NilError(t, err) assert.Assert(t, action.Matches("create", "services")) assert.Equal(t, created.Name, "foo") assert.Equal(t, created.Spec.Template.Spec.GetContainer().Image, "gcr.io/foo/bar:baz") } }) }
explode_data.jsonl/42460
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 221 }
[ 2830, 3393, 1860, 4021, 43633, 1155, 353, 8840, 836, 8, 341, 18185, 2354, 1860, 10809, 1155, 11, 2915, 1155, 353, 8840, 836, 11, 2730, 1703, 914, 8, 341, 197, 2023, 8358, 1273, 4117, 1669, 2088, 52931, 917, 515, 298, 197, 515, 571, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFilterLoad(t *testing.T) { merkle := wire.MsgFilterLoad{} f := bloom.LoadFilter(&merkle) if !f.IsLoaded() { t.Errorf("TestFilterLoad IsLoaded test failed: want %v got %v", true, !f.IsLoaded()) return } f.Unload() if f.IsLoaded() { t.Errorf("TestFilterLoad IsLoaded test failed: want %v got %v", f.IsLoaded(), false) return } }
explode_data.jsonl/24699
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 5632, 5879, 1155, 353, 8840, 836, 8, 341, 197, 1174, 23089, 1669, 9067, 30365, 5632, 5879, 31483, 1166, 1669, 51454, 13969, 5632, 2099, 1174, 23089, 340, 743, 753, 69, 4506, 22369, 368, 341, 197, 3244, 13080, 445, 2271, 5632...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSaveTokenFailsCantCreate(t *testing.T) { tokenPath := "/thiswontwork" if runtime.GOOS == "windows" { tokenPath = path.Join(os.Getenv("windir"), "system32") } err := SaveToken(tokenPath, 0644, *token()) expectedSubstring := "failed to create the temp file to write the token" if err == nil || !strings.Contains(err.Error(), expectedSubstring) { t.Fatalf("azure: failed to get correct error expected(%s) actual(%v)", expectedSubstring, err) } }
explode_data.jsonl/14996
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 8784, 3323, 37, 6209, 34, 517, 4021, 1155, 353, 8840, 836, 8, 972, 43947, 1820, 1669, 3521, 574, 86, 544, 1778, 5031, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 972, 197, 43947, 1820, 284, 1815, 22363, 9638, 64883, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestListAllListeners(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() HandleListenerListSuccessfully(t) allPages, err := listeners.List(fake.ServiceClient(), listeners.ListOpts{}).AllPages() th.AssertNoErr(t, err) actual, err := listeners.ExtractListeners(allPages) th.AssertNoErr(t, err) th.CheckDeepEquals(t, ListenerWeb, actual[0]) th.CheckDeepEquals(t, ListenerDb, actual[1]) }
explode_data.jsonl/27549
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 852, 2403, 31570, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 741, 197, 6999, 2743, 852, 35959, 1155, 692, 50960, 17713, 11, 1848, 1669, 23562, 5814, 74138, 13860, 2959, 1507, 23562, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProbesToPortsConversion(t *testing.T) { expected := model.PortList{ { Name: "mgmt-3306", Port: 3306, Protocol: model.ProtocolTCP, }, { Name: "mgmt-9080", Port: 9080, Protocol: model.ProtocolHTTP, }, } handlers := []v1.Handler{ { TCPSocket: &v1.TCPSocketAction{ Port: intstr.IntOrString{StrVal: "mysql", Type: intstr.String}, }, }, { TCPSocket: &v1.TCPSocketAction{ Port: intstr.IntOrString{IntVal: 3306, Type: intstr.Int}, }, }, { HTTPGet: &v1.HTTPGetAction{ Path: "/foo", Port: intstr.IntOrString{StrVal: "http-two", Type: intstr.String}, }, }, { HTTPGet: &v1.HTTPGetAction{ Path: "/foo", Port: intstr.IntOrString{IntVal: 9080, Type: intstr.Int}, }, }, } podSpec := &v1.PodSpec{ Containers: []v1.Container{ { Name: "scooby", Ports: []v1.ContainerPort{ { Name: "mysql", ContainerPort: 3306, }, { Name: "http-two", ContainerPort: 9080, }, { Name: "http", ContainerPort: 80, }, }, LivenessProbe: &v1.Probe{}, ReadinessProbe: &v1.Probe{}, }, }, } for _, handler1 := range handlers { for _, handler2 := range handlers { if (handler1.TCPSocket != nil && handler2.TCPSocket != nil) || (handler1.HTTPGet != nil && handler2.HTTPGet != nil) { continue } podSpec.Containers[0].LivenessProbe.Handler = handler1 podSpec.Containers[0].ReadinessProbe.Handler = handler2 mgmtPorts, err := convertProbesToPorts(podSpec) if err != nil { t.Errorf("Failed to convert Probes to Ports: %v", err) } if !reflect.DeepEqual(mgmtPorts, expected) { t.Errorf("incorrect number of management ports => %v, want %v", len(mgmtPorts), len(expected)) } } } }
explode_data.jsonl/26714
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 908 }
[ 2830, 3393, 1336, 9433, 1249, 68273, 48237, 1155, 353, 8840, 836, 8, 1476, 42400, 1669, 1614, 43013, 852, 515, 197, 197, 515, 298, 21297, 25, 257, 330, 12311, 2501, 12, 18, 18, 15, 21, 756, 298, 98459, 25, 257, 220, 18, 18, 15, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestTemplateFuncs(t *testing.T) { b := newTestSitesBuilder(t).WithDefaultMultiSiteConfig() homeTpl := `Site: {{ site.Language.Lang }} / {{ .Site.Language.Lang }} / {{ site.BaseURL }} Sites: {{ site.Sites.First.Home.Language.Lang }} Hugo: {{ hugo.Generator }} ` b.WithTemplatesAdded( "index.html", homeTpl, "index.fr.html", homeTpl, ) b.CreateSites().Build(BuildCfg{}) b.AssertFileContent("public/en/index.html", "Site: en / en / http://example.com/blog", "Sites: en", "Hugo: <meta name=\"generator\" content=\"Hugo") b.AssertFileContent("public/fr/index.html", "Site: fr / fr / http://example.com/blog", "Sites: en", "Hugo: <meta name=\"generator\" content=\"Hugo", ) }
explode_data.jsonl/60656
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 281 }
[ 2830, 3393, 7275, 9626, 82, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 501, 2271, 93690, 3297, 1155, 568, 2354, 3675, 20358, 17597, 2648, 2822, 197, 5117, 87137, 1669, 1565, 17597, 25, 5867, 2747, 56958, 77916, 3869, 608, 5867, 659, 1759...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGitCommandStashDo(t *testing.T) { gitCmd := newDummyGitCommand() gitCmd.OSCommand.command = func(cmd string, args ...string) *exec.Cmd { assert.EqualValues(t, "git", cmd) assert.EqualValues(t, []string{"stash", "drop", "stash@{1}"}, args) return exec.Command("echo") } assert.NoError(t, gitCmd.StashDo(1, "drop")) }
explode_data.jsonl/38357
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 46562, 4062, 623, 988, 5404, 1155, 353, 8840, 836, 8, 341, 90731, 15613, 1669, 501, 43344, 46562, 4062, 741, 90731, 15613, 57054, 4062, 14143, 284, 2915, 14160, 914, 11, 2827, 2503, 917, 8, 353, 11748, 64512, 341, 197, 6948,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_deleteWorkflowEventIntegrationHandler(t *testing.T) { api, db, router := newTestAPI(t) // Init user u, pass := assets.InsertAdminUser(t, api.mustDB()) // Init project key := sdk.RandomString(10) proj := assets.InsertTestProject(t, db, api.Cache, key, key) // Init pipeline pip := sdk.Pipeline{ Name: "pipeline1", ProjectID: proj.ID, } test.NoError(t, pipeline.InsertPipeline(api.mustDB(), &pip)) //Prepare request vars := map[string]string{ "permProjectKey": proj.Key, } uri := router.GetRoute("POST", api.postWorkflowHandler, vars) test.NotEmpty(t, uri) var wf = &sdk.Workflow{ Name: "Name", Description: "Description", WorkflowData: sdk.WorkflowData{ Node: sdk.Node{ Type: sdk.NodeTypePipeline, Context: &sdk.NodeContext{ PipelineID: pip.ID, }, }, }, } req := assets.NewAuthentifiedRequest(t, u, pass, "POST", uri, &wf) //Do the request w := httptest.NewRecorder() router.Mux.ServeHTTP(w, req) assert.Equal(t, 201, w.Code) test.NoError(t, json.Unmarshal(w.Body.Bytes(), &wf)) //Prepare request vars = map[string]string{ "key": proj.Key, "permWorkflowName": "Name", } uri = router.GetRoute("PUT", api.putWorkflowHandler, vars) test.NotEmpty(t, uri) // Insert application app := sdk.Application{ Name: "app1", RepositoryFullname: "test/app1", VCSServer: "github", } test.NoError(t, application.Insert(api.mustDB(), *proj, &app)) model := sdk.IntegrationModel{ Name: sdk.RandomString(10), Event: true, } test.NoError(t, integration.InsertModel(api.mustDB(), &model)) projInt := sdk.ProjectIntegration{ Config: sdk.IntegrationConfig{ "test": sdk.IntegrationConfigValue{ Description: "here is a test", Type: sdk.IntegrationConfigTypeString, Value: "test", }, }, Name: sdk.RandomString(10), ProjectID: proj.ID, Model: model, IntegrationModelID: model.ID, } test.NoError(t, integration.InsertIntegration(db, &projInt)) var workflow1 = &sdk.Workflow{ Name: "Name", Description: "Description 2", WorkflowData: sdk.WorkflowData{ Node: sdk.Node{ Type: sdk.NodeTypePipeline, Context: 
&sdk.NodeContext{ PipelineID: pip.ID, ApplicationID: app.ID, }, }, }, EventIntegrations: []sdk.ProjectIntegration{projInt}, } req = assets.NewAuthentifiedRequest(t, u, pass, "PUT", uri, &workflow1) //Do the request w = httptest.NewRecorder() router.Mux.ServeHTTP(w, req) assert.Equal(t, 200, w.Code) test.NoError(t, json.Unmarshal(w.Body.Bytes(), &workflow1)) assert.NotEqual(t, 0, workflow1.ID) assert.Equal(t, "Description 2", workflow1.Description) assert.NotEqual(t, 0, workflow1.WorkflowData.Node.Context.ApplicationID) assert.NotNil(t, workflow1.WorkflowData.Node.Context.DefaultPayload) assert.NotNil(t, workflow1.EventIntegrations) assert.Equal(t, len(workflow1.EventIntegrations), 1) vars["integrationID"] = fmt.Sprintf("%d", projInt.ID) uri = router.GetRoute("DELETE", api.deleteWorkflowEventsIntegrationHandler, vars) req = assets.NewAuthentifiedRequest(t, u, pass, "DELETE", uri, nil) //Do the request w = httptest.NewRecorder() router.Mux.ServeHTTP(w, req) assert.Equal(t, 200, w.Code) wfUpdated, err := workflow.Load(context.TODO(), api.mustDB(), api.Cache, *proj, wf.Name, workflow.LoadOptions{WithIntegrations: true}) test.NoError(t, err, "cannot load workflow") test.Equal(t, 0, len(wfUpdated.EventIntegrations)) }
explode_data.jsonl/31076
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1510 }
[ 2830, 3393, 11353, 62768, 1556, 52464, 3050, 1155, 353, 8840, 836, 8, 341, 54299, 11, 2927, 11, 9273, 1669, 501, 2271, 7082, 1155, 692, 197, 322, 15690, 1196, 198, 10676, 11, 1494, 1669, 11770, 23142, 7210, 1474, 1155, 11, 6330, 69419, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTidbClusterControllerAddStatefuSet(t *testing.T) { g := NewGomegaWithT(t) type testcase struct { name string modifySet func(*v1alpha1.TidbCluster) *apps.StatefulSet addTidbClusterToIndexer bool expectedLen int } testFn := func(test *testcase, t *testing.T) { t.Log("test: ", test.name) tc := newTidbCluster() set := test.modifySet(tc) tcc, tcIndexer, _ := newFakeTidbClusterController() if test.addTidbClusterToIndexer { err := tcIndexer.Add(tc) g.Expect(err).NotTo(HaveOccurred()) } tcc.addStatefulSet(set) g.Expect(tcc.queue.Len()).To(Equal(test.expectedLen)) } tests := []testcase{ { name: "normal", modifySet: func(tc *v1alpha1.TidbCluster) *apps.StatefulSet { return newStatefuSet(tc) }, addTidbClusterToIndexer: true, expectedLen: 1, }, { name: "have deletionTimestamp", modifySet: func(tc *v1alpha1.TidbCluster) *apps.StatefulSet { set := newStatefuSet(tc) set.DeletionTimestamp = &metav1.Time{Time: time.Now().Add(30 * time.Second)} return set }, addTidbClusterToIndexer: true, expectedLen: 1, }, { name: "without controllerRef", modifySet: func(tc *v1alpha1.TidbCluster) *apps.StatefulSet { set := newStatefuSet(tc) set.OwnerReferences = nil return set }, addTidbClusterToIndexer: true, expectedLen: 0, }, { name: "without tidbcluster", modifySet: func(tc *v1alpha1.TidbCluster) *apps.StatefulSet { return newStatefuSet(tc) }, addTidbClusterToIndexer: false, expectedLen: 0, }, } for i := range tests { testFn(&tests[i], t) } }
explode_data.jsonl/68174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 826 }
[ 2830, 3393, 51, 307, 65, 28678, 2051, 2212, 1397, 32621, 1649, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 340, 13158, 70080, 2036, 341, 197, 11609, 503, 914, 198, 197, 42228, 1437, 1649, 2290, 2915, 407...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIntegrationsNewAwsCfgIntegration(t *testing.T) { subject := api.NewAwsCfgIntegration("integration_name", api.AwsIntegrationData{ Credentials: &api.AwsCrossAccountCreds{ RoleArn: "arn:foo:bar", ExternalID: "0123456789", }, }, ) assert.Equal(t, api.AwsCfgIntegration.String(), subject.Type) }
explode_data.jsonl/81109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 1072, 14412, 804, 3564, 47359, 42467, 52464, 1155, 353, 8840, 836, 8, 341, 28624, 583, 1669, 6330, 7121, 47359, 42467, 52464, 445, 60168, 1269, 756, 197, 54299, 875, 8915, 52464, 1043, 515, 298, 6258, 15735, 25, 609, 2068, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetFundingPayments(t *testing.T) { t.Parallel() if !areTestAPIKeysSet() { t.Skip() } // optional params _, err := f.GetFundingPayments(context.Background(), time.Time{}, time.Time{}, "") if err != nil { t.Error(err) } _, err = f.GetFundingPayments(context.Background(), time.Unix(authStartTime, 0), time.Unix(authEndTime, 0), futuresPair) if err != nil { t.Error(err) } _, err = f.GetFundingPayments(context.Background(), time.Unix(authEndTime, 0), time.Unix(authStartTime, 0), futuresPair) if err != errStartTimeCannotBeAfterEndTime { t.Errorf("should have thrown errStartTimeCannotBeAfterEndTime, got %v", err) } }
explode_data.jsonl/15197
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 1949, 37, 37189, 87646, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 341, 197, 3244, 57776, 741, 197, 532, 197, 322, 10101, 3628, 198, 197, 6878, 1848, 1669, 282, 2234, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestClient_Lstat(t *testing.T) { if !isTestManual { t.Skipf("%s not set", envNameTestManual) } remoteFile := "/etc/hosts" fa, err := testClient.Lstat(remoteFile) if err != nil { t.Fatal(err) } t.Logf("Lstat: %s: %+v", remoteFile, fa) }
explode_data.jsonl/66413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 2959, 2351, 9878, 1155, 353, 8840, 836, 8, 341, 743, 753, 285, 2271, 52092, 341, 197, 3244, 57776, 69, 4430, 82, 537, 738, 497, 6105, 675, 2271, 52092, 340, 197, 630, 197, 18147, 1703, 1669, 3521, 12107, 14, 44692, 698, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_core_PodStatus_to_v1_PodStatus(t *testing.T) { // core to v1 testInputs := []core.PodStatus{ { // one IP PodIPs: []core.PodIP{ { IP: "1.1.1.1", }, }, }, { // no ips PodIPs: nil, }, { // list of ips PodIPs: []core.PodIP{ { IP: "1.1.1.1", }, { IP: "2000::", }, }, }, } for i, input := range testInputs { v1PodStatus := v1.PodStatus{} if err := corev1.Convert_core_PodStatus_To_v1_PodStatus(&input, &v1PodStatus, nil); nil != err { t.Errorf("%v: Convert core.PodStatus to v1.PodStatus failed with error %v", i, err.Error()) } if len(input.PodIPs) == 0 { // no more work needed continue } // Primary IP was not set.. if len(v1PodStatus.PodIP) == 0 { t.Errorf("%v: Convert core.PodStatus to v1.PodStatus failed out.PodIP is empty, should be %v", i, v1PodStatus.PodIP) } // Primary should always == in.PodIPs[0].IP if len(input.PodIPs) > 0 && v1PodStatus.PodIP != input.PodIPs[0].IP { t.Errorf("%v: Convert core.PodStatus to v1.PodStatus failed out.PodIP != in.PodIP[0].IP expected %v found %v", i, input.PodIPs[0].IP, v1PodStatus.PodIP) } // match v1.PodIPs to core.PodIPs for idx := range input.PodIPs { if v1PodStatus.PodIPs[idx].IP != input.PodIPs[idx].IP { t.Errorf("%v: Convert core.PodStatus to v1.PodStatus failed. Expected v1.PodStatus[%v]=%v but found %v", i, idx, input.PodIPs[idx].IP, v1PodStatus.PodIPs[idx].IP) } } } }
explode_data.jsonl/27251
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 706 }
[ 2830, 3393, 15467, 1088, 347, 2522, 2346, 2273, 16, 1088, 347, 2522, 1155, 353, 8840, 836, 8, 341, 197, 322, 6200, 311, 348, 16, 198, 18185, 31946, 1669, 3056, 2153, 88823, 2522, 515, 197, 197, 515, 298, 197, 322, 825, 6790, 198, 29...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestGetModificationNumber(t *testing.T) { visitor := createVisitor("test", nil) // Test before sync _, err := visitor.GetModificationNumber("not_exists", 10, true) assert.NotEqual(t, nil, err, "Should have an error as modifications are not synced") visitor.SynchronizeModifications() // Test default value val, err := visitor.GetModificationNumber("not_exists", 10, true) assert.NotEqual(t, nil, err, "Should have an error as flag does not exists") assert.Equal(t, 10., val, "Expected default value getting nil flag") // Test wrong type value val, err = visitor.GetModificationNumber("test_string", 10, true) assert.NotEqual(t, nil, err, "Should have an error as flag test_string is not of type float") assert.Equal(t, 10., val, "Expected default value getting nil flag") // Test nil value val, err = visitor.GetModificationNumber("test_nil", 10, true) assert.Equal(t, nil, err, "Did not expect error when getting nil flag") assert.Equal(t, 10., val, "Expected default value getting nil flag") // Test response value val, err = visitor.GetModificationNumber("test_number", 10, true) assert.Equal(t, nil, err, "Should not have an error as flag does exists") assert.Equal(t, 35.6, val, "Expected value 36.5") }
explode_data.jsonl/12291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 1949, 80795, 2833, 1155, 353, 8840, 836, 8, 341, 197, 39985, 1669, 1855, 16796, 445, 1944, 497, 2092, 692, 197, 322, 3393, 1573, 12811, 198, 197, 6878, 1848, 1669, 20181, 2234, 80795, 2833, 445, 1921, 9766, 497, 220, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJSONBindingBindBody(t *testing.T) { type sampleStruct struct { ABCFoo string Bar string Box string `json:"_box"` } var s sampleStruct err := jsonBinding{}.BindBody([]byte(`{"Abc_foo": "FOO", "Box": "zzz", "Bar": "xyz"}`), &s) require.NoError(t, err) require.Equal(t, "FOO", s.ABCFoo) require.Equal(t, "xyz", s.Bar) require.Equal(t, "", s.Box) s = sampleStruct{} err = jsonBinding{}.BindBody([]byte(`{"ABCFoo": "z", "_Box": "yo"}`), &s) require.NoError(t, err) require.Equal(t, "", s.ABCFoo) require.Equal(t, "", s.Bar) require.Equal(t, "yo", s.Box) s = sampleStruct{} err = jsonBinding{}.BindBody([]byte(`{"abc_foo": "x", "_box": "yoo", "bar": "jojo"}`), &s) require.NoError(t, err) require.Equal(t, "x", s.ABCFoo) require.Equal(t, "jojo", s.Bar) require.Equal(t, "yoo", s.Box) }
explode_data.jsonl/52792
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 386 }
[ 2830, 3393, 5370, 15059, 9950, 5444, 1155, 353, 8840, 836, 8, 341, 13158, 6077, 9422, 2036, 341, 197, 197, 1867, 9650, 2624, 914, 198, 197, 197, 3428, 262, 914, 198, 197, 197, 1611, 262, 914, 1565, 2236, 2974, 62, 2011, 8805, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintComponentStatus(t *testing.T) { tests := []struct { componentStatus api.ComponentStatus expected []metav1.TableRow }{ // Basic component status without conditions { componentStatus: api.ComponentStatus{ ObjectMeta: metav1.ObjectMeta{ Name: "cs1", }, Conditions: []api.ComponentCondition{}, }, // Columns: Name, Status, Message, Error expected: []metav1.TableRow{{Cells: []interface{}{"cs1", "Unknown", "", ""}}}, }, // Basic component status with healthy condition. { componentStatus: api.ComponentStatus{ ObjectMeta: metav1.ObjectMeta{ Name: "cs2", }, Conditions: []api.ComponentCondition{ { Type: "Healthy", Status: api.ConditionTrue, Message: "test message", Error: "test error", }, }, }, // Columns: Name, Status, Message, Error expected: []metav1.TableRow{{Cells: []interface{}{"cs2", "Healthy", "test message", "test error"}}}, }, // Basic component status with healthy condition. { componentStatus: api.ComponentStatus{ ObjectMeta: metav1.ObjectMeta{ Name: "cs3", }, Conditions: []api.ComponentCondition{ { Type: "Healthy", Status: api.ConditionFalse, Message: "test message", Error: "test error", }, }, }, // Columns: Name, Status, Message, Error expected: []metav1.TableRow{{Cells: []interface{}{"cs3", "Unhealthy", "test message", "test error"}}}, }, } for i, test := range tests { rows, err := printComponentStatus(&test.componentStatus, printers.GenerateOptions{}) if err != nil { t.Fatal(err) } for i := range rows { rows[i].Object.Object = nil } if !reflect.DeepEqual(test.expected, rows) { t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows)) } } }
explode_data.jsonl/21628
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 755 }
[ 2830, 3393, 8994, 2189, 2522, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 52228, 2522, 6330, 5119, 2522, 198, 197, 42400, 286, 3056, 4059, 402, 16, 18257, 3102, 198, 197, 59403, 197, 197, 322, 14625, 3692, 2639, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestExemplar_IntVal(t *testing.T) { ms := NewExemplar() assert.EqualValues(t, int64(0), ms.IntVal()) testValIntVal := int64(17) ms.SetIntVal(testValIntVal) assert.EqualValues(t, testValIntVal, ms.IntVal()) }
explode_data.jsonl/32770
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 840, 25892, 277, 32054, 2208, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 840, 25892, 277, 741, 6948, 12808, 6227, 1155, 11, 526, 21, 19, 7, 15, 701, 9829, 7371, 2208, 2398, 18185, 2208, 1072, 2208, 1669, 526, 21, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadChallengeTx_invalidCorrupted(t *testing.T) { serverKP := newKeypair0() clientKP := newKeypair1() txSource := NewSimpleAccount(serverKP.Address(), -1) op := ManageData{ SourceAccount: clientKP.Address(), Name: "testanchor.stellar.org auth", Value: []byte(base64.StdEncoding.EncodeToString(make([]byte, 48))), } webAuthDomainOp := ManageData{ SourceAccount: serverKP.Address(), Name: "web_auth_domain", Value: []byte("testwebauth.stellar.org"), } tx, err := NewTransaction( TransactionParams{ SourceAccount: &txSource, IncrementSequenceNum: true, Operations: []Operation{&op, &webAuthDomainOp}, BaseFee: MinBaseFee, Timebounds: NewTimeout(1000), }, ) assert.NoError(t, err) tx, err = tx.Sign(network.TestNetworkPassphrase, serverKP) assert.NoError(t, err) tx64, err := tx.Base64() require.NoError(t, err) tx64 = strings.ReplaceAll(tx64, "A", "B") readTx, readClientAccountID, _, err := ReadChallengeTx(tx64, serverKP.Address(), network.TestNetworkPassphrase, "testwebauth.stellar.org", []string{"testanchor.stellar.org"}) assert.Nil(t, readTx) assert.Equal(t, "", readClientAccountID) assert.EqualError( t, err, "could not parse challenge: unable to unmarshal transaction envelope: "+ "xdr:decode: switch '68174086' is not valid enum value for union", ) }
explode_data.jsonl/20703
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 584 }
[ 2830, 3393, 4418, 62078, 31584, 31433, 10580, 85954, 1155, 353, 8840, 836, 8, 341, 41057, 65036, 1669, 501, 6608, 1082, 1310, 15, 741, 25291, 65036, 1669, 501, 6608, 1082, 1310, 16, 741, 46237, 3608, 1669, 1532, 16374, 7365, 21421, 65036,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBasicAuthRoundTripper(t *testing.T) { rt := &testRoundTripper{} req := &http.Request{} NewBasicAuthRoundTripper("user", "pass", rt).RoundTrip(req) if rt.Request == nil { t.Fatalf("unexpected nil request: %v", rt) } if rt.Request == req { t.Fatalf("round tripper should have copied request object: %#v", rt.Request) } if rt.Request.Header.Get("Authorization") != "Basic "+base64.StdEncoding.EncodeToString([]byte("user:pass")) { t.Errorf("unexpected authorization header: %#v", rt.Request) } }
explode_data.jsonl/27367
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 15944, 5087, 27497, 21884, 6922, 1155, 353, 8840, 836, 8, 341, 55060, 1669, 609, 1944, 27497, 21884, 6922, 16094, 24395, 1669, 609, 1254, 9659, 16094, 197, 3564, 15944, 5087, 27497, 21884, 6922, 445, 872, 497, 330, 6385, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCloseConversation(t *testing.T) { http.HandleFunc("/conversations.close", closeConversationHandler) once.Do(startServer) api := New("testing-token", OptionAPIURL("http://"+serverAddr+"/")) _, _, err := api.CloseConversation("CXXXXXXXX") if err != nil { t.Errorf("Unexpected error: %s", err) return } }
explode_data.jsonl/78546
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 7925, 60313, 1155, 353, 8840, 836, 8, 341, 28080, 63623, 4283, 443, 72995, 4653, 497, 3265, 60313, 3050, 340, 197, 13184, 33596, 10639, 5475, 340, 54299, 1669, 1532, 445, 8840, 34841, 497, 6959, 7082, 3144, 445, 1254, 1110, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWriteDirFileToUserHomeDir(t *testing.T) { hds := NewHomedirService() content := []byte(`t`) pathToFile := "./testfile" err := hds.WriteFileToUserHomeDir(content, pathToFile) assert.FileExists(t, pathToFile) assert.Nil(t, err) os.RemoveAll(pathToFile) }
explode_data.jsonl/35865
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 7985, 6184, 1703, 1249, 1474, 7623, 6184, 1155, 353, 8840, 836, 8, 341, 9598, 5356, 1669, 1532, 39, 24139, 404, 1860, 741, 27751, 1669, 3056, 3782, 5809, 83, 24183, 26781, 41550, 1669, 5924, 1944, 1192, 698, 9859, 1669, 305,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCallCreateTxJSON(t *testing.T) { modify := &ModifyConfig{ Key: "token-finisher", Value: "xxxxxxxxxxxxxxxxxxxxxxxxxxxx", Op: "add", Addr: "", } data, err := json.Marshal(modify) assert.Equal(t, err, nil) result, err := CallCreateTxJSON("manage", "Modify", data) assert.Equal(t, err, nil) assert.NotEqual(t, result, nil) var tx Transaction err = Decode(result, &tx) assert.Equal(t, err, nil) assert.Equal(t, tx.Execer, []byte("manage")) fee, _ := tx.GetRealFee(GInt("MinFee")) assert.Equal(t, tx.Fee, fee) _, err = CallCreateTxJSON("coins", "Modify", data) assert.NotEqual(t, err, nil) _, err = CallCreateTxJSON("xxxx", "xxx", data) assert.NotEqual(t, err, nil) modify = &ModifyConfig{ Key: "token-finisher", Value: "xxxxxxxxxxxxxxxxxxxxxxxxxxxx", Op: "delete", Addr: "", } data, err = json.Marshal(modify) assert.Equal(t, err, nil) result, err = CallCreateTxJSON("manage", "Modify", data) assert.Equal(t, err, nil) assert.NotEqual(t, result, nil) err = Decode(result, &tx) assert.Equal(t, err, nil) assert.Equal(t, tx.Execer, []byte("manage")) fee, _ = tx.GetRealFee(GInt("MinFee")) assert.Equal(t, tx.Fee, fee) }
explode_data.jsonl/52308
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 560 }
[ 2830, 3393, 7220, 4021, 31584, 5370, 1155, 353, 8840, 836, 8, 972, 42228, 1437, 1669, 609, 44427, 2648, 1666, 197, 55242, 25, 256, 330, 5839, 2220, 18176, 261, 4723, 197, 47399, 25, 330, 44102, 44102, 44102, 18516, 4723, 197, 197, 7125,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPreserveCommentsWithAdjust(t *testing.T) { kustomizationContentWithComments := []byte(` # shem qing some comments # This is some comment we should preserve # don't delete it resources: - pod.yaml # See which field this comment goes into - service.yaml apiVersion: kustomize.config.k8s.io/v1beta1 kind: kustomization # something you may want to keep vars: - fieldref: fieldPath: metadata.name name: MY_SERVICE_NAME objref: apiVersion: v1 kind: Service name: my-service BASES: - ../namespaces # some descriptions for the patches patchesStrategicMerge: - service.yaml - pod.yaml # generator options generatorOptions: disableNameSuffixHash: true `) expected := []byte(` # shem qing some comments # This is some comment we should preserve # don't delete it # See which field this comment goes into resources: - pod.yaml - service.yaml apiVersion: kustomize.config.k8s.io/v1beta1 kind: kustomization # something you may want to keep vars: - fieldref: fieldPath: metadata.name name: MY_SERVICE_NAME objref: apiVersion: v1 kind: Service name: my-service bases: - ../namespaces # some descriptions for the patches patchesStrategicMerge: - service.yaml - pod.yaml # generator options generatorOptions: disableNameSuffixHash: true `) fSys := fs.MakeFakeFS() fSys.WriteTestKustomizationWith(kustomizationContentWithComments) mf, err := NewKustomizationFile(fSys) if err != nil { t.Fatalf("Unexpected Error: %v", err) } kustomization, err := mf.Read() if err != nil { t.Fatalf("Unexpected Error: %v", err) } if err = mf.Write(kustomization); err != nil { t.Fatalf("Unexpected Error: %v", err) } bytes, _ := fSys.ReadFile(mf.path) if !reflect.DeepEqual(expected, bytes) { t.Fatal("written kustomization with comments is not the same as original one\n", string(bytes)) } }
explode_data.jsonl/68521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 675 }
[ 2830, 3393, 14367, 5852, 17373, 2354, 38616, 1155, 353, 8840, 836, 8, 341, 16463, 1450, 2022, 2762, 2354, 17373, 1669, 3056, 3782, 5809, 271, 14808, 2, 557, 336, 2804, 287, 1045, 6042, 198, 2, 1096, 374, 1045, 3980, 582, 1265, 21129, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestUnchangingEndpointsDoesNotCommit(t *testing.T) { router := newTestRouter(make(map[string]ServiceUnit)) plugin := newDefaultTemplatePlugin(router, true) endpoints := &kapi.Endpoints{ ObjectMeta: kapi.ObjectMeta{ Namespace: "foo", Name: "test", }, Subsets: []kapi.EndpointSubset{{ Addresses: []kapi.EndpointAddress{{IP: "1.1.1.1"}, {IP: "2.2.2.2"}}, Ports: []kapi.EndpointPort{{Port: 0}}, }}, } changedEndpoints := &kapi.Endpoints{ ObjectMeta: kapi.ObjectMeta{ Namespace: "foo", Name: "test", }, Subsets: []kapi.EndpointSubset{{ Addresses: []kapi.EndpointAddress{{IP: "3.3.3.3"}, {IP: "2.2.2.2"}}, Ports: []kapi.EndpointPort{{Port: 0}}, }}, } testCases := []struct { name string event watch.EventType endpoints *kapi.Endpoints expectCommit bool }{ { name: "initial add", event: watch.Added, endpoints: endpoints, expectCommit: true, }, { name: "mod with no change", event: watch.Modified, endpoints: endpoints, expectCommit: false, }, { name: "add with change", event: watch.Added, endpoints: changedEndpoints, expectCommit: true, }, { name: "add with no change", event: watch.Added, endpoints: changedEndpoints, expectCommit: false, }, } for _, v := range testCases { err := plugin.HandleEndpoints(v.event, v.endpoints) if err != nil { t.Errorf("%s had unexpected error in handle endpoints %v", v.name, err) continue } if router.Committed != v.expectCommit { t.Errorf("%s expected router commit to be %v but found %v", v.name, v.expectCommit, router.Committed) } } }
explode_data.jsonl/5230
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 809 }
[ 2830, 3393, 1806, 51713, 80786, 21468, 2623, 33441, 1155, 353, 8840, 836, 8, 341, 67009, 1669, 501, 2271, 9523, 36944, 9147, 14032, 60, 1860, 4562, 1171, 197, 9138, 1669, 501, 3675, 7275, 11546, 61210, 11, 830, 340, 6246, 7706, 1669, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInputService11ProtocolTestHeaderMapsCase1(t *testing.T) { sess := session.New() svc := NewInputService11ProtocolTest(sess, &aws.Config{Endpoint: aws.String("https://test")}) input := &InputService11TestShapeInputService11TestCaseOperation1Input{ Foo: map[string]*string{ "a": aws.String("b"), "c": aws.String("d"), }, } req, _ := svc.InputService11TestCaseOperation1Request(input) r := req.HTTPRequest // build request restxml.Build(req) assert.NoError(t, req.Error) // assert URL awstesting.AssertURL(t, "https://test/", r.URL.String()) // assert headers assert.Equal(t, "b", r.Header.Get("x-foo-a")) assert.Equal(t, "d", r.Header.Get("x-foo-c")) }
explode_data.jsonl/46481
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 270 }
[ 2830, 3393, 2505, 1860, 16, 16, 20689, 2271, 4047, 36562, 4207, 16, 1155, 353, 8840, 836, 8, 341, 1903, 433, 1669, 3797, 7121, 741, 1903, 7362, 1669, 1532, 2505, 1860, 16, 16, 20689, 2271, 57223, 11, 609, 8635, 10753, 90, 27380, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStreamWriter5(t *testing.T) { runBadgerTest(t, nil, func(t *testing.T, db *DB) { list := &pb.KVList{} left := make([]byte, 6) left[0] = 0x00 copy(left[1:], []byte("break")) right := make([]byte, 6) right[0] = 0xff copy(right[1:], []byte("break")) list.Kv = append(list.Kv, &pb.KV{ Key: left, Value: []byte("val"), Version: 1, }) list.Kv = append(list.Kv, &pb.KV{ Key: right, Value: []byte("val"), Version: 1, }) sw := db.NewStreamWriter() require.NoError(t, sw.Prepare(), "sw.Prepare() failed") require.NoError(t, sw.Write(list), "sw.Write() failed") require.NoError(t, sw.Flush(), "sw.Flush() failed") require.NoError(t, db.Close()) var err error _, err = Open(db.opt) require.NoError(t, err) }) }
explode_data.jsonl/17983
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 371 }
[ 2830, 3393, 93031, 20, 1155, 353, 8840, 836, 8, 341, 56742, 17082, 1389, 2271, 1155, 11, 2092, 11, 2915, 1155, 353, 8840, 836, 11, 2927, 353, 3506, 8, 341, 197, 14440, 1669, 609, 16650, 11352, 53, 852, 31483, 197, 35257, 1669, 1281, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPerformBufferEdits(t *testing.T) { tests := []struct { name string fileEdits []*FileEdit s string want string }{ {name: "no edits", s: "my\nshort\nfile\n", want: "my\nshort\nfile\n"}, { name: "one edit", fileEdits: []*FileEdit{ {pos: token.Position{Offset: 3}, fromText: "short", toText: "one edit"}, }, s: "my\nshort\nfile\n", want: "my\none edit\nfile\n", }, { name: "one insert", fileEdits: []*FileEdit{ {pos: token.Position{Offset: 2}, fromText: "", toText: "\none insert"}, }, s: "my\nshort\nfile\n", want: "my\none insert\nshort\nfile\n", }, { name: "two inserts at same offset", fileEdits: []*FileEdit{ {pos: token.Position{Offset: 2}, fromText: "", toText: "\none insert"}, {pos: token.Position{Offset: 2}, fromText: "", toText: "\nsecond insert"}, }, s: "my\nshort\nfile\n", want: "my\none insert\nsecond insert\nshort\nfile\n", }, } for i, tt := range tests { t.Run(fmt.Sprintf("test #%v: %v", i, tt.name), func(t *testing.T) { b := performBufferEdits([]byte(tt.s), tt.fileEdits) got := string(b) if len(got) != len(tt.want) { t.Errorf("len(got) = %v, len(want) = %v", len(got), len(tt.want)) } if got != tt.want { t.Errorf("got = %v, want = %v", got, tt.want) } }) } }
explode_data.jsonl/50640
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 647 }
[ 2830, 3393, 46951, 4095, 2715, 1199, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 17661, 2715, 1199, 29838, 1703, 4036, 198, 197, 1903, 260, 914, 198, 197, 50780, 414, 914, 198, 197, 5940...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParseWait(t *testing.T) { incorrectIntervals := []string{ "500x", // Incorrect min interval "500s:4x", // Incorrect max interval "1m:1s", // Min interval larger than max interval } for _, intervalString := range incorrectIntervals { wait, err := ParseWait(intervalString) assert.Error(t, err) assert.Nil(t, wait) } correctIntervals := map[string]Wait{ "": {0, 0}, // Empty time interval string "1ms": {1000000, 4000000}, // Correct min interval without max "1ms:111ms": {1000000, 111000000}, // Correct min:max time interval } for intervalString, expectedWait := range correctIntervals { wait, err := ParseWait(intervalString) assert.NoError(t, err) assert.Equal(t, &expectedWait, wait) } }
explode_data.jsonl/78336
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 296 }
[ 2830, 3393, 14463, 14190, 1155, 353, 8840, 836, 8, 341, 197, 61954, 1072, 42198, 1669, 3056, 917, 515, 197, 197, 1, 20, 15, 15, 87, 497, 262, 442, 81857, 1308, 9873, 198, 197, 197, 1, 20, 15, 15, 82, 25, 19, 87, 497, 442, 81857,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDaoSetReplySetRds(t *testing.T) { convey.Convey("SetReplySetRds", t, func(ctx convey.C) { var ( oid = int64(0) tp = int(0) rpIDs = []int64{} c = context.Background() ) for i := 0; i < 10000; i++ { rpIDs = append(rpIDs, int64(i)) } ctx.Convey("When everything goes positive", func(ctx convey.C) { err := d.SetReplySetRds(c, oid, tp, rpIDs) ctx.Convey("Then err should be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) }) receivedRpIDs, err := d.ReplySetRds(c, oid, tp) if err != nil { t.Fatal(err) } ctx.So(len(receivedRpIDs), convey.ShouldEqual, len(rpIDs)) }) d.DelReplySetRds(c, oid, tp) }) }
explode_data.jsonl/21793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 341 }
[ 2830, 3393, 12197, 1649, 20841, 1649, 49, 5356, 1155, 353, 8840, 836, 8, 341, 37203, 5617, 4801, 5617, 445, 1649, 20841, 1649, 49, 5356, 497, 259, 11, 2915, 7502, 20001, 727, 8, 341, 197, 2405, 2399, 298, 197, 588, 256, 284, 526, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetTagsArtifact(t *testing.T) { art, err := NewArtifact("file", "./testData/thehive.txt") if err != nil { t.Errorf("unexpected error: %s", err.Error()) } if len(art.Tags) != 0 { t.Errorf("expected Tags to be empty, but found %d", len(art.Tags)) } art.SetTags([]string{"one", "two"}) if len(art.Tags) != 2 { t.Errorf("expected Tags to have two, but found %d", len(art.Tags)) } if art.Tags[0] != "one" || art.Tags[1] != "two" { t.Errorf("expected Tags to be [one, two], but found %s", art.Tags) } err = art.SetTags([]string{}) if err != nil && err.Error() != "tags could not be empty" { t.Errorf("expected tags could not be empty as error, but none was found") } }
explode_data.jsonl/29244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 276 }
[ 2830, 3393, 1649, 15930, 85578, 1155, 353, 8840, 836, 8, 341, 197, 471, 11, 1848, 1669, 1532, 85578, 445, 1192, 497, 5924, 1944, 1043, 51257, 88568, 3909, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 53859, 1465, 25, 1018, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestGoBuildIsSupportedRef(t *testing.T) { base, err := random.Image(1024, 3) if err != nil { t.Fatalf("random.Image() = %v", err) } ng, err := NewGo(context.Background(), "", WithBaseImages(func(context.Context, string) (name.Reference, Result, error) { return nil, base, nil })) if err != nil { t.Fatalf("NewGo() = %v", err) } // Supported import paths. for _, importpath := range []string{ "ko://github.com/google/ko", // ko can build itself. } { t.Run(importpath, func(t *testing.T) { if err := ng.IsSupportedReference(importpath); err != nil { t.Errorf("IsSupportedReference(%q) = (%v), want nil", importpath, err) } }) } // Unsupported import paths. for _, importpath := range []string{ "ko://github.com/google/ko/pkg/build", // not a command. "ko://github.com/google/ko/pkg/nonexistent", // does not exist. } { t.Run(importpath, func(t *testing.T) { if err := ng.IsSupportedReference(importpath); err == nil { t.Errorf("IsSupportedReference(%v) = nil, want error", importpath) } }) } }
explode_data.jsonl/2479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 409 }
[ 2830, 3393, 10850, 11066, 3872, 34636, 3945, 1155, 353, 8840, 836, 8, 341, 24195, 11, 1848, 1669, 4194, 7528, 7, 16, 15, 17, 19, 11, 220, 18, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 11463, 7528, 368, 284, 1018, 85, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthorizeV0(t *testing.T) { a, err := newWithContents(t, `{ "readonly": true, "resource": "events" } {"user":"scheduler", "readonly": true, "resource": "pods" } {"user":"scheduler", "resource": "bindings" } {"user":"kubelet", "readonly": true, "resource": "bindings" } {"user":"kubelet", "resource": "events" } {"user":"alice", "namespace": "projectCaribou"} {"user":"bob", "readonly": true, "namespace": "projectCaribou"} `) if err != nil { t.Fatalf("unable to read policy file: %v", err) } authenticatedGroup := []string{user.AllAuthenticated} uScheduler := user.DefaultInfo{Name: "scheduler", UID: "uid1", Groups: authenticatedGroup} uAlice := user.DefaultInfo{Name: "alice", UID: "uid3", Groups: authenticatedGroup} uChuck := user.DefaultInfo{Name: "chuck", UID: "uid5", Groups: authenticatedGroup} testCases := []struct { User user.DefaultInfo Verb string Resource string NS string APIGroup string Path string ExpectAllow bool }{ // Scheduler can read pods {User: uScheduler, Verb: "list", Resource: "pods", NS: "ns1", ExpectAllow: true}, {User: uScheduler, Verb: "list", Resource: "pods", NS: "", ExpectAllow: true}, // Scheduler cannot write pods {User: uScheduler, Verb: "create", Resource: "pods", NS: "ns1", ExpectAllow: false}, {User: uScheduler, Verb: "create", Resource: "pods", NS: "", ExpectAllow: false}, // Scheduler can write bindings {User: uScheduler, Verb: "get", Resource: "bindings", NS: "ns1", ExpectAllow: true}, {User: uScheduler, Verb: "get", Resource: "bindings", NS: "", ExpectAllow: true}, // Alice can read and write anything in the right namespace. 
{User: uAlice, Verb: "get", Resource: "pods", NS: "projectCaribou", ExpectAllow: true}, {User: uAlice, Verb: "get", Resource: "widgets", NS: "projectCaribou", ExpectAllow: true}, {User: uAlice, Verb: "get", Resource: "", NS: "projectCaribou", ExpectAllow: true}, {User: uAlice, Verb: "update", Resource: "pods", NS: "projectCaribou", ExpectAllow: true}, {User: uAlice, Verb: "update", Resource: "widgets", NS: "projectCaribou", ExpectAllow: true}, {User: uAlice, Verb: "update", Resource: "", NS: "projectCaribou", ExpectAllow: true}, {User: uAlice, Verb: "update", Resource: "foo", NS: "projectCaribou", APIGroup: "bar", ExpectAllow: true}, // .. but not the wrong namespace. {User: uAlice, Verb: "get", Resource: "pods", NS: "ns1", ExpectAllow: false}, {User: uAlice, Verb: "get", Resource: "widgets", NS: "ns1", ExpectAllow: false}, {User: uAlice, Verb: "get", Resource: "", NS: "ns1", ExpectAllow: false}, // Chuck can read events, since anyone can. {User: uChuck, Verb: "get", Resource: "events", NS: "ns1", ExpectAllow: true}, {User: uChuck, Verb: "get", Resource: "events", NS: "", ExpectAllow: true}, // Chuck can't do other things. {User: uChuck, Verb: "update", Resource: "events", NS: "ns1", ExpectAllow: false}, {User: uChuck, Verb: "get", Resource: "pods", NS: "ns1", ExpectAllow: false}, {User: uChuck, Verb: "get", Resource: "floop", NS: "ns1", ExpectAllow: false}, // Chunk can't access things with no kind or namespace {User: uChuck, Verb: "get", Path: "/", Resource: "", NS: "", ExpectAllow: false}, } for i, tc := range testCases { attr := authorizer.AttributesRecord{ User: &tc.User, Verb: tc.Verb, Resource: tc.Resource, Namespace: tc.NS, APIGroup: tc.APIGroup, Path: tc.Path, ResourceRequest: len(tc.NS) > 0 || len(tc.Resource) > 0, } authorized, _, _ := a.Authorize(attr) if tc.ExpectAllow != authorized { t.Logf("tc: %v -> attr %v", tc, attr) t.Errorf("%d: Expected allowed=%v but actually allowed=%v\n\t%v", i, tc.ExpectAllow, authorized, tc) } } }
explode_data.jsonl/46828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1558 }
[ 2830, 3393, 37483, 53, 15, 1155, 353, 8840, 836, 8, 341, 11323, 11, 1848, 1669, 501, 2354, 14803, 1155, 11, 53692, 503, 330, 22569, 788, 830, 11, 330, 9233, 788, 330, 12389, 1, 256, 456, 4913, 872, 3252, 63122, 497, 330, 22569, 788,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRotateBySignal(t *testing.T) { n := 0 var buf [3]bytes.Buffer c := make(chan struct{}, 3) f := CreateLogrotate("name") f.Fopen = func(name string, mode os.FileMode) (io.Writer, error) { n++; c <- struct{}{}; return &buf[n-1], nil } q := make(chan os.Signal, 1) signal.Notify(q, syscall.SIGUSR1) _, _ = f.Write([]byte("before")) err := syscall.Kill(os.Getpid(), syscall.SIGUSR1) require.NoError(t, err) <-q loop: for { select { case <-c: case <-time.After(100 * time.Millisecond): break loop } } _, _ = f.Write([]byte("after")) // t.Logf("n: %v", n) assert.True(t, n >= 2) assert.Equal(t, "before", buf[0].String()) assert.Equal(t, "after", buf[1].String()) // assert.Equal(t, "beforeafter", buf[0].String()+buf[1].String()+buf[2].String()) }
explode_data.jsonl/3485
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 34540, 1359, 26810, 1155, 353, 8840, 836, 8, 341, 9038, 1669, 220, 15, 198, 2405, 6607, 508, 18, 60, 9651, 22622, 198, 1444, 1669, 1281, 35190, 2036, 22655, 220, 18, 692, 1166, 1669, 4230, 2201, 16213, 445, 606, 1138, 1166...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNodeEqual(t *testing.T) { nodeHandles := []string{ "ID", "StableID", "Name", "DisplayName", "User", "Sharer", "Key", "KeyExpiry", "Machine", "DiscoKey", "Addresses", "AllowedIPs", "Endpoints", "DERP", "Hostinfo", "Created", "LastSeen", "KeepAlive", "MachineAuthorized", } if have := fieldsOf(reflect.TypeOf(Node{})); !reflect.DeepEqual(have, nodeHandles) { t.Errorf("Node.Equal check might be out of sync\nfields: %q\nhandled: %q\n", have, nodeHandles) } newPublicKey := func(t *testing.T) wgkey.Key { t.Helper() k, err := wgkey.NewPrivate() if err != nil { t.Fatal(err) } return k.Public() } n1 := newPublicKey(t) now := time.Now() tests := []struct { a, b *Node want bool }{ { &Node{}, nil, false, }, { nil, &Node{}, false, }, { &Node{}, &Node{}, true, }, { &Node{}, &Node{}, true, }, { &Node{ID: 1}, &Node{}, false, }, { &Node{ID: 1}, &Node{ID: 1}, true, }, { &Node{StableID: "node-abcd"}, &Node{}, false, }, { &Node{StableID: "node-abcd"}, &Node{StableID: "node-abcd"}, true, }, { &Node{User: 0}, &Node{User: 1}, false, }, { &Node{User: 1}, &Node{User: 1}, true, }, { &Node{Key: NodeKey(n1)}, &Node{Key: NodeKey(newPublicKey(t))}, false, }, { &Node{Key: NodeKey(n1)}, &Node{Key: NodeKey(n1)}, true, }, { &Node{KeyExpiry: now}, &Node{KeyExpiry: now.Add(60 * time.Second)}, false, }, { &Node{KeyExpiry: now}, &Node{KeyExpiry: now}, true, }, { &Node{Machine: MachineKey(n1)}, &Node{Machine: MachineKey(newPublicKey(t))}, false, }, { &Node{Machine: MachineKey(n1)}, &Node{Machine: MachineKey(n1)}, true, }, { &Node{Addresses: []netaddr.IPPrefix{}}, &Node{Addresses: nil}, false, }, { &Node{Addresses: []netaddr.IPPrefix{}}, &Node{Addresses: []netaddr.IPPrefix{}}, true, }, { &Node{AllowedIPs: []netaddr.IPPrefix{}}, &Node{AllowedIPs: nil}, false, }, { &Node{Addresses: []netaddr.IPPrefix{}}, &Node{Addresses: []netaddr.IPPrefix{}}, true, }, { &Node{Endpoints: []string{}}, &Node{Endpoints: nil}, false, }, { &Node{Endpoints: []string{}}, &Node{Endpoints: []string{}}, true, }, { 
&Node{Hostinfo: Hostinfo{Hostname: "alice"}}, &Node{Hostinfo: Hostinfo{Hostname: "bob"}}, false, }, { &Node{Hostinfo: Hostinfo{}}, &Node{Hostinfo: Hostinfo{}}, true, }, { &Node{Created: now}, &Node{Created: now.Add(60 * time.Second)}, false, }, { &Node{Created: now}, &Node{Created: now}, true, }, { &Node{LastSeen: &now}, &Node{LastSeen: nil}, false, }, { &Node{LastSeen: &now}, &Node{LastSeen: &now}, true, }, { &Node{DERP: "foo"}, &Node{DERP: "bar"}, false, }, } for i, tt := range tests { got := tt.a.Equal(tt.b) if got != tt.want { t.Errorf("%d. Equal = %v; want %v", i, got, tt.want) } } }
explode_data.jsonl/48386
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1611 }
[ 2830, 3393, 1955, 2993, 1155, 353, 8840, 836, 8, 341, 20831, 65928, 1669, 3056, 917, 515, 197, 197, 1, 915, 497, 330, 623, 480, 915, 497, 330, 675, 497, 330, 26456, 497, 330, 1474, 497, 330, 42215, 261, 756, 197, 197, 1, 1592, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPrepareErrorRequiredDownloadAndInstall(t *testing.T) { rh := newFakeSTI(&FakeSTI{}) rh.SetScripts([]string{api.Assemble, api.Run}, []string{api.SaveArtifacts}) rh.installer.(*test.FakeInstaller).Error = fmt.Errorf("%v", api.Assemble) err := rh.Prepare(rh.config) if err == nil || err.Error() != api.Assemble { t.Errorf("An error was expected for required DownloadAndInstall, but got different: %v", err) } }
explode_data.jsonl/59448
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 151 }
[ 2830, 3393, 50590, 1454, 8164, 11377, 3036, 24690, 1155, 353, 8840, 836, 8, 341, 7000, 71, 1669, 501, 52317, 784, 40, 2099, 52317, 784, 40, 37790, 7000, 71, 4202, 44942, 10556, 917, 90, 2068, 20242, 15790, 11, 6330, 16708, 2137, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEtcdBank(t *testing.T) { _ = failpoint.Enable("github.com/pingcap/tiflow/pkg/orchestrator/InjectProgressRequestAfterCommit", "10%return(true)") defer func() { _ = failpoint.Disable("github.com/pingcap/tiflow/pkg/orchestrator/InjectProgressRequestAfterCommit") }() totalAccountNumber := 25 workerNumber := 10 var wg sync.WaitGroup ctx, cancel := context.WithTimeout(context.Background(), time.Second*10) defer cancel() newClient, closer := setUpTest(t) defer closer() cli := newClient() defer func() { _ = cli.Unwrap().Close() }() for i := 0; i < totalAccountNumber; i++ { _, err := cli.Put(ctx, fmt.Sprintf("%s%d", bankTestPrefix, i), "0") require.Nil(t, err) } for i := 0; i < workerNumber; i++ { i := i wg.Add(1) go func() { defer wg.Done() for { worker, err := NewEtcdWorker(cli, bankTestPrefix, &bankReactor{ accountNumber: totalAccountNumber, }, &bankReactorState{t: t, index: i, account: make([]int, totalAccountNumber)}) require.Nil(t, err) err = worker.Run(ctx, nil, 100*time.Millisecond, "127.0.0.1", "") if err == nil || err.Error() == "etcdserver: request timed out" { continue } require.Contains(t, err.Error(), "context deadline exceeded") return } }() } wg.Wait() }
explode_data.jsonl/22750
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 529 }
[ 2830, 3393, 31860, 4385, 25828, 1155, 353, 8840, 836, 8, 341, 197, 62, 284, 3690, 2768, 32287, 445, 5204, 905, 4322, 287, 11346, 5523, 333, 10303, 22523, 5144, 331, 15111, 850, 14, 13738, 9496, 1900, 6025, 33441, 497, 330, 16, 15, 4, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParsingCorrect(t *testing.T) { buf := bytes.NewBuffer(eventJSON(&testEvent1)) p := &JSONEventParser{} evs, err := p.Parse(buf) assert.NoError(t, err) assert.Len(t, evs, 1) assert.True(t, cmp.Equal(testEvent1, evs[0])) }
explode_data.jsonl/10881
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 104 }
[ 2830, 3393, 68839, 33092, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 5820, 7121, 4095, 6235, 5370, 2099, 1944, 1556, 16, 1171, 3223, 1669, 609, 5370, 1556, 6570, 16094, 74837, 82, 11, 1848, 1669, 281, 8937, 10731, 340, 6948, 35699, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1