text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestValidateErrorPageReturnFails(t *testing.T) { tests := []struct { msg string epr v1.ErrorPageReturn }{ { msg: "empty body", epr: v1.ErrorPageReturn{ ActionReturn: v1.ActionReturn{ Code: 200, Type: "application/json", Body: "", }, }, }, { msg: "unescaped double quotes", epr: v1.ErrorPageReturn{ ActionReturn: v1.ActionReturn{ Code: 200, Type: "", Body: ` "Oops, Could not process request"`, }, }, }, { msg: "invalid variable", epr: v1.ErrorPageReturn{ ActionReturn: v1.ActionReturn{ Code: 0, Type: "", Body: "Could not process request, response with invalid var: ${invalid}", }, }, }, { msg: "invalid cookie name", epr: v1.ErrorPageReturn{ ActionReturn: v1.ActionReturn{ Code: 200, Type: "application/json", Body: `{\"message\": \"Could not process request, try again\", \"status\": \"${status}\"}`, }, Headers: []v1.Header{ { Name: "Set-Cookie$_%^$ -", Value: "mycookie=true", }, }, }, }, } for _, test := range tests { allErrs := validateErrorPageReturn(&test.epr, field.NewPath("return")) if len(allErrs) == 0 { t.Errorf("validateErrorPageReturn(%v) returned no errors for invalid input for the case of %v", test.epr, test.msg) } } }
explode_data.jsonl/65918
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 619 }
[ 2830, 3393, 17926, 1454, 2665, 5598, 37, 6209, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 21169, 914, 198, 197, 7727, 649, 348, 16, 6141, 2665, 5598, 198, 197, 59403, 197, 197, 515, 298, 21169, 25, 330, 3194, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUnbondingDelegationEqual(t *testing.T) { ubd1 := NewUnbondingDelegation(sdk.AccAddress(addr1), addr2, 0, time.Unix(0, 0), sdk.NewInt(0)) ubd2 := ubd1 ok := ubd1.Equal(ubd2) require.True(t, ok) ubd2.ValidatorAddress = addr3 ubd2.Entries[0].CompletionTime = time.Unix(20*20*2, 0) ok = ubd1.Equal(ubd2) require.False(t, ok) }
explode_data.jsonl/58936
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 165 }
[ 2830, 3393, 1806, 64239, 287, 1912, 87566, 2993, 1155, 353, 8840, 836, 8, 341, 197, 392, 67, 16, 1669, 1532, 1806, 64239, 287, 1912, 87566, 1141, 7584, 77538, 4286, 24497, 16, 701, 10789, 17, 11, 220, 15, 345, 197, 21957, 10616, 941, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_validateRestartCounts(t *testing.T) { tests := []struct { name string metrics *systemPodsMetrics config *measurement.MeasurementConfig wantErr bool }{ { name: "check-disabled", metrics: generatePodMetrics("p", "c", 1), config: buildConfig(t, false, nil), wantErr: false, }, { name: "check-enabled-violation", metrics: generatePodMetrics("p", "c", 1), config: buildConfig(t, true, nil), wantErr: true, }, { name: "check-enabled-ok", metrics: generatePodMetrics("p", "c", 0), config: buildConfig(t, true, nil), wantErr: false, }, { name: "override-equal-to-actual-count", metrics: generatePodMetrics("p", "c", 3), config: buildConfig(t, true, map[string]int{"c": 3}), wantErr: false, }, { name: "override-default-used", metrics: generatePodMetrics("p", "c", 3), config: buildConfig(t, true, map[string]int{"default": 3}), wantErr: false, }, { name: "override-default-not-used", metrics: generatePodMetrics("p", "c", 3), config: buildConfig(t, true, map[string]int{ "default": 5, "c": 0, }), wantErr: true, }, { name: "override-below-actual-count", metrics: generatePodMetrics("p", "c", 3), config: buildConfig(t, true, map[string]int{"c": 2}), wantErr: true, }, { name: "override-for-different-container", metrics: generatePodMetrics("p", "c1", 3), config: buildConfig(t, true, map[string]int{"c2": 4}), wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { overrides, err := getThresholdOverrides(tt.config) if err != nil { t.Fatalf("getThresholdOverrides() error = %v", err) } if err := validateRestartCounts(tt.metrics, tt.config, overrides); (err != nil) != tt.wantErr { t.Errorf("verifyViolations() error = %v, wantErr %v", err, tt.wantErr) } }) } }
explode_data.jsonl/18531
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 886 }
[ 2830, 3393, 42681, 59354, 63731, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 2109, 13468, 353, 8948, 23527, 82, 27328, 198, 197, 25873, 220, 353, 81425, 53447, 24359, 2648, 198, 197, 50780...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetAlertConfig(t *testing.T) { scheme := testScheme() scenarios := []struct { Name string InitialObjs []runtime.Object Namespace string Assert func(client.Client, map[string]*AlertConfig, error) error }{ { Name: "Success", Namespace: "redhat-test-operator", InitialObjs: []runtime.Object{ &corev1.ConfigMap{ ObjectMeta: v1.ObjectMeta{ Name: "rate-limit-alerts", Namespace: "redhat-test-operator", }, Data: map[string]string{ "alerts": ` { "alert-1": { "type": "Threshold", "ruleName": "Rule1", "level": "warning", "period": "2h", "threshold": { "minRate": "80%", "maxRate": "90%" } } } `, }, }, }, Assert: func(c client.Client, config map[string]*AlertConfig, err error) error { if err != nil { return fmt.Errorf("Unexpected error: %v", err) } alertConfig, ok := config["alert-1"] if !ok { return fmt.Errorf("expected key alert-1 not found in resulting config") } maxRate := "90%" expectedConfig := &AlertConfig{ RuleName: "Rule1", Level: "warning", Threshold: &AlertThresholdConfig{ MaxRate: &maxRate, MinRate: "80%", }, Period: "2h", Type: AlertTypeThreshold, } if !reflect.DeepEqual(alertConfig, expectedConfig) { return fmt.Errorf("Obtained invalid config. Expected %v, but got %v", expectedConfig, alertConfig) } return nil }, }, } for _, scenario := range scenarios { t.Run(scenario.Name, func(t *testing.T) { client := fake.NewFakeClientWithScheme(scheme, scenario.InitialObjs...) config, err := GetAlertConfig(context.TODO(), client, scenario.Namespace) if err := scenario.Assert(client, config, err); err != nil { t.Error(err) } }) } }
explode_data.jsonl/62774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 896 }
[ 2830, 3393, 1949, 9676, 2648, 1155, 353, 8840, 836, 8, 341, 1903, 8058, 1669, 1273, 28906, 2822, 29928, 60494, 1669, 3056, 1235, 341, 197, 21297, 286, 914, 198, 197, 197, 6341, 4121, 2519, 3056, 22255, 8348, 198, 197, 90823, 256, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSearchUsers(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() search := &model.UserSearch{Term: th.BasicUser.Username} users, _, err := th.Client.SearchUsers(search) require.NoError(t, err) require.True(t, findUserInList(th.BasicUser.Id, users), "should have found user") _, appErr := th.App.UpdateActive(th.Context, th.BasicUser2, false) require.Nil(t, appErr) search.Term = th.BasicUser2.Username search.AllowInactive = false users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.False(t, findUserInList(th.BasicUser2.Id, users), "should not have found user") search.AllowInactive = true users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.True(t, findUserInList(th.BasicUser2.Id, users), "should have found user") search.Term = th.BasicUser.Username search.AllowInactive = false search.TeamId = th.BasicTeam.Id users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.True(t, findUserInList(th.BasicUser.Id, users), "should have found user") search.NotInChannelId = th.BasicChannel.Id users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.False(t, findUserInList(th.BasicUser.Id, users), "should not have found user") search.TeamId = "" search.NotInChannelId = "" search.InChannelId = th.BasicChannel.Id users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.True(t, findUserInList(th.BasicUser.Id, users), "should have found user") search.InChannelId = "" search.NotInChannelId = th.BasicChannel.Id _, resp, err := th.Client.SearchUsers(search) require.Error(t, err) CheckBadRequestStatus(t, resp) search.NotInChannelId = model.NewId() search.TeamId = model.NewId() _, resp, err = th.Client.SearchUsers(search) require.Error(t, err) CheckForbiddenStatus(t, resp) search.NotInChannelId = "" search.TeamId = model.NewId() _, resp, err = th.Client.SearchUsers(search) require.Error(t, err) CheckForbiddenStatus(t, resp) search.InChannelId = model.NewId() 
search.TeamId = "" _, resp, err = th.Client.SearchUsers(search) require.Error(t, err) CheckForbiddenStatus(t, resp) // Test search for users not in any team search.TeamId = "" search.NotInChannelId = "" search.InChannelId = "" search.NotInTeamId = th.BasicTeam.Id users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.False(t, findUserInList(th.BasicUser.Id, users), "should not have found user") oddUser := th.CreateUser() search.Term = oddUser.Username users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.True(t, findUserInList(oddUser.Id, users), "should have found user") _, _, err = th.SystemAdminClient.AddTeamMember(th.BasicTeam.Id, oddUser.Id) require.NoError(t, err) users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.False(t, findUserInList(oddUser.Id, users), "should not have found user") search.NotInTeamId = model.NewId() _, resp, err = th.Client.SearchUsers(search) require.Error(t, err) CheckForbiddenStatus(t, resp) search.Term = th.BasicUser.Username th.App.UpdateConfig(func(cfg *model.Config) { *cfg.PrivacySettings.ShowEmailAddress = false }) th.App.UpdateConfig(func(cfg *model.Config) { *cfg.PrivacySettings.ShowFullName = false }) _, appErr = th.App.UpdateActive(th.Context, th.BasicUser2, true) require.Nil(t, appErr) search.InChannelId = "" search.NotInTeamId = "" search.Term = th.BasicUser2.Email users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.False(t, findUserInList(th.BasicUser2.Id, users), "should not have found user") search.Term = th.BasicUser2.FirstName users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.False(t, findUserInList(th.BasicUser2.Id, users), "should not have found user") search.Term = th.BasicUser2.LastName users, _, err = th.Client.SearchUsers(search) require.NoError(t, err) require.False(t, findUserInList(th.BasicUser2.Id, users), "should not have found user") search.Term = th.BasicUser.FirstName 
search.InChannelId = th.BasicChannel.Id search.NotInChannelId = th.BasicChannel.Id search.TeamId = th.BasicTeam.Id users, _, err = th.SystemAdminClient.SearchUsers(search) require.NoError(t, err) require.True(t, findUserInList(th.BasicUser.Id, users), "should have found user") id := model.NewId() group, appErr := th.App.CreateGroup(&model.Group{ DisplayName: "dn-foo_" + id, Name: model.NewString("name" + id), Source: model.GroupSourceLdap, Description: "description_" + id, RemoteId: model.NewString(model.NewId()), }) assert.Nil(t, appErr) search = &model.UserSearch{Term: th.BasicUser.Username, InGroupId: group.Id} t.Run("Requires ldap license when searching in group", func(t *testing.T) { _, resp, err = th.SystemAdminClient.SearchUsers(search) require.Error(t, err) CheckNotImplementedStatus(t, resp) }) th.App.Srv().SetLicense(model.NewTestLicense("ldap")) t.Run("Requires manage system permission when searching for users in a group", func(t *testing.T) { _, resp, err = th.Client.SearchUsers(search) require.Error(t, err) CheckForbiddenStatus(t, resp) }) t.Run("Returns empty list when no users found searching for users in a group", func(t *testing.T) { users, _, err = th.SystemAdminClient.SearchUsers(search) require.NoError(t, err) require.Empty(t, users) }) _, appErr = th.App.UpsertGroupMember(group.Id, th.BasicUser.Id) assert.Nil(t, appErr) t.Run("Returns user in group user found in group", func(t *testing.T) { users, _, err = th.SystemAdminClient.SearchUsers(search) require.NoError(t, err) require.Equal(t, users[0].Id, th.BasicUser.Id) }) }
explode_data.jsonl/47495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2111 }
[ 2830, 3393, 5890, 7137, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 45573, 1669, 609, 2528, 7344, 5890, 90, 17249, 25, 270, 48868, 1474, 42777, 630, 90896, 11, 8358, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateStructure(t *testing.T) { extractHash := func(ctx context.Context) []byte { return nil } // Scenarios I-V without TLS, scenarios VI onwards TLS // Scenario I: Nil request res, err := validateStructure(context.Background(), nil, "", false, extractHash) assert.Nil(t, res) assert.Equal(t, "nil request", err.Error()) // Scenario II: Malformed envelope res, err = validateStructure(context.Background(), &discovery.SignedRequest{ Payload: []byte{1, 2, 3}, }, "", false, extractHash) assert.Nil(t, res) assert.Contains(t, err.Error(), "failed parsing request") // Scenario III: Empty request res, err = validateStructure(context.Background(), &discovery.SignedRequest{}, "", false, extractHash) assert.Nil(t, res) assert.Equal(t, "access denied, no authentication info in request", err.Error()) // Scenario IV: request without a client identity req := &discovery.Request{ Authentication: &discovery.AuthInfo{}, } b, _ := proto.Marshal(req) res, err = validateStructure(context.Background(), &discovery.SignedRequest{ Payload: b, }, "", false, extractHash) assert.Nil(t, res) assert.Equal(t, "access denied, client identity wasn't supplied", err.Error()) // Scenario V: request with a client identity, should succeed because no TLS is used req = &discovery.Request{ Authentication: &discovery.AuthInfo{ ClientIdentity: []byte{1, 2, 3}, }, } b, _ = proto.Marshal(req) res, err = validateStructure(context.Background(), &discovery.SignedRequest{ Payload: b, }, "", false, extractHash) assert.NoError(t, err) // Ensure returned request is as before serialization to bytes assert.Equal(t, req, res) // Scenario VI: request with a client identity but with TLS enabled but client doesn't send a TLS cert req = &discovery.Request{ Authentication: &discovery.AuthInfo{ ClientIdentity: []byte{1, 2, 3}, }, } b, _ = proto.Marshal(req) res, err = validateStructure(context.Background(), &discovery.SignedRequest{ Payload: b, }, "", true, extractHash) assert.Nil(t, res) assert.Equal(t, "client 
didn't send a TLS certificate", err.Error()) // Scenario VII: request with a client identity and with TLS enabled but the TLS cert hash doesn't match // the computed one extractHash = func(ctx context.Context) []byte { return []byte{1, 2} } req = &discovery.Request{ Authentication: &discovery.AuthInfo{ ClientIdentity: []byte{1, 2, 3}, ClientTlsCertHash: []byte{1, 2, 3}, }, } b, _ = proto.Marshal(req) res, err = validateStructure(context.Background(), &discovery.SignedRequest{ Payload: b, }, "", true, extractHash) assert.Nil(t, res) assert.Equal(t, "client claimed TLS hash doesn't match computed TLS hash from gRPC stream", err.Error()) // Scenario VIII: request with a client identity and with TLS enabled and the TLS cert hash doesn't match // the computed one extractHash = func(ctx context.Context) []byte { return []byte{1, 2, 3} } req = &discovery.Request{ Authentication: &discovery.AuthInfo{ ClientIdentity: []byte{1, 2, 3}, ClientTlsCertHash: []byte{1, 2, 3}, }, } b, _ = proto.Marshal(req) res, err = validateStructure(context.Background(), &discovery.SignedRequest{ Payload: b, }, "", true, extractHash) }
explode_data.jsonl/26903
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1122 }
[ 2830, 3393, 17926, 22952, 1155, 353, 8840, 836, 8, 341, 8122, 2144, 6370, 1669, 2915, 7502, 2266, 9328, 8, 3056, 3782, 341, 197, 853, 2092, 198, 197, 532, 197, 322, 2463, 60494, 358, 19625, 2041, 41654, 11, 25283, 29668, 59425, 41654, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClient_AppToAppAuth(t *testing.T) { t.Log("========== AppToAppAuth ==========") var result, err = client.AppToAppAuth("http://127.0.0.1") t.Log(result, err) }
explode_data.jsonl/15850
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 2959, 36117, 1249, 2164, 5087, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 445, 43256, 1845, 1249, 2164, 5087, 284, 885, 428, 340, 2405, 1102, 11, 1848, 284, 2943, 5105, 1249, 2164, 5087, 445, 1254, 1110, 16, 17, 22, 13, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSetExternalTagsUnicodeUnsuported(t *testing.T) { code := ` tags = [ ('hostname1', {'source_type1': [u'tag1', 123, u'tag2\u00E1']}), ('hostname2', {'source_type2': [u'tag3', [], u'tag4']}), ('hostname3', {'source_type3': [1,2,3]}), ] datadog_agent.set_external_tags(tags) ` out, err := run(code) if err != nil { t.Fatal(err) } if out != "hostname1,source_type1,tag1\nhostname2,source_type2,tag3,tag4\nhostname3,source_type3," { t.Errorf("Unexpected printed value: '%s'", out) } }
explode_data.jsonl/34603
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 1649, 25913, 15930, 33920, 1806, 27051, 35526, 1155, 353, 8840, 836, 8, 341, 43343, 1669, 22074, 3244, 2032, 284, 2278, 197, 197, 492, 27806, 16, 516, 5360, 2427, 1819, 16, 1210, 508, 84, 944, 351, 16, 516, 220, 16, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestResolveSliceWithAMap(t *testing.T) { cxt := context.NewTestContext(t) cxt.AddTestFile("testdata/slice-test.yaml", config.Name) m, err := LoadManifestFrom(cxt.Context, config.Name) require.NoError(t, err, "could not load manifest") rm := RuntimeManifest{Manifest: m} installStep := rm.Install[0] os.Setenv("COMMAND", "echo hello world") err = rm.ResolveStep(installStep) assert.NoError(t, err) assert.NotNil(t, installStep.Data) t.Logf("install data %v", installStep.Data) exec := installStep.Data["exec"].(map[interface{}]interface{}) assert.NotNil(t, exec) flags := exec["flags"].(map[interface{}]interface{}) assert.Len(t, flags, 1) assert.Equal(t, "echo hello world", flags["c"].(string)) assert.NotNil(t, flags) }
explode_data.jsonl/37717
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 56808, 33236, 2354, 1402, 391, 1155, 353, 8840, 836, 8, 341, 1444, 2252, 1669, 2266, 7121, 2271, 1972, 1155, 692, 1444, 2252, 1904, 2271, 1703, 445, 92425, 2687, 4754, 16839, 33406, 497, 2193, 2967, 692, 2109, 11, 1848, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateMux(t *testing.T) { proxyMux := CreateMux() if proxyMux == nil { t.Fail() t.Logf("proxyMux was not created") } }
explode_data.jsonl/48433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 59 }
[ 2830, 3393, 4021, 44, 2200, 1155, 353, 8840, 836, 8, 341, 197, 22803, 44, 2200, 1669, 4230, 44, 2200, 741, 743, 13291, 44, 2200, 621, 2092, 341, 197, 3244, 57243, 741, 197, 3244, 98954, 445, 22803, 44, 2200, 572, 537, 3465, 1138, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestVersionUpgrade1_0To1_1(t *testing.T) { v1_0ControlPlane := newControlPlaneWithVersion("my-smcp", "istio-system", "v1.0") v1_1ControlPlane := newControlPlaneWithVersion("my-smcp", "istio-system", "v1.1") v1_0ControlPlane.SetUID("random-uid") v1_1ControlPlane.SetUID("random-uid") cases := []struct { name string allowed bool resources []runtime.Object }{ { name: "valid", allowed: true, }, { name: "unsupported-resource-other-namespace", allowed: true, resources: []runtime.Object{ &configv1alpha2.Stdio{ Base: simple.Base{ TypeMeta: metav1.TypeMeta{ APIVersion: configv1alpha2.SchemeGroupVersion.String(), Kind: "stdio", }, ObjectMeta: metav1.ObjectMeta{ Name: "dummy-stdio", Namespace: "other-namespace", }, }, }, }, }, { name: "unsupported-resource-controller-owned", allowed: true, resources: []runtime.Object{ &configv1alpha2.Stdio{ Base: simple.Base{ TypeMeta: metav1.TypeMeta{ APIVersion: configv1alpha2.SchemeGroupVersion.String(), Kind: "stdio", }, ObjectMeta: metav1.ObjectMeta{ Name: "dummy-stdio", Namespace: "istio-system", OwnerReferences: []metav1.OwnerReference{ *metav1.NewControllerRef(v1_0ControlPlane, maistrav1.SchemeGroupVersion.WithKind("ServiceMeshControlPlane")), }, }, }, }, }, }, { name: "unsupported-resource", allowed: false, resources: []runtime.Object{ &configv1alpha2.Stdio{ Base: simple.Base{ TypeMeta: metav1.TypeMeta{ APIVersion: configv1alpha2.SchemeGroupVersion.String(), Kind: "stdio", }, ObjectMeta: metav1.ObjectMeta{ Name: "dummy-stdio", Namespace: "app-namespace", }, }, }, }, }, { name: "service-with-http-ports", allowed: true, resources: []runtime.Object{ &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "some-service", Namespace: "app-namespace", }, Spec: corev1.ServiceSpec{ Ports: []corev1.ServicePort{ corev1.ServicePort{ Name: "http-test", Port: 80, }, corev1.ServicePort{ Name: "http", Port: 81, }, corev1.ServicePort{ Name: "http2-test", Port: 82, }, corev1.ServicePort{ Name: "http2", Port: 84, }, }, }, }, }, }, { name: 
"service-with-secure-http-prefixed-port", allowed: false, resources: []runtime.Object{ &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "some-service", Namespace: "app-namespace", }, Spec: corev1.ServiceSpec{ Ports: []corev1.ServicePort{ corev1.ServicePort{ Name: "http-test", Port: 443, }, }, }, }, }, }, { name: "service-with-secure-http-port", allowed: false, resources: []runtime.Object{ &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "some-service", Namespace: "app-namespace", }, Spec: corev1.ServiceSpec{ Ports: []corev1.ServicePort{ corev1.ServicePort{ Name: "http", Port: 443, }, }, }, }, }, }, { name: "service-with-secure-http2-prefixed-port", allowed: false, resources: []runtime.Object{ &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "some-service", Namespace: "app-namespace", }, Spec: corev1.ServiceSpec{ Ports: []corev1.ServicePort{ corev1.ServicePort{ Name: "http2-test", Port: 443, }, }, }, }, }, }, { name: "service-with-secure-http2-port", allowed: false, resources: []runtime.Object{ &corev1.Service{ ObjectMeta: metav1.ObjectMeta{ Name: "some-service", Namespace: "app-namespace", }, Spec: corev1.ServiceSpec{ Ports: []corev1.ServicePort{ corev1.ServicePort{ Name: "http2", Port: 443, }, }, }, }, }, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { memberRoll := newMemberRoll("default", "istio-system", "app-namespace") memberRoll.Status.ConfiguredMembers = append([]string{}, memberRoll.Spec.Members...) resources := append(tc.resources, &corev1.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "istio-system", Labels: map[string]string{ common.MemberOfKey: "istio-system", }, }, }, &corev1.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "app-namespace", Labels: map[string]string{ common.MemberOfKey: "istio-system", }, }, }, &corev1.Namespace{ ObjectMeta: metav1.ObjectMeta{ Name: "other-namespace", }, }, memberRoll) validator, _, _ := createControlPlaneValidatorTestFixture(resources...) 
response := validator.Handle(ctx, createUpdateRequest(v1_0ControlPlane, v1_1ControlPlane)) if tc.allowed { defer func() { if t.Failed() { t.Logf("Unexpected validation Error: %s", response.Response.Result.Message) } }() assert.True(response.Response.Allowed, "Expected validator to accept ServiceMeshControlPlane", t) } else { assert.False(response.Response.Allowed, "Expected validator to reject ServiceMeshControlPlane", t) t.Logf("Validation Error: %s", response.Response.Result.Message) } }) } }
explode_data.jsonl/20793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2840 }
[ 2830, 3393, 5637, 43861, 16, 62, 15, 1249, 16, 62, 16, 1155, 353, 8840, 836, 8, 341, 5195, 16, 62, 15, 3273, 34570, 1669, 501, 3273, 34570, 2354, 5637, 445, 2408, 4668, 4672, 497, 330, 380, 815, 36648, 497, 330, 85, 16, 13, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestVolumeLimits(t *testing.T) { const ( volumeLimitKey = "attachable-volumes-fake-provider" volumeLimitVal = 16 ) var cases = []struct { desc string volumePluginList []volume.VolumePluginWithAttachLimits expectNode *v1.Node }{ { desc: "translate limits to capacity and allocatable for plugins that return successfully from GetVolumeLimits", volumePluginList: []volume.VolumePluginWithAttachLimits{ &volumetest.FakeVolumePlugin{ VolumeLimits: map[string]int64{volumeLimitKey: volumeLimitVal}, }, }, expectNode: &v1.Node{ Status: v1.NodeStatus{ Capacity: v1.ResourceList{ volumeLimitKey: *resource.NewQuantity(volumeLimitVal, resource.DecimalSI), }, Allocatable: v1.ResourceList{ volumeLimitKey: *resource.NewQuantity(volumeLimitVal, resource.DecimalSI), }, }, }, }, { desc: "skip plugins that return errors from GetVolumeLimits", volumePluginList: []volume.VolumePluginWithAttachLimits{ &volumetest.FakeVolumePlugin{ VolumeLimitsError: fmt.Errorf("foo"), }, }, expectNode: &v1.Node{}, }, { desc: "no plugins", expectNode: &v1.Node{}, }, } for _, tc := range cases { t.Run(tc.desc, func(t *testing.T) { volumePluginListFunc := func() []volume.VolumePluginWithAttachLimits { return tc.volumePluginList } // construct setter setter := VolumeLimits(volumePluginListFunc) // call setter on node node := &v1.Node{} if err := setter(node); err != nil { t.Fatalf("unexpected error: %v", err) } // check expected node assert.True(t, apiequality.Semantic.DeepEqual(tc.expectNode, node), "Diff: %s", diff.ObjectDiff(tc.expectNode, node)) }) } }
explode_data.jsonl/31862
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 718 }
[ 2830, 3393, 18902, 94588, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 5195, 4661, 16527, 1592, 284, 330, 16330, 480, 8273, 19705, 2220, 726, 81532, 698, 197, 5195, 4661, 16527, 2208, 284, 220, 16, 21, 198, 197, 692, 2405, 5048, 284...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPropagateEnvironment(t *testing.T) { defer viper.Reset() viper.Set("peer.address", "localhost:8080") viper.Set("chaincode.externalBuilders", &[]ExternalBuilder{ { Name: "testName", Environment: []string{"KEY=VALUE"}, Path: "/testPath", }, { Name: "testName", PropagateEnvironment: []string{"KEY=VALUE"}, Path: "/testPath", }, { Name: "testName", Environment: []string{"KEY=VALUE"}, PropagateEnvironment: []string{"KEY=VALUE2"}, Path: "/testPath", }, }) coreConfig, err := GlobalConfig() require.NoError(t, err) expectedConfig := &Config{ AuthenticationTimeWindow: 15 * time.Minute, PeerAddress: "localhost:8080", ValidatorPoolSize: runtime.NumCPU(), VMNetworkMode: "host", DeliverClientKeepaliveOptions: comm.DefaultKeepaliveOptions, ExternalBuilders: []ExternalBuilder{ { Name: "testName", Environment: []string{"KEY=VALUE"}, PropagateEnvironment: []string{"KEY=VALUE"}, Path: "/testPath", }, { Name: "testName", PropagateEnvironment: []string{"KEY=VALUE"}, Path: "/testPath", }, { Name: "testName", Environment: []string{"KEY=VALUE"}, PropagateEnvironment: []string{"KEY=VALUE2"}, Path: "/testPath", }, }, GatewayOptions: gateway.GetOptions(viper.GetViper()), } require.Equal(t, expectedConfig, coreConfig) }
explode_data.jsonl/71576
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 782 }
[ 2830, 3393, 2008, 46836, 12723, 1155, 353, 8840, 836, 8, 341, 16867, 95132, 36660, 741, 5195, 12858, 4202, 445, 16537, 13792, 497, 330, 8301, 25, 23, 15, 23, 15, 1138, 5195, 12858, 4202, 445, 8819, 1851, 64227, 62306, 497, 609, 1294, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTrieDB_IterateWithTopicMatched(t *testing.T) { a := assert.New(t) db := NewStore() tt := []struct { clientID string topic packets.Topic }{ {clientID: "id0", topic: packets.Topic{Name: "/a/b/c", Qos: packets.QOS_0}}, {clientID: "id1", topic: packets.Topic{Name: "/a/b/+", Qos: packets.QOS_1}}, {clientID: "id2", topic: packets.Topic{Name: "/a/b/c/d", Qos: packets.QOS_2}}, } for _, v := range tt { db.Subscribe(v.clientID, v.topic) } var expected = subscription.ClientTopics{ "id0": {{ Qos: packets.QOS_0, Name: "/a/b/c", }}, "id1": {{ Qos: packets.QOS_1, Name: "/a/b/+", }}, } rs := db.GetTopicMatched("/a/b/c") a.Len(rs, 2) a.Equal(expected, rs) }
explode_data.jsonl/80979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 343 }
[ 2830, 3393, 51, 7231, 3506, 7959, 465, 349, 2354, 26406, 8331, 291, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 340, 20939, 1669, 1532, 6093, 741, 3244, 83, 1669, 3056, 1235, 341, 197, 25291, 915, 914, 198, 197, 3244,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestActSubjectProtocol(t *testing.T) { convey.Convey("LikeContent", t, func(ctx convey.C) { var ( c = context.Background() sid = int64(10298) ) ctx.Convey("When everything goes positive", func(ctx convey.C) { res, err := d.ActSubjectProtocol(c, sid) ctx.Convey("Then err should be nil.res should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) fmt.Printf("%+v", res) }) }) }) }
explode_data.jsonl/11270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 2414, 13019, 20689, 1155, 353, 8840, 836, 8, 341, 37203, 5617, 4801, 5617, 445, 12949, 2762, 497, 259, 11, 2915, 7502, 20001, 727, 8, 341, 197, 2405, 2399, 298, 1444, 256, 284, 2266, 19047, 741, 298, 1903, 307, 284, 526, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPersistenceLayer_Purge covers the Purge flow: look up all accounts,
// derive per-account secret ids for the given user id, and delete the
// matching events.  Each case additionally asserts the exact arguments that
// reached the mock database, making sure the plain user id never appears in
// a delete query.
func TestPersistenceLayer_Purge(t *testing.T) {
	tests := []struct {
		name          string
		db            *mockPurgeEventsDatabase
		expectError   bool
		argAssertions []assertion
	}{
		{
			// Account lookup fails: Purge must surface the error after a
			// single FindAccounts call.
			"account lookup error",
			&mockPurgeEventsDatabase{
				findAccountsErr: errors.New("did not work"),
			},
			true,
			[]assertion{
				func(q interface{}) error {
					if _, ok := q.(FindAccountsQueryAllAccounts); ok {
						return nil
					}
					return fmt.Errorf("unexpected argument %v", q)
				},
			},
		},
		{
			// Deletion fails after a successful account lookup.
			"delete events error",
			&mockPurgeEventsDatabase{
				findAccountsResult: []Account{
					{UserSalt: "JF+rNeViJeJb0jth6ZheWg=="},
					{UserSalt: "D6xdWYfRqbuWrkg4OWVgGQ=="},
				},
				deleteEventsErr: errors.New("did not work"),
			},
			true,
			[]assertion{
				func(q interface{}) error {
					if _, ok := q.(FindAccountsQueryAllAccounts); ok {
						return nil
					}
					return fmt.Errorf("unexpected argument %v", q)
				},
				func(q interface{}) error {
					// The delete query must contain hashed ids only.
					if hashes, ok := q.(DeleteEventsQueryBySecretIDs); ok {
						for _, hash := range hashes {
							if hash == "user-id" {
								return errors.New("encountered plain user id when hash was expected")
							}
						}
						return nil
					}
					return fmt.Errorf("unexpected argument %v", q)
				},
			},
		},
		{
			// Happy path: both calls succeed, no error expected.
			"ok",
			&mockPurgeEventsDatabase{
				findAccountsResult: []Account{
					{UserSalt: "JF+rNeViJeJb0jth6ZheWg=="},
					{UserSalt: "D6xdWYfRqbuWrkg4OWVgGQ=="},
				},
			},
			false,
			[]assertion{
				func(q interface{}) error {
					if _, ok := q.(FindAccountsQueryAllAccounts); ok {
						return nil
					}
					return fmt.Errorf("unexpected argument %v", q)
				},
				func(q interface{}) error {
					if hashes, ok := q.(DeleteEventsQueryBySecretIDs); ok {
						for _, hash := range hashes {
							if hash == "user-id" {
								return errors.New("encountered plain user id when hash was expected")
							}
						}
						return nil
					}
					return fmt.Errorf("unexpected argument %v", q)
				},
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			r := &persistenceLayer{
				dal: test.db,
			}
			err := r.Purge("user-id")
			if (err != nil) != test.expectError {
				t.Errorf("Unexpected error value %v", err)
			}
			// Exactly one assertion is expected per recorded database call.
			if expected, found := len(test.argAssertions), len(test.db.methodArgs); expected != found {
				t.Fatalf("Number of assertions did not match number of calls, got %d and expected %d", found, expected)
			}
			for i, a := range test.argAssertions {
				if err := a(test.db.methodArgs[i]); err != nil {
					t.Errorf("Assertion error when checking arguments: %v", err)
				}
			}
		})
	}
}
explode_data.jsonl/45974
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1237 }
[ 2830, 3393, 71562, 9188, 1088, 39823, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 20939, 310, 353, 16712, 47, 39823, 7900, 5988, 198, 197, 24952, 1454, 256, 1807, 198, 197, 47903, 90206, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestProxy(t *testing.T) { ing := buildIngress() data := map[string]string{} data[parser.GetAnnotationWithPrefix("proxy-connect-timeout")] = "1" data[parser.GetAnnotationWithPrefix("proxy-send-timeout")] = "2" data[parser.GetAnnotationWithPrefix("proxy-read-timeout")] = "3" data[parser.GetAnnotationWithPrefix("proxy-buffer-size")] = "1k" data[parser.GetAnnotationWithPrefix("proxy-body-size")] = "2k" ing.SetAnnotations(data) i, err := NewParser(&resolver.Mock{}).Parse(ing) if err != nil { t.Fatalf("unexpected error parsing a valid") } p, ok := i.(*Config) if !ok { t.Fatalf("expected a Config type") } if !ok { t.Fatalf("expected a Config type") } if p.ConnectTimeout != 1 { t.Errorf("expected 1 as connect-timeout but returned %v", p.ConnectTimeout) } if p.SendTimeout != 2 { t.Errorf("expected 2 as send-timeout but returned %v", p.SendTimeout) } if p.ReadTimeout != 3 { t.Errorf("expected 3 as read-timeout but returned %v", p.ReadTimeout) } if p.BufferSize != "1k" { t.Errorf("expected 1k as buffer-size but returned %v", p.BufferSize) } if p.BodySize != "2k" { t.Errorf("expected 2k as body-size but returned %v", p.BodySize) } }
explode_data.jsonl/52489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 459 }
[ 2830, 3393, 16219, 1155, 353, 8840, 836, 8, 341, 197, 287, 1669, 1936, 641, 2483, 2822, 8924, 1669, 2415, 14032, 30953, 16094, 8924, 58, 9657, 2234, 19711, 2354, 14335, 445, 22803, 85470, 7246, 411, 30917, 284, 330, 16, 698, 8924, 58, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestBloomFilter_Add(t *testing.T) { bf, err := NewBloomFilter(100, 7) if err != nil { t.Fatal(err) } set := &Set{ st: make(map[string]bool), } for i := 0; i <= 100000; i += 1 { word := randWord(5) if !bf.Has(String(word)) && set.Contains(word) { t.Fatalf("Could not find word '%s' while it should have", word) } set.Add(word) bf.Add(String(word)) } }
explode_data.jsonl/50983
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 172 }
[ 2830, 3393, 33, 18474, 5632, 21346, 1155, 353, 8840, 836, 8, 341, 2233, 69, 11, 1848, 1669, 1532, 33, 18474, 5632, 7, 16, 15, 15, 11, 220, 22, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 8196, 1669, 609,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestPersistentClaimReadOnlyFlag verifies that a GCE persistent disk
// mounter built from a PersistentVolume spec marked read-only reports
// ReadOnly in its attributes.
func TestPersistentClaimReadOnlyFlag(t *testing.T) {
	// PV backed by a GCE persistent disk, referencing claimA.
	pv := &v1.PersistentVolume{
		ObjectMeta: metav1.ObjectMeta{
			Name: "pvA",
		},
		Spec: v1.PersistentVolumeSpec{
			PersistentVolumeSource: v1.PersistentVolumeSource{
				GCEPersistentDisk: &v1.GCEPersistentDiskVolumeSource{},
			},
			ClaimRef: &v1.ObjectReference{
				Name: "claimA",
			},
		},
	}
	// Bound claim pointing back at the PV above.
	claim := &v1.PersistentVolumeClaim{
		ObjectMeta: metav1.ObjectMeta{
			Name:      "claimA",
			Namespace: "nsA",
		},
		Spec: v1.PersistentVolumeClaimSpec{
			VolumeName: "pvA",
		},
		Status: v1.PersistentVolumeClaimStatus{
			Phase: v1.ClaimBound,
		},
	}
	client := fake.NewSimpleClientset(pv, claim)
	tmpDir, err := utiltesting.MkTmpdir("gcepdTest")
	if err != nil {
		t.Fatalf("can't make a temp dir: %v", err)
	}
	defer os.RemoveAll(tmpDir)
	plugMgr := volume.VolumePluginMgr{}
	plugMgr.InitPlugins(ProbeVolumePlugins(), nil /* prober */, volumetest.NewFakeVolumeHost(tmpDir, client, nil))
	plug, _ := plugMgr.FindPluginByName(gcePersistentDiskPluginName)
	// readOnly bool is supplied by persistent-claim volume source when its mounter creates other volumes
	spec := volume.NewSpecFromPersistentVolume(pv, true)
	pod := &v1.Pod{ObjectMeta: metav1.ObjectMeta{UID: types.UID("poduid")}}
	mounter, _ := plug.NewMounter(spec, pod, volume.VolumeOptions{})
	if mounter == nil {
		t.Fatalf("Got a nil Mounter")
	}
	if !mounter.GetAttributes().ReadOnly {
		t.Errorf("Expected true for mounter.IsReadOnly")
	}
}
explode_data.jsonl/65184
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 583 }
[ 2830, 3393, 53194, 45544, 20914, 12135, 1155, 353, 8840, 836, 8, 341, 3223, 85, 1669, 609, 85, 16, 61655, 18902, 515, 197, 23816, 12175, 25, 77520, 16, 80222, 515, 298, 21297, 25, 330, 30168, 32, 756, 197, 197, 1583, 197, 7568, 992, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestOTLPClientIP is a system test checking that client.ip is only set on
// documents from agents reporting the iOS/swift telemetry SDK; documents
// from other services must not carry the field.
func TestOTLPClientIP(t *testing.T) {
	systemtest.CleanupElasticsearch(t)
	srv := apmservertest.NewServer(t)
	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()
	exporter := newOTLPTraceExporter(t, srv)
	err := sendOTLPTrace(ctx, newOTLPTracerProvider(exporter))
	assert.NoError(t, err)
	// service1: no telemetry SDK attributes.
	err = sendOTLPTrace(ctx, newOTLPTracerProvider(exporter, sdktrace.WithResource(
		sdkresource.NewSchemaless(attribute.String("service.name", "service1")),
	)))
	require.NoError(t, err)
	// service2: reports the iOS/swift SDK.
	err = sendOTLPTrace(ctx, newOTLPTracerProvider(exporter, sdktrace.WithResource(
		sdkresource.NewSchemaless(
			attribute.String("service.name", "service2"),
			attribute.String("telemetry.sdk.name", "iOS"),
			attribute.String("telemetry.sdk.language", "swift"),
		),
	)))
	require.NoError(t, err)
	// Non-iOS agent documents should have no client.ip field set.
	result := systemtest.Elasticsearch.ExpectDocs(t, "traces-apm*", estest.TermQuery{
		Field: "service.name",
		Value: "service1",
	})
	assert.False(t, gjson.GetBytes(result.Hits.Hits[0].RawSource, "client.ip").Exists())
	// iOS agent documents should have a client.ip field set.
	result = systemtest.Elasticsearch.ExpectDocs(t, "traces-apm*", estest.TermQuery{
		Field: "service.name",
		Value: "service2",
	})
	assert.True(t, gjson.GetBytes(result.Hits.Hits[0].RawSource, "client.ip").Exists())
}
explode_data.jsonl/34663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 1793, 12567, 2959, 3298, 1155, 353, 8840, 836, 8, 341, 40293, 1944, 727, 60639, 36, 51179, 1836, 1155, 340, 1903, 10553, 1669, 1443, 76, 799, 1621, 477, 7121, 5475, 1155, 692, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCallGlobalObject runs a script that calls `this()` at the top level
// and expects the caught exception to be a TypeError; the script's final
// value (the rv assignment) is compared against valueTrue.
func TestCallGlobalObject(t *testing.T) {
	const SCRIPT = ` var rv; try { this(); } catch (e) { rv = e instanceof TypeError } `
	testScript(SCRIPT, valueTrue, t)
}
explode_data.jsonl/75266
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 7220, 11646, 1190, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 2405, 17570, 280, 6799, 341, 197, 2046, 543, 197, 92, 2287, 320, 68, 8, 341, 197, 78484, 284, 384, 8083, 25030, 198, 197, 532, 197, 19324, 18185, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSuccessRevTagParse(t *testing.T) { testRepo := newTestRepo(t) defer testRepo.cleanup(t) mainRev, err := testRepo.sut.RevParseTag(git.DefaultBranch) require.Nil(t, err) require.Equal(t, testRepo.firstCommit, mainRev) branchRev, err := testRepo.sut.RevParseTag(testRepo.branchName) require.Nil(t, err) require.Equal(t, testRepo.thirdBranchCommit, branchRev) tagRev, err := testRepo.sut.RevParseTag(testRepo.firstTagName) require.Nil(t, err) require.Equal(t, testRepo.firstCommit, tagRev) }
explode_data.jsonl/13986
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 7188, 36184, 5668, 14463, 1155, 353, 8840, 836, 8, 341, 18185, 25243, 1669, 501, 2271, 25243, 1155, 340, 16867, 1273, 25243, 87689, 1155, 692, 36641, 36184, 11, 1848, 1669, 1273, 25243, 514, 332, 2817, 85, 14463, 5668, 3268, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestNoConsentButAllowByDefault checks that with an empty consent string
// and UsersyncIfAmbiguous enabled, bidder syncs and host cookies are both
// allowed — even though vendor-list fetching always fails.
func TestNoConsentButAllowByDefault(t *testing.T) {
	perms := permissionsImpl{
		cfg: config.GDPR{
			HostVendorID:        3,
			UsersyncIfAmbiguous: true,
		},
		vendorIDs: nil,
		// Both spec versions fail to fetch, so the decision must come from
		// the ambiguous-consent default alone.
		fetchVendorList: map[uint8]func(ctx context.Context, id uint16) (vendorlist.VendorList, error){
			tcf1SpecVersion: failedListFetcher,
			tcf2SpecVersion: failedListFetcher,
		},
	}
	allowSync, err := perms.BidderSyncAllowed(context.Background(), openrtb_ext.BidderAppnexus, "")
	assertBoolsEqual(t, true, allowSync)
	assertNilErr(t, err)
	allowSync, err = perms.HostCookiesAllowed(context.Background(), "")
	assertBoolsEqual(t, true, allowSync)
	assertNilErr(t, err)
}
explode_data.jsonl/46155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 2753, 15220, 306, 3983, 18605, 1359, 3675, 1155, 353, 8840, 836, 8, 341, 197, 87772, 1669, 8541, 9673, 515, 197, 50286, 25, 2193, 1224, 35, 6480, 515, 298, 197, 9296, 44691, 915, 25, 286, 220, 18, 345, 298, 197, 7137, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAssociateManagerByUsername exercises Org.AssociateManagerByUsername
// against a mocked API route, asserting both the PUT request body and the
// org GUID echoed back in the response.
func TestAssociateManagerByUsername(t *testing.T) {
	Convey("Associate manager by username", t, func() {
		// The mock route verifies the request body matches expectedBody.
		expectedBody := `{"username":"user-name"}`
		setup(MockRoute{"PUT", "/v2/organizations/bc7b4caf-f4b8-4d85-b126-0729b9351e56/managers", []string{associateOrgUserPayload}, "", 201, "", &expectedBody}, t)
		defer teardown()
		c := &Config{
			ApiAddress: server.URL,
			Token:      "foobar",
		}
		client, err := NewClient(c)
		So(err, ShouldBeNil)
		org := &Org{
			Guid: "bc7b4caf-f4b8-4d85-b126-0729b9351e56",
			c:    client,
		}
		newOrg, err := org.AssociateManagerByUsername("user-name")
		So(err, ShouldBeNil)
		So(newOrg.Guid, ShouldEqual, "bc7b4caf-f4b8-4d85-b126-0729b9351e56")
	})
}
explode_data.jsonl/4443
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 328 }
[ 2830, 3393, 95540, 2043, 91519, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 95540, 6645, 553, 5934, 497, 259, 11, 2915, 368, 341, 197, 42400, 5444, 1669, 1565, 4913, 5113, 3252, 872, 11494, 9207, 3989, 197, 84571, 66436, 4899, 4913,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAppDelete(t *testing.T) { // clear the envs table when we're finished defer truncate("123") app := App{ EnvID: "123", ID: "123_dev", Name: "dev", } if err := app.Save(); err != nil { t.Error(err) } if err := app.Delete(); err != nil { t.Error(err) } // make sure the app is gone keys, err := keys(app.EnvID) if err != nil { t.Error(err) } if len(keys) > 0 { t.Errorf("app was not deleted") } }
explode_data.jsonl/32899
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 2164, 6435, 1155, 353, 8840, 836, 8, 341, 197, 322, 2797, 279, 6105, 82, 1965, 979, 582, 2299, 8060, 198, 16867, 56772, 445, 16, 17, 18, 5130, 28236, 1669, 1845, 515, 197, 197, 14359, 915, 25, 330, 16, 17, 18, 756, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestInterpolationErrors checks that Interpolate rejects malformed input
// with an InputValue error: a point/value count mismatch, and duplicate
// interpolation points.
func TestInterpolationErrors(t *testing.T) {
	field := defineField(13, t)
	ring := DefRing(field, DegLex(true))
	a := [2]ff.Element{field.Zero(), field.Zero()}
	b := [2]ff.Element{field.Zero(), field.ElementFromUnsigned(5)}
	// Two points but only one value.
	_, err := ring.Interpolate(
		[][2]ff.Element{a, b},
		[]ff.Element{field.Zero()},
	)
	assertError(t, err, errors.InputValue, "Interpolation with more points than values")
	// The same point supplied twice.
	_, err = ring.Interpolate(
		[][2]ff.Element{a, a},
		[]ff.Element{field.Zero(), field.Zero()},
	)
	assertError(t, err, errors.InputValue, "Interpolation on duplicate points")
}
explode_data.jsonl/54128
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 3306, 44686, 13877, 1155, 353, 8840, 836, 8, 341, 39250, 1669, 6979, 1877, 7, 16, 18, 11, 259, 340, 7000, 287, 1669, 3892, 43466, 15573, 11, 57237, 47778, 3715, 4390, 11323, 1669, 508, 17, 60, 542, 20139, 90, 2566, 35489, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestScalarBls12377G1Random pins the scalar produced by the seeded test
// RNG to a known value, then checks that random scalars from a
// cryptographic reader have the right concrete type and are non-zero.
func TestScalarBls12377G1Random(t *testing.T) {
	bls12377g1 := BLS12377G1()
	sc := bls12377g1.Scalar.Random(testRng())
	s, ok := sc.(*ScalarBls12377)
	require.True(t, ok)
	// Deterministic expectation for the fixed testRng seed.
	expected, _ := new(big.Int).SetString("022a7db6fad5d5ff49108230818187de316bd0b3e5e96f190397bbb9f28e7a8b", 16)
	require.Equal(t, s.value, expected)
	// Try 10 random values
	for i := 0; i < 10; i++ {
		sc := bls12377g1.Scalar.Random(crand.Reader)
		_, ok := sc.(*ScalarBls12377)
		require.True(t, ok)
		require.True(t, !sc.IsZero())
	}
}
explode_data.jsonl/15744
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 20639, 33, 4730, 16, 17, 18, 22, 22, 38, 16, 13999, 1155, 353, 8840, 836, 8, 341, 96421, 82, 16, 17, 18, 22, 22, 70, 16, 1669, 425, 7268, 16, 17, 18, 22, 22, 38, 16, 741, 29928, 1669, 1501, 82, 16, 17, 18, 22, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestArray_Sum(t *testing.T) { gtest.C(t, func(t *gtest.T) { a1 := []interface{}{0, 1, 2, 3} a2 := []interface{}{"a", "b", "c"} a3 := []interface{}{"a", "1", "2"} array1 := garray.NewArrayFrom(a1) array2 := garray.NewArrayFrom(a2) array3 := garray.NewArrayFrom(a3) t.Assert(array1.Sum(), 6) t.Assert(array2.Sum(), 0) t.Assert(array3.Sum(), 3) }) }
explode_data.jsonl/13911
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 1857, 1098, 372, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 11323, 16, 1669, 3056, 4970, 6257, 90, 15, 11, 220, 16, 11, 220, 17, 11, 220, 18, 532, 197, 11323, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestNextBreakpointKeepsSteppingBreakpoints checks that with
// TracepointKeepsSteppingBreakpoints set, hitting a tracepoint during Next
// does not clear the internal stepping breakpoints, so the step completes
// on a later Continue.
func TestNextBreakpointKeepsSteppingBreakpoints(t *testing.T) {
	protest.AllowRecording(t)
	withTestProcess("testnextprog", t, func(p *proc.Target, fixture protest.Fixture) {
		p.KeepSteppingBreakpoints = proc.TracepointKeepsSteppingBreakpoints
		// Stop at line 34 first, then remove that breakpoint.
		bp := setFileBreakpoint(p, t, fixture.Source, 34)
		assertNoError(p.Continue(), t, "Continue()")
		p.ClearBreakpoint(bp.Addr)
		// Next should be interrupted by a tracepoint on the same goroutine.
		bp = setFileBreakpoint(p, t, fixture.Source, 14)
		bp.Tracepoint = true
		assertNoError(p.Next(), t, "Next()")
		assertLineNumber(p, t, 14, "wrong line number")
		if !p.Breakpoints().HasSteppingBreakpoints() {
			t.Fatal("does not have internal breakpoints after hitting tracepoint on same goroutine")
		}
		// Continue to complete next.
		assertNoError(p.Continue(), t, "Continue()")
		assertLineNumber(p, t, 35, "wrong line number")
		if p.Breakpoints().HasSteppingBreakpoints() {
			t.Fatal("has internal breakpoints after completing next")
		}
	})
}
explode_data.jsonl/56272
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 5847, 22524, 2768, 6608, 7124, 20903, 10732, 22524, 7706, 1155, 353, 8840, 836, 8, 341, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 1944, 3600, 32992, 497, 259, 11, 2915, 1295, 353, 15782, 35016, 11, 125...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewClientWithBadTLSConfig(t *testing.T) { cfg := config_util.HTTPClientConfig{ TLSConfig: config_util.TLSConfig{ CAFile: "testdata/nonexistent_ca.cer", CertFile: "testdata/nonexistent_client.cer", KeyFile: "testdata/nonexistent_client.key", }, } _, err := config_util.NewClientFromConfig(cfg, "test") if err == nil { t.Fatalf("Expected error, got nil.") } }
explode_data.jsonl/16318
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 3564, 2959, 2354, 17082, 45439, 2648, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 2193, 18974, 27358, 2959, 2648, 515, 197, 10261, 7268, 2648, 25, 2193, 18974, 836, 7268, 2648, 515, 298, 197, 5049, 1703, 25, 256, 330, 92425, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_VMupdateNicParameters_singleNIC feeds several Terraform NIC
// configurations into vm.updateNicParameters against a networking section
// holding a single NIC, and checks the resulting IP allocation mode,
// address, and primary NIC index.
func Test_VMupdateNicParameters_singleNIC(t *testing.T) {
	// Mock VM struct
	c := Client{}
	vm := NewVM(&c)

	// One Terraform config per allocation mode under test.
	tfCfgDHCP := []map[string]interface{}{
		map[string]interface{}{
			"network_name": "multinic-net",
			"ip":           "dhcp",
		},
	}
	tfCfgAllocated := []map[string]interface{}{
		map[string]interface{}{
			"network_name": "multinic-net",
			"ip":           "allocated",
		},
	}
	tfCfgNone := []map[string]interface{}{
		map[string]interface{}{
			"network_name": "multinic-net",
			"ip":           "none",
		},
	}
	tfCfgManual := []map[string]interface{}{
		map[string]interface{}{
			"network_name": "multinic-net",
			"ip":           "1.1.1.1",
		},
	}
	// Unparseable IP — expected to fall back to DHCP (see table below).
	tfCfgInvalidIp := []map[string]interface{}{
		map[string]interface{}{
			"network_name": "multinic-net",
			"ip":           "invalidIp",
		},
	}
	// Missing network_name — expected to error (mustNotError=false below).
	tfCfgNoNetworkName := []map[string]interface{}{
		map[string]interface{}{
			"ip": "invalidIp",
		},
	}

	// vCD-side section with one NIC at index 0; the primary index is
	// deliberately set to 1 so the update must reset it to 0.
	vcdConfig := types.NetworkConnectionSection{
		PrimaryNetworkConnectionIndex: 1,
		NetworkConnection: []*types.NetworkConnection{
			&types.NetworkConnection{
				Network:                 "singlenic-net",
				NetworkConnectionIndex:  0,
				IPAddress:               "",
				IsConnected:             true,
				MACAddress:              "00:00:00:00:00:00",
				IPAddressAllocationMode: "POOL",
				NetworkAdapterType:      "VMXNET3",
			},
		},
	}

	var tableTests = []struct {
		title                           string
		tfConfig                        []map[string]interface{}
		expectedIPAddressAllocationMode string
		expectedIPAddress               string
		mustNotError                    bool
	}{
		{"IPAllocationModeDHCP", tfCfgDHCP, types.IPAllocationModeDHCP, "Any", true},
		{"IPAllocationModePool", tfCfgAllocated, types.IPAllocationModePool, "Any", true},
		{"IPAllocationModeNone", tfCfgNone, types.IPAllocationModeNone, "Any", true},
		{"IPAllocationModeManual", tfCfgManual, types.IPAllocationModeManual, tfCfgManual[0]["ip"].(string), true},
		{"IPAllocationModeDHCPInvalidIP", tfCfgInvalidIp, types.IPAllocationModeDHCP, "Any", true},
		{"ErrNoNetworkName", tfCfgNoNetworkName, types.IPAllocationModeDHCP, "Any", false},
	}

	for _, tableTest := range tableTests {
		t.Run(tableTest.title, func(t *testing.T) {
			vcdCfg := &vcdConfig
			err := vm.updateNicParameters(tableTest.tfConfig, vcdCfg) // Execute parsing procedure
			// if we got an error which was expected abandon the subtest
			if err != nil && tableTest.mustNotError {
				t.Errorf("unexpected error got: %s", err)
				return
			}
			if vcdCfg.PrimaryNetworkConnectionIndex != 0 {
				t.Errorf("PrimaryNetworkConnectionIndex expected: 0, got: %d", vcdCfg.PrimaryNetworkConnectionIndex)
			}
			if vcdCfg.NetworkConnection[0].IPAddressAllocationMode != tableTest.expectedIPAddressAllocationMode {
				t.Errorf("IPAddressAllocationMode expected: %s, got: %s", tableTest.expectedIPAddressAllocationMode, vcdCfg.NetworkConnection[0].IPAddressAllocationMode)
			}
			if vcdCfg.NetworkConnection[0].IPAddress != tableTest.expectedIPAddress {
				t.Errorf("IPAddress expected: %s, got: %s", tableTest.expectedIPAddress, vcdCfg.NetworkConnection[0].IPAddress)
			}
		})
	}
}
explode_data.jsonl/68072
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1352 }
[ 2830, 3393, 65396, 2386, 57816, 9706, 19487, 44947, 1155, 353, 8840, 836, 8, 341, 197, 322, 14563, 17792, 2036, 198, 1444, 1669, 8423, 16094, 54879, 1669, 1532, 11187, 2099, 66, 692, 3244, 69, 42467, 51326, 7123, 1669, 3056, 2186, 14032, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestCache_Expired checks that an expired cache entry triggers exactly one
// refresh from the underlying organization service and that the entry is
// replaced in place (cache size stays 1) with the refreshed values.
func TestCache_Expired(t *testing.T) {
	controller := gomock.NewController(t)
	defer controller.Finish()
	mockUser := &core.User{
		Login: "octocat",
	}
	mockOrgService := mock.NewMockOrganizationService(controller)
	// The upstream service must be consulted exactly once.
	mockOrgService.EXPECT().Membership(gomock.Any(), gomock.Any(), "github").Return(true, true, nil).Times(1)
	service := NewCache(mockOrgService, 10, time.Minute).(*cacher)
	// Pre-populate with an entry that expired an hour ago.
	service.cache.Add("octocat/github", &item{
		expiry: time.Now().Add(time.Hour * -1),
		member: true,
		admin:  true,
	})
	admin, member, err := service.Membership(noContext, mockUser, "github")
	if err != nil {
		t.Error(err)
	}
	if got, want := service.cache.Len(), 1; got != want {
		t.Errorf("Expect cache size still %d, got %d", want, got)
	}
	if admin == false {
		t.Errorf("Expect cached admin true, got false")
	}
	if member == false {
		t.Errorf("Expect cached member true, got false")
	}
}
explode_data.jsonl/42272
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 8233, 62, 54349, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 6461, 991, 18176, 2822, 77333, 1474, 1669, 609, 2153, 7344, 515, 197, 197, 6231, 25, 330, 41692, 509, 266, 756, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestCC2CC drives a shim chaincode through a mock peer: register, init,
// a successful chaincode-to-chaincode invoke, and an invoke where the peer
// responds with an error.  The mock peer is scripted with the exact
// message sequence expected for each phase.
func TestCC2CC(t *testing.T) {
	streamGetter = mockChaincodeStreamGetter
	cc := &shimTestCC{}
	//viper.Set("chaincode.logging.shim", "debug")
	ccname := "shimTestCC"
	peerSide := setupcc(ccname, cc)
	defer mockPeerCCSupport.RemoveCC(ccname)
	//start the shim+chaincode
	go Start(cc)

	done := setuperror()

	errorFunc := func(ind int, err error) {
		done <- err
	}

	peerDone := make(chan struct{})
	defer close(peerDone)

	//start the mock peer
	go func() {
		// Phase 1: expect REGISTER and answer with REGISTERED.
		respSet := &mockpeer.MockResponseSet{
			DoneFunc:  errorFunc,
			ErrorFunc: nil,
			Responses: []*mockpeer.MockResponse{
				{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_REGISTER}, RespMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_REGISTERED}},
			},
		}
		peerSide.SetResponses(respSet)
		peerSide.SetKeepAlive(&pb.ChaincodeMessage{Type: pb.ChaincodeMessage_KEEPALIVE})
		err := peerSide.Run(peerDone)
		assert.NoError(t, err, "peer side run failed")
	}()

	//wait for init
	processDone(t, done, false)

	channelId := "testchannel"
	peerSide.Send(&pb.ChaincodeMessage{Type: pb.ChaincodeMessage_READY, Txid: "1", ChannelId: channelId})

	// Phase 2: init writes two keys, so expect two PUT_STATEs then COMPLETED.
	ci := &pb.ChaincodeInput{Args: [][]byte{[]byte("init"), []byte("A"), []byte("100"), []byte("B"), []byte("200")}, Decorations: nil}
	payload := utils.MarshalOrPanic(ci)
	respSet := &mockpeer.MockResponseSet{
		DoneFunc:  errorFunc,
		ErrorFunc: errorFunc,
		Responses: []*mockpeer.MockResponse{
			{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_PUT_STATE, Txid: "2", ChannelId: channelId}, RespMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_RESPONSE, Txid: "2", ChannelId: channelId}},
			{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_PUT_STATE, Txid: "2", ChannelId: channelId}, RespMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_RESPONSE, Txid: "2", ChannelId: channelId}},
			{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_COMPLETED, Txid: "2", ChannelId: channelId}, RespMsg: nil},
		},
	}
	peerSide.SetResponses(respSet)

	//use the payload computed from prev init
	peerSide.Send(&pb.ChaincodeMessage{Type: pb.ChaincodeMessage_INIT, Payload: payload, Txid: "2", ChannelId: channelId})

	//wait for done
	processDone(t, done, false)

	//cc2cc
	// Phase 3: successful invoke of another chaincode; the peer answers the
	// INVOKE_CHAINCODE with a wrapped COMPLETED response.
	innerResp := utils.MarshalOrPanic(&pb.Response{Status: OK, Payload: []byte("CC2CC rocks")})
	cc2ccresp := utils.MarshalOrPanic(&pb.ChaincodeMessage{Type: pb.ChaincodeMessage_COMPLETED, Payload: innerResp})
	respSet = &mockpeer.MockResponseSet{
		DoneFunc:  errorFunc,
		ErrorFunc: errorFunc,
		Responses: []*mockpeer.MockResponse{
			{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_INVOKE_CHAINCODE, Txid: "3", ChannelId: channelId}, RespMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_RESPONSE, Payload: cc2ccresp, Txid: "3", ChannelId: channelId}},
			{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_COMPLETED, Txid: "3", ChannelId: channelId}, RespMsg: nil},
		},
	}
	peerSide.SetResponses(respSet)

	ci = &pb.ChaincodeInput{Args: [][]byte{[]byte("cc2cc"), []byte("othercc"), []byte("arg1"), []byte("arg2")}, Decorations: nil}
	payload = utils.MarshalOrPanic(ci)
	peerSide.Send(&pb.ChaincodeMessage{Type: pb.ChaincodeMessage_TRANSACTION, Payload: payload, Txid: "3", ChannelId: channelId})

	//wait for done
	processDone(t, done, false)

	//error response cc2cc
	// Phase 4: same invoke, but the peer replies with ERROR.
	respSet = &mockpeer.MockResponseSet{
		DoneFunc:  errorFunc,
		ErrorFunc: errorFunc,
		Responses: []*mockpeer.MockResponse{
			{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_INVOKE_CHAINCODE, Txid: "4", ChannelId: channelId}, RespMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_ERROR, Payload: cc2ccresp, Txid: "4", ChannelId: channelId}},
			{RecvMsg: &pb.ChaincodeMessage{Type: pb.ChaincodeMessage_COMPLETED, Txid: "4", ChannelId: channelId}, RespMsg: nil},
		},
	}
	peerSide.SetResponses(respSet)

	peerSide.Send(&pb.ChaincodeMessage{Type: pb.ChaincodeMessage_TRANSACTION, Payload: payload, Txid: "4", ChannelId: channelId})

	//wait for done
	processDone(t, done, false)
}
explode_data.jsonl/78009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1561 }
[ 2830, 3393, 3706, 17, 3706, 1155, 353, 8840, 836, 8, 341, 44440, 31485, 284, 7860, 18837, 1851, 3027, 31485, 198, 63517, 1669, 609, 927, 318, 2271, 3706, 16094, 197, 322, 85, 12858, 4202, 445, 8819, 1851, 16572, 2395, 318, 497, 330, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunTwice(t *testing.T) { // Run once. scr := NewScript() sum := 0 scr.AppendStmt(nil, func(s *Script) { sum += s.F(1).Int() * s.NR }) err := scr.Run(strings.NewReader("1\n3\n5\n7\n")) if err != nil { t.Fatal(err) } if sum != 50 { t.Fatalf("Expected 50 but received %d on the first trial", sum) } // Run again. sum = 0 err = scr.Run(strings.NewReader("1\n3\n5\n7\n")) if err != nil { t.Fatal(err) } if sum != 50 { t.Fatalf("Expected 50 but received %d on the second trial", sum) } }
explode_data.jsonl/3006
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 235 }
[ 2830, 3393, 6727, 22816, 558, 1155, 353, 8840, 836, 8, 341, 197, 322, 6452, 3055, 624, 1903, 5082, 1669, 1532, 5910, 741, 31479, 1669, 220, 15, 198, 1903, 5082, 8982, 31063, 27907, 11, 2915, 1141, 353, 5910, 8, 314, 2629, 1421, 274, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetNotExistingKeyWithDefault(t *testing.T) { name, ok := Config().Get("XXXXX", "FOUND") if name != "FOUND" { t.Errorf("Expected 'FOUND' and got '%s'", name) } if ok { t.Errorf("Expected ok=false and got ok=%+v", ok) } }
explode_data.jsonl/51566
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 1949, 2623, 53067, 1592, 2354, 3675, 1155, 353, 8840, 836, 8, 341, 11609, 11, 5394, 1669, 5532, 1005, 1949, 445, 23830, 55, 497, 330, 58847, 5130, 743, 829, 961, 330, 58847, 1, 341, 197, 3244, 13080, 445, 18896, 364, 58847...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTaskWaitForExecutionCredentials(t *testing.T) { tcs := []struct { errs []error result bool msg string }{ { errs: []error{ dependencygraph.CredentialsNotResolvedErr, dependencygraph.ContainerPastDesiredStatusErr, fmt.Errorf("other error"), }, result: true, msg: "managed task should wait for credentials if the credentials dependency is not resolved", }, { result: false, msg: "managed task does not need to wait for credentials if there is no error", }, { errs: []error{ dependencygraph.ContainerPastDesiredStatusErr, dependencygraph.DependentContainerNotResolvedErr, fmt.Errorf("other errors"), }, result: false, msg: "managed task does not need to wait for credentials if there is no credentials dependency error", }, } for _, tc := range tcs { t.Run(fmt.Sprintf("%v", tc.errs), func(t *testing.T) { ctrl := gomock.NewController(t) mockTime := mock_ttime.NewMockTime(ctrl) mockTimer := mock_ttime.NewMockTimer(ctrl) ctx, cancel := context.WithCancel(context.TODO()) defer cancel() task := &managedTask{ ctx: ctx, Task: &apitask.Task{ KnownStatusUnsafe: apitaskstatus.TaskRunning, DesiredStatusUnsafe: apitaskstatus.TaskRunning, }, _time: mockTime, acsMessages: make(chan acsTransition), } if tc.result { mockTime.EXPECT().AfterFunc(gomock.Any(), gomock.Any()).Return(mockTimer) mockTimer.EXPECT().Stop() go func() { task.acsMessages <- acsTransition{desiredStatus: apitaskstatus.TaskRunning} }() } assert.Equal(t, tc.result, task.isWaitingForACSExecutionCredentials(tc.errs), tc.msg) }) } }
explode_data.jsonl/24584
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 693 }
[ 2830, 3393, 6262, 14190, 2461, 20294, 27025, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 9859, 82, 256, 3056, 841, 198, 197, 9559, 1807, 198, 197, 21169, 262, 914, 198, 197, 59403, 197, 197, 515, 298, 9859, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPermsListUsers(t *testing.T) { t.Parallel() cf, server, err := testutil.NewTestServerAndClient() if err != nil { t.Fatal(err) } defer server.Close() server.Mux.HandleFunc("/v2/apps/foo/perms/", func(w http.ResponseWriter, r *http.Request) { testutil.SetHeaders(w) fmt.Fprintf(w, `{ "users": [ "baz", "bar" ] }`) }) var b bytes.Buffer cmdr := DeisCmd{WOut: &b, ConfigFile: cf} err = cmdr.PermsList("foo", false, -1) assert.NoErr(t, err) assert.Equal(t, testutil.StripProgress(b.String()), `=== foo's Users baz bar `, "output") }
explode_data.jsonl/74898
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 255 }
[ 2830, 3393, 3889, 1011, 852, 7137, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 96989, 11, 3538, 11, 1848, 1669, 1273, 1314, 7121, 2271, 5475, 3036, 2959, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestProcessPostResponse(t *testing.T) { // mock account // PubKey field ensures amino encoding is used first since standard // JSON encoding will panic on crypto.PubKey t.Parallel() type mockAccount struct { Address types.AccAddress `json:"address"` Coins types.Coins `json:"coins"` PubKey crypto.PubKey `json:"public_key"` AccountNumber uint64 `json:"account_number"` Sequence uint64 `json:"sequence"` } // setup viper.Set(flags.FlagOffline, true) ctx := client.Context{} height := int64(194423) privKey := secp256k1.GenPrivKey() pubKey := privKey.PubKey() addr := types.AccAddress(pubKey.Address()) coins := types.NewCoins(types.NewCoin("atom", types.NewInt(100)), types.NewCoin("tree", types.NewInt(125))) accNumber := uint64(104) sequence := uint64(32) acc := mockAccount{addr, coins, pubKey, accNumber, sequence} cdc := codec.New() cryptocodec.RegisterCrypto(cdc) cdc.RegisterConcrete(&mockAccount{}, "cosmos-sdk/mockAccount", nil) ctx = ctx.WithCodec(cdc) // setup expected results jsonNoIndent, err := ctx.Codec.MarshalJSON(acc) require.Nil(t, err) respNoIndent := rest.NewResponseWithHeight(height, jsonNoIndent) expectedNoIndent, err := ctx.Codec.MarshalJSON(respNoIndent) require.Nil(t, err) // check that negative height writes an error w := httptest.NewRecorder() ctx = ctx.WithHeight(-1) rest.PostProcessResponse(w, ctx, acc) require.Equal(t, http.StatusInternalServerError, w.Code) // check that height returns expected response ctx = ctx.WithHeight(height) runPostProcessResponse(t, ctx, acc, expectedNoIndent) }
explode_data.jsonl/55929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 612 }
[ 2830, 3393, 7423, 4133, 2582, 1155, 353, 8840, 836, 8, 341, 197, 322, 7860, 2692, 198, 197, 322, 22611, 1592, 2070, 25351, 41400, 11170, 374, 1483, 1156, 2474, 5297, 198, 197, 322, 4718, 11170, 686, 21975, 389, 19028, 1069, 392, 1592, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxnPutOutOfOrder(t *testing.T) { defer leaktest.AfterTest(t) key := "key" // Set up a filter to so that the get operation at Step 3 will return an error. var numGets int32 storage.TestingCommandFilter = func(args roachpb.Request, h roachpb.Header) error { if _, ok := args.(*roachpb.GetRequest); ok && args.Header().Key.Equal(roachpb.Key(key)) && h.Txn == nil { // The Reader executes two get operations, each of which triggers two get requests // (the first request fails and triggers txn push, and then the second request // succeeds). Returns an error for the fourth get request to avoid timestamp cache // update after the third get operation pushes the txn timestamp. if atomic.AddInt32(&numGets, 1) == 4 { return util.Errorf("Test") } } return nil } defer func() { storage.TestingCommandFilter = nil }() manualClock := hlc.NewManualClock(0) clock := hlc.NewClock(manualClock.UnixNano) stopper := stop.NewStopper() defer stopper.Stop() store := createTestStoreWithEngine(t, engine.NewInMem(roachpb.Attributes{}, 10<<20, stopper), clock, true, nil, stopper) // Put an initial value. initVal := []byte("initVal") err := store.DB().Put(key, initVal) if err != nil { t.Fatalf("failed to put: %s", err) } waitPut := make(chan struct{}) waitFirstGet := make(chan struct{}) waitTxnRestart := make(chan struct{}) waitSecondGet := make(chan struct{}) waitTxnComplete := make(chan struct{}) // Start the Writer. go func() { epoch := -1 // Start a txn that does read-after-write. // The txn will be restarted twice, and the out-of-order put // will happen in the second epoch. if err := store.DB().Txn(func(txn *client.Txn) error { epoch++ if epoch == 1 { // Wait until the second get operation is issued. close(waitTxnRestart) <-waitSecondGet } updatedVal := []byte("updatedVal") if err := txn.Put(key, updatedVal); err != nil { return err } // Make sure a get will return the value that was just written. 
actual, err := txn.Get(key) if err != nil { return err } if !bytes.Equal(actual.ValueBytes(), updatedVal) { t.Fatalf("unexpected get result: %s", actual) } if epoch == 0 { // Wait until the first get operation will push the txn timestamp. close(waitPut) <-waitFirstGet } b := txn.NewBatch() err = txn.CommitInBatch(b) return err }); err != nil { t.Fatal(err) } if epoch != 2 { t.Fatalf("unexpected number of txn retries: %d", epoch) } close(waitTxnComplete) }() <-waitPut // Start the Reader. // Advance the clock and send a get operation with higher // priority to trigger the txn restart. manualClock.Increment(100) priority := int32(math.MaxInt32) requestHeader := roachpb.Span{ Key: roachpb.Key(key), } ts := clock.Now() if _, err := client.SendWrappedWith(rg1(store), nil, roachpb.Header{ Timestamp: ts, UserPriority: &priority, }, &roachpb.GetRequest{Span: requestHeader}); err != nil { t.Fatalf("failed to get: %s", err) } // Wait until the writer restarts the txn. close(waitFirstGet) <-waitTxnRestart // Advance the clock and send a get operation again. This time // we use TestingCommandFilter so that a get operation is not // processed after the write intent is resolved (to prevent the // timestamp cache from being updated). manualClock.Increment(100) ts = clock.Now() if _, err := client.SendWrappedWith(rg1(store), nil, roachpb.Header{ Timestamp: ts, UserPriority: &priority, }, &roachpb.GetRequest{Span: requestHeader}); err == nil { t.Fatal("unexpected success of get") } close(waitSecondGet) <-waitTxnComplete }
explode_data.jsonl/8001
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1372 }
[ 2830, 3393, 31584, 77, 19103, 31731, 4431, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 692, 23634, 1669, 330, 792, 698, 197, 322, 2573, 705, 264, 4051, 311, 773, 429, 279, 633, 5666, 518, 14822, 220, 18, 686, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMapProxy_PutWithNonSerializableValue(t *testing.T) { _, err := mp.Put("test", student{}) AssertErrorNotNil(t, err, "put did not return an error for nonserializable value") mp.Clear() }
explode_data.jsonl/57043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 2227, 16219, 1088, 332, 2354, 8121, 29268, 1130, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 39825, 445, 1944, 497, 5458, 37790, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, 628, 1521, 537, 470, 458, 1465, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestImageOptionsAuthfileOverride(t *testing.T) { for _, testCase := range []struct { flagPrefix string cmdFlags []string expectedAuthfilePath string }{ // if there is no prefix, only authfile is allowed. {"", []string{ "--authfile", "/srv/authfile", }, "/srv/authfile"}, // if authfile and dest-authfile is provided, dest-authfile wins {"dest-", []string{ "--authfile", "/srv/authfile", "--dest-authfile", "/srv/dest-authfile", }, "/srv/dest-authfile", }, // if only the shared authfile is provided, authfile must be present in system context {"dest-", []string{ "--authfile", "/srv/authfile", }, "/srv/authfile", }, // if only the dest authfile is provided, dest-authfile must be present in system context {"dest-", []string{ "--dest-authfile", "/srv/dest-authfile", }, "/srv/dest-authfile", }, } { opts := fakeImageOptions(t, testCase.flagPrefix, []string{}, testCase.cmdFlags) res, err := opts.newSystemContext() require.NoError(t, err) assert.Equal(t, &types.SystemContext{ AuthFilePath: testCase.expectedAuthfilePath, DockerRegistryUserAgent: defaultUserAgent, }, res) } }
explode_data.jsonl/81904
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 493 }
[ 2830, 3393, 1906, 3798, 5087, 1192, 2177, 1155, 353, 8840, 836, 8, 1476, 2023, 8358, 54452, 1669, 2088, 3056, 1235, 341, 197, 30589, 14335, 1843, 914, 198, 197, 25920, 9195, 1797, 3056, 917, 198, 197, 42400, 5087, 36697, 914, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpgrade_removeACRInProfiles(t *testing.T) { yaml := `apiVersion: skaffold/v1alpha5 kind: Config build: artifacts: - image: myregistry.azurecr.io/skaffold-example deploy: kubectl: manifests: - k8s-* profiles: - name: test profile build: acr: {} ` upgradeShouldFail(t, yaml) }
explode_data.jsonl/71756
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 43861, 18193, 1706, 49, 641, 62719, 1155, 353, 8840, 836, 8, 341, 14522, 9467, 1669, 1565, 2068, 5637, 25, 1901, 2649, 813, 5457, 16, 7141, 20, 198, 15314, 25, 5532, 198, 5834, 510, 220, 35036, 510, 220, 481, 2168, 25, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_MailMergeFileds_GetDocumentFieldNamesOnline(t *testing.T) { config := ReadConfiguration(t) client, ctx := PrepareTest(t, config) mailMergeFolder := "DocumentActions/MailMerge" localDocumentFile := "SampleExecuteTemplate.docx" options := map[string]interface{}{ "useNonMergeFields": true, } request := &models.GetDocumentFieldNamesOnlineRequest{ Document: OpenFile(t, mailMergeFolder + "/" + localDocumentFile), Optionals: options, } actual, _, err := client.WordsApi.GetDocumentFieldNamesOnline(ctx, request) if err != nil { t.Error(err) } assert.NotNil(t, actual.FieldNames, "Validate GetDocumentFieldNamesOnline response."); assert.NotNil(t, actual.FieldNames.Names, "Validate GetDocumentFieldNamesOnline response."); assert.Equal(t, 15, len(actual.FieldNames.Names), "Validate GetDocumentFieldNamesOnline response."); assert.Equal(t, "TableStart:Order", actual.FieldNames.Names[0], "Validate GetDocumentFieldNamesOnline response."); }
explode_data.jsonl/64357
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 367 }
[ 2830, 3393, 1245, 604, 52096, 1703, 5356, 13614, 7524, 1877, 7980, 19598, 1155, 353, 8840, 836, 8, 341, 262, 2193, 1669, 4457, 7688, 1155, 340, 262, 2943, 11, 5635, 1669, 31166, 2271, 1155, 11, 2193, 340, 262, 8072, 52096, 13682, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFindPath(t *testing.T) { processor := GetMetricProcessor() tests := []struct { a string expected PathConfig }{ { a: "/foo/bar", expected: processor.paths["/foo"], }, { a: "/", expected: processor.defaultPath, }, { a: "/abc", expected: processor.defaultPath, }, } for i, test := range tests { a, expected := test.a, test.expected name := fmt.Sprintf("%v: %v = %v", i, a, expected) t.Run(name, func(t *testing.T) { b := processor.findPath(a) assert.Equal(t, expected, *b) }) } }
explode_data.jsonl/70873
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 267 }
[ 2830, 3393, 9885, 1820, 1155, 353, 8840, 836, 8, 341, 197, 29474, 1669, 2126, 54310, 22946, 741, 78216, 1669, 3056, 1235, 341, 197, 11323, 286, 914, 198, 197, 42400, 7933, 2648, 198, 197, 59403, 197, 197, 515, 298, 11323, 25, 286, 352...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStartCmdContents(t *testing.T) { startCmd := GetStartCmd(&mockServer{}) require.Equal(t, "start", startCmd.Use) require.Equal(t, "Start issuer", startCmd.Short) require.Equal(t, "Start issuer", startCmd.Long) checkFlagPropertiesCorrect(t, startCmd, hostURLFlagName, hostURLFlagShorthand, hostURLFlagUsage) }
explode_data.jsonl/23754
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 3479, 15613, 14803, 1155, 353, 8840, 836, 8, 341, 21375, 15613, 1669, 2126, 3479, 15613, 2099, 16712, 5475, 6257, 692, 17957, 12808, 1155, 11, 330, 2468, 497, 1191, 15613, 9046, 340, 17957, 12808, 1155, 11, 330, 3479, 54835, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEvalPointerExpressions(t *testing.T) { var data = make(VarMap) var s *string data.Set("stringPointer", s) RunJetTest(t, data, nil, "StringPointer_1", `{{ stringPointer }}`, "") s2 := "test" data.Set("stringPointer2", &s2) RunJetTest(t, data, nil, "StringPointer_2", `{{ stringPointer2 }}`, "test") i2 := 10 data.Set("intPointer2", &i2) RunJetTest(t, data, nil, "IntPointer_2", `{{ intPointer2 }}`, "10") var i *int data.Set("intPointer", &i) RunJetTest(t, data, nil, "IntPointer_i", `{{ intPointer }}`, "") }
explode_data.jsonl/22901
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 54469, 9084, 40315, 1155, 353, 8840, 836, 8, 341, 2405, 821, 284, 1281, 7, 3962, 2227, 340, 2405, 274, 353, 917, 198, 8924, 4202, 445, 917, 9084, 497, 274, 340, 85952, 35641, 2271, 1155, 11, 821, 11, 2092, 11, 330, 703, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthSuccess(t *testing.T) { httpmock, cas := buildCAS() httpresp := &http.Response{ StatusCode: http.StatusOK, Body: bodySample(), } reqMatcher := func(req *http.Request) bool { return req.URL.String() == cas.URL && req.Header.Get("Content-Type") == "application/cas" && req.Header.Get("Authorization") == "Basic YWxpY2U6dzBuZDNybDRuZA==" } httpmock.On("Do", mock.MatchedBy(reqMatcher)).Return(httpresp, nil) resp, err := cas.Auth("alice", "w0nd3rl4nd") assert.Nil(t, err) assert.NotEmpty(t, resp) httpmock.AssertExpectations(t) }
explode_data.jsonl/25335
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 5087, 7188, 1155, 353, 8840, 836, 8, 341, 28080, 16712, 11, 4760, 1669, 1936, 87516, 741, 28080, 18243, 1669, 609, 1254, 12574, 515, 197, 197, 15872, 25, 1758, 52989, 345, 197, 197, 5444, 25, 981, 2487, 17571, 3148, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCrop(t *testing.T) { msg := "TestCrop" inFile := filepath.Join(inDir, "test.pdf") outFile := filepath.Join(outDir, "out.pdf") for _, tt := range []struct { s string u pdfcpu.DisplayUnit }{ {"[0 0 5 5]", pdfcpu.CENTIMETRES}, {"100", pdfcpu.POINTS}, {"20% 40%", pdfcpu.POINTS}, {"dim:30 30", pdfcpu.POINTS}, {"dim:50% 50%", pdfcpu.POINTS}, {"pos:bl, dim:50% 50%", pdfcpu.POINTS}, {"pos:tl, off: 10 -10, dim:50% 50%", pdfcpu.POINTS}, {"-1", pdfcpu.INCHES}, {"-25%", pdfcpu.POINTS}, } { box, err := api.Box(tt.s, tt.u) if err != nil { t.Fatalf("%s: %v\n", msg, err) } if err := api.CropFile(inFile, outFile, nil, box, nil); err != nil { t.Fatalf("%s: %v\n", msg, err) } } }
explode_data.jsonl/37697
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 362 }
[ 2830, 3393, 62146, 1155, 353, 8840, 836, 8, 341, 21169, 1669, 330, 2271, 62146, 698, 17430, 1703, 1669, 26054, 22363, 5900, 6184, 11, 330, 1944, 15995, 1138, 13967, 1703, 1669, 26054, 22363, 9828, 6184, 11, 330, 411, 15995, 5130, 2023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPipelinesBuilder_Error(t *testing.T) { receiverFactories, processorsFactories, exporterFactories, err := config.ExampleComponents() assert.Nil(t, err) attrFactory := &addattributesprocessor.Factory{} processorsFactories[attrFactory.Type()] = attrFactory cfg, err := config.LoadConfigFile( t, "testdata/pipelines_builder.yaml", receiverFactories, processorsFactories, exporterFactories, ) require.Nil(t, err) // Corrupt the pipeline, change data type to metrics. We have to forcedly do it here // since there is no way to have such config loaded by LoadConfigFile, it would not // pass validation. We are doing this to test failure mode of PipelinesBuilder. pipeline := cfg.Pipelines["traces"] pipeline.InputType = configmodels.MetricsDataType exporters, err := NewExportersBuilder(zap.NewNop(), cfg, exporterFactories).Build() assert.NoError(t, err) // This should fail because "attributes" processor defined in the config does // not support metrics data type. _, err = NewPipelinesBuilder(zap.NewNop(), cfg, exporters, processorsFactories).Build() assert.NotNil(t, err) }
explode_data.jsonl/10202
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 47, 93997, 3297, 28651, 1155, 353, 8840, 836, 8, 341, 17200, 12862, 17417, 2433, 11, 36586, 17417, 2433, 11, 57378, 17417, 2433, 11, 1848, 1669, 2193, 5121, 1516, 10443, 741, 6948, 59678, 1155, 11, 1848, 340, 60943, 4153, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTagValueMarshaling(t *testing.T) { for i, tt := range stringtests { json, err := json.Marshal(tt.tv) if err != nil { t.Errorf("%d. Marshal(%q) returned err: %s", i, tt.tv, err) } else { if !bytes.Equal(json, tt.json) { t.Errorf( "%d. Marshal(%q) => %q, want %q", i, tt.tv, json, tt.json, ) } } } }
explode_data.jsonl/14047
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 5668, 1130, 79712, 6132, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17853, 1669, 2088, 914, 23841, 341, 197, 30847, 11, 1848, 1669, 2951, 37271, 47152, 14485, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244, 13080, 4430, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPdscXML(t *testing.T) { assert := assert.New(t) t.Run("test NewPdscXML", func(t *testing.T) { var fileName = "somefile.pdsc" pdscXML := xml.NewPdscXML(fileName) assert.NotNil(pdscXML, "NewPdscXML should not fail on a simple instance creation") }) t.Run("test latest version", func(t *testing.T) { var latest string pdscXML := xml.PdscXML{ Vendor: "TheVendor", URL: "http://the.url/", Name: "TheName", } // It is OK to have an empty LatestVersion() (or is it?) latest = pdscXML.LatestVersion() assert.Equal(latest, "") release1 := xml.ReleaseTag{ Version: "0.0.1", } release2 := xml.ReleaseTag{ Version: "0.0.2", } pdscXML.ReleasesTag.Releases = append(pdscXML.ReleasesTag.Releases, release2) pdscXML.ReleasesTag.Releases = append(pdscXML.ReleasesTag.Releases, release1) latest = pdscXML.LatestVersion() assert.Equal(latest, "0.0.2") }) t.Run("test pdscXML to pdscTag generation", func(t *testing.T) { var url = "http://the.url/" var name = "TheName" var vendor = "TheVendor" var version = "0.0.1" pdscXML := xml.PdscXML{ Vendor: vendor, URL: url, Name: name, } release := xml.ReleaseTag{ Version: version, } pdscXML.ReleasesTag.Releases = append(pdscXML.ReleasesTag.Releases, release) pdscTag := pdscXML.Tag() assert.Equal(pdscTag.Vendor, vendor) assert.Equal(pdscTag.URL, url) assert.Equal(pdscTag.Name, name) assert.Equal(pdscTag.Version, version) }) t.Run("test reading a PDSC file", func(t *testing.T) { pdsc := xml.NewPdscXML("../../testdata/devpack/1.2.3/TheVendor.DevPack.pdsc") assert.Nil(pdsc.Read()) assert.Equal(pdsc.Vendor, "TheVendor") assert.Equal(pdsc.URL, "file:///testdata/devpack/1.2.3/") assert.Equal(pdsc.Name, "DevPack") assert.Equal(pdsc.LatestVersion(), "1.2.3") }) }
explode_data.jsonl/52640
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 828 }
[ 2830, 3393, 47, 67, 2388, 10609, 1155, 353, 8840, 836, 8, 1476, 6948, 1669, 2060, 7121, 1155, 692, 3244, 16708, 445, 1944, 1532, 47, 67, 2388, 10609, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 2405, 12665, 284, 330, 14689, 1192, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCustomerCreate(t *testing.T) { // Set up the mock customer response mockCustomerResponseID := int64(1523) mockCustomerResponse := new(Customer) mockCustomerResponseData := new(invdendpoint.Customer) mockCustomerResponse.Customer = mockCustomerResponseData mockCustomerResponse.Id = mockCustomerResponseID // Launch our mock server server, err := invdmockserver.New(200, mockCustomerResponse, "json", true) if err != nil { t.Fatal(err) } defer server.Close() // Establish our mock connection key := "test api key" conn := mockConnection(key, server) customer := conn.NewCustomer() nowUnix := time.Now().UnixNano() s := strconv.FormatInt(nowUnix, 10) customerToCreate := customer.NewCustomer() customerToCreate.Name = "Test Customer Original " + s customerToCreate.Id = mockCustomerResponse.Id mockCustomerResponse.Name = customerToCreate.Name // mockCustomerResponse.Connection = conn // Make the call to create our customer createdCustomer, err := customer.Create(customerToCreate) if err != nil { t.Fatal("Error Creating Customer", err) } // Customer that we wanted to create should equal the customer we created if !reflect.DeepEqual(createdCustomer, customerToCreate) { t.Fatal(createdCustomer.Customer, customerToCreate.Customer) } }
explode_data.jsonl/15000
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 387 }
[ 2830, 3393, 12792, 4021, 1155, 353, 8840, 836, 8, 341, 197, 322, 2573, 705, 279, 7860, 6002, 2033, 198, 77333, 12792, 2582, 915, 1669, 526, 21, 19, 7, 16, 20, 17, 18, 340, 77333, 12792, 2582, 1669, 501, 71412, 340, 77333, 12792, 258...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestADS1x15DriverReadInvalidGain(t *testing.T) { d, _ := initTestADS1015DriverWithStubbedAdaptor() _, err := d.Read(0, 21, d.DefaultDataRate) gobottest.Assert(t, err, errors.New("Gain must be one of: 2/3, 1, 2, 4, 8, 16")) }
explode_data.jsonl/42590
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 49541, 16, 87, 16, 20, 11349, 4418, 7928, 58611, 1155, 353, 8840, 836, 8, 341, 2698, 11, 716, 1669, 2930, 2271, 49541, 16, 15, 16, 20, 11349, 2354, 33838, 2721, 2589, 32657, 2822, 197, 6878, 1848, 1669, 294, 6503, 7, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_seperateFNameFromArg(t *testing.T) { type args struct { signature string } tests := []struct { name string args args wantName string wantArg string }{ { "simple", args{"A(B)"}, "A", "B", }, { "no Arg", args{"A"}, "A", "", }, { "empty Parenthesis", args{"A()"}, "A", "", }, { "no name", args{"(B)"}, "", "B", }, { "more complex", args{"A(B+C)"}, "A", "B+C", }, { "more complex, neste functions", args{"A(B(D)+C(E(F)))"}, "A", "B(D)+C(E(F))", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { gotName, gotArg := seperateFNameFromArg(tt.args.signature) if gotName != tt.wantName { t.Errorf("seperateFNameFromArg() gotName = %v, want %v", gotName, tt.wantName) } if gotArg != tt.wantArg { t.Errorf("seperateFNameFromArg() gotArg = %v, want %v", gotArg, tt.wantArg) } }) } }
explode_data.jsonl/64797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 509 }
[ 2830, 3393, 3453, 79113, 37, 675, 3830, 2735, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 69054, 1568, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 31215, 257, 2827, 198, 197, 50780,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMarshalRawFuncLog(t *testing.T) { buf := make([]byte, DefaultMaxSmallPacketSize) a := assert.New(t) n := MarshalRawFuncLog(buf, rawFuncLog) buf = buf[:n] a.Equal(rawFuncLogBytes, buf) }
explode_data.jsonl/72613
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 55438, 20015, 9626, 2201, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 1281, 10556, 3782, 11, 7899, 5974, 25307, 16679, 1695, 340, 11323, 1669, 2060, 7121, 1155, 692, 9038, 1669, 35667, 20015, 9626, 2201, 10731, 11, 7112, 9626, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGRPCTransportPortRange(t *testing.T) { tp := NewTransport() lsn1, err := tp.Listen(":44444-44448") if err != nil { t.Errorf("Did not expect an error, got %s", err) } expectedPort(t, "44444", lsn1) lsn2, err := tp.Listen(":44444-44448") if err != nil { t.Errorf("Did not expect an error, got %s", err) } expectedPort(t, "44445", lsn2) lsn, err := tp.Listen(":0") if err != nil { t.Errorf("Did not expect an error, got %s", err) } lsn.Close() lsn1.Close() lsn2.Close() }
explode_data.jsonl/82518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 95993, 1162, 33389, 403, 7084, 6046, 1155, 353, 8840, 836, 8, 341, 73423, 1669, 1532, 27560, 2822, 197, 4730, 77, 16, 11, 1848, 1669, 18101, 68334, 18893, 19, 19, 19, 19, 19, 12, 19, 19, 19, 19, 23, 1138, 743, 1848, 96...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestPluginEndorserGreenPath(t *testing.T) { expectedSignature := []byte{5, 4, 3, 2, 1} expectedProposalResponsePayload := []byte{1, 2, 3} pluginMapper := &mocks.PluginMapper{} pluginFactory := &mocks.PluginFactory{} plugin := &mocks.Plugin{} plugin.On("Endorse", mock.Anything, mock.Anything).Return(&peer.Endorsement{Signature: expectedSignature}, expectedProposalResponsePayload, nil) pluginMapper.On("PluginFactoryByName", endorser.PluginName("plugin")).Return(pluginFactory) // Expect that the plugin would be instantiated only once in this test, because we call the endorser with the same arguments plugin.On("Init", mock.Anything, mock.Anything).Return(nil).Once() pluginFactory.On("New").Return(plugin).Once() sif := &mocks.SigningIdentityFetcher{} cs := &mocks.ChannelStateRetriever{} queryCreator := &mocks.QueryCreator{} cs.On("NewQueryCreator", "mychannel").Return(queryCreator, nil) pluginEndorser := endorser.NewPluginEndorser(&endorser.PluginSupport{ ChannelStateRetriever: cs, SigningIdentityFetcher: sif, PluginMapper: pluginMapper, TransientStoreRetriever: mockTransientStoreRetriever, }) // Scenario I: Call the endorsement for the first time endorsement, prpBytes, err := pluginEndorser.EndorseWithPlugin("plugin", "mychannel", nil, nil) assert.NoError(t, err) assert.Equal(t, expectedSignature, endorsement.Signature) assert.Equal(t, expectedProposalResponsePayload, prpBytes) // Ensure both state and SigningIdentityFetcher were passed to Init() plugin.AssertCalled(t, "Init", &endorser.ChannelState{QueryCreator: queryCreator, Store: &transientstore.Store{}}, sif) // Scenario II: Call the endorsement again a second time. // Ensure the plugin wasn't instantiated again - which means the same instance // was used to service the request. // Also - check that the Init() wasn't called more than once on the plugin. 
endorsement, prpBytes, err = pluginEndorser.EndorseWithPlugin("plugin", "mychannel", nil, nil) assert.NoError(t, err) assert.Equal(t, expectedSignature, endorsement.Signature) assert.Equal(t, expectedProposalResponsePayload, prpBytes) pluginFactory.AssertNumberOfCalls(t, "New", 1) plugin.AssertNumberOfCalls(t, "Init", 1) // Scenario III: Call the endorsement with a channel-less context. // The init method should be called again, but this time - a channel state object // should not be passed into the init. pluginFactory.On("New").Return(plugin).Once() plugin.On("Init", mock.Anything).Return(nil).Once() endorsement, prpBytes, err = pluginEndorser.EndorseWithPlugin("plugin", "", nil, nil) assert.NoError(t, err) assert.Equal(t, expectedSignature, endorsement.Signature) assert.Equal(t, expectedProposalResponsePayload, prpBytes) plugin.AssertCalled(t, "Init", sif) }
explode_data.jsonl/1348
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 905 }
[ 2830, 3393, 11546, 3727, 269, 799, 19576, 1820, 1155, 353, 8840, 836, 8, 341, 42400, 25088, 1669, 3056, 3782, 90, 20, 11, 220, 19, 11, 220, 18, 11, 220, 17, 11, 220, 16, 532, 42400, 98637, 2582, 29683, 1669, 3056, 3782, 90, 16, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetCheckName(t *testing.T) { type args struct { status provider.StatusOpts pacopts *info.PacOpts } tests := []struct { name string args args want string }{ { name: "no application name", args: args{ status: provider.StatusOpts{ OriginalPipelineRunName: "HELLO", }, pacopts: &info.PacOpts{ApplicationName: ""}, }, want: "HELLO", }, { name: "application and pipelinerun name", args: args{ status: provider.StatusOpts{ OriginalPipelineRunName: "MOTO", }, pacopts: &info.PacOpts{ApplicationName: "HELLO"}, }, want: "HELLO / MOTO", }, { name: "application no pipelinerun name", args: args{ status: provider.StatusOpts{ OriginalPipelineRunName: "", }, pacopts: &info.PacOpts{ApplicationName: "PAC"}, }, want: "PAC", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := getCheckName(tt.args.status, tt.args.pacopts); got != tt.want { t.Errorf("getCheckName() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/40816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 491 }
[ 2830, 3393, 1949, 3973, 675, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 23847, 220, 9109, 10538, 43451, 198, 197, 3223, 580, 10518, 353, 2733, 1069, 580, 43451, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestErrEventFingerprintOverride(t *testing.T) { assert := assert.New(t) ctx := WithFingerprint(context.Background(), "test", "fingerprint") event := errEvent(ctx, logger.ErrorEvent{ Flag: logger.Fatal, Err: ex.New("this is a test", ex.OptMessage("a message")), State: &http.Request{ Method: "POST", Host: "example.org", TLS: &tls.ConnectionState{}, URL: webutil.MustParseURL("https://example.org/foo"), }, }) assert.NotNil(event) assert.NotEmpty(event.Fingerprint) assert.Equal([]string{"test", "fingerprint"}, event.Fingerprint) }
explode_data.jsonl/81456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 7747, 1556, 37, 47918, 2177, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 20985, 1669, 3085, 37, 47918, 5378, 19047, 1507, 330, 1944, 497, 330, 69, 47918, 5130, 28302, 1669, 1848, 1556, 7502, 11, 5925, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestActiveHandshake(t *testing.T) { c := context.New(t, defaultMTU) defer c.Cleanup() c.CreateConnected(context.TestInitialSequenceNumber, 30000, -1 /* epRcvBuf */) }
explode_data.jsonl/75927
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 5728, 2314, 29661, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2266, 7121, 1155, 11, 1638, 8505, 52, 340, 16867, 272, 727, 60639, 2822, 1444, 7251, 21146, 5378, 8787, 6341, 14076, 2833, 11, 220, 18, 15, 15, 15, 15, 11, 481...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFilePoolReset(t *testing.T) { dir, err := ioutil.TempDir("", "fastzip-filepool") require.NoError(t, err) defer os.RemoveAll(dir) fp, err := New(dir, 16, -1) require.NoError(t, err) for i := range fp.files { file := fp.Get() _, err = file.Write(bytes.Repeat([]byte("0"), i)) assert.NoError(t, err) b, err := ioutil.ReadAll(file) assert.NoError(t, err) assert.Len(t, b, i) assert.Equal(t, uint64(i), file.Written()) _, err = file.Hasher().Write([]byte("hello")) assert.NoError(t, err) assert.Equal(t, uint32(0x3610a686), file.Checksum()) fp.Put(file) } for range fp.files { file := fp.Get() b, err := ioutil.ReadAll(file) assert.NoError(t, err) assert.Len(t, b, 0) assert.Equal(t, uint64(0), file.Written()) assert.Equal(t, uint32(0), file.Checksum()) fp.Put(file) } assert.NoError(t, fp.Close()) }
explode_data.jsonl/60247
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 1703, 10551, 14828, 1155, 353, 8840, 836, 8, 341, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 9349, 9964, 14203, 10285, 1138, 17957, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 14161, 692, 65219, 11, 1848, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuildInboundRoutes(t *testing.T) { testWeightedCluster := service.WeightedCluster{ ClusterName: "default/testCluster|80|local", Weight: 100, } testCases := []struct { name string inputRules []*trafficpolicy.Rule expectFunc func(assert *tassert.Assertions, actual []*xds_route.Route) }{ { name: "valid route rule", inputRules: []*trafficpolicy.Rule{ { Route: trafficpolicy.RouteWeightedClusters{ HTTPRouteMatch: trafficpolicy.HTTPRouteMatch{ Path: "/hello", PathMatchType: trafficpolicy.PathMatchRegex, Methods: []string{"GET"}, Headers: map[string]string{"hello": "world"}, }, WeightedClusters: mapset.NewSet(testWeightedCluster), }, AllowedServiceIdentities: mapset.NewSetFromSlice( []interface{}{identity.K8sServiceAccount{Name: "foo", Namespace: "bar"}.ToServiceIdentity()}, ), }, }, expectFunc: func(assert *tassert.Assertions, actual []*xds_route.Route) { assert.Equal(1, len(actual)) assert.Equal("/hello", actual[0].GetMatch().GetSafeRegex().Regex) assert.Equal("GET", actual[0].GetMatch().GetHeaders()[0].GetSafeRegexMatch().Regex) assert.Equal(1, len(actual[0].GetRoute().GetWeightedClusters().Clusters)) assert.Equal(uint32(100), actual[0].GetRoute().GetWeightedClusters().TotalWeight.GetValue()) assert.Equal("default/testCluster|80|local", actual[0].GetRoute().GetWeightedClusters().Clusters[0].Name) assert.Equal(uint32(100), actual[0].GetRoute().GetWeightedClusters().Clusters[0].Weight.GetValue()) assert.NotNil(actual[0].TypedPerFilterConfig) }, }, { name: "invalid route rule without Rule.AllowedServiceIdentities", inputRules: []*trafficpolicy.Rule{ { Route: trafficpolicy.RouteWeightedClusters{ HTTPRouteMatch: trafficpolicy.HTTPRouteMatch{ Path: "/hello", PathMatchType: trafficpolicy.PathMatchRegex, Methods: []string{"GET"}, Headers: map[string]string{"hello": "world"}, }, WeightedClusters: mapset.NewSet(testWeightedCluster), }, AllowedServiceIdentities: nil, }, }, expectFunc: func(assert *tassert.Assertions, actual []*xds_route.Route) { assert.Equal(0, 
len(actual)) }, }, } for i, tc := range testCases { t.Run(fmt.Sprintf("Testing test case %d: %s", i, tc.name), func(t *testing.T) { actual := buildInboundRoutes(tc.inputRules) tc.expectFunc(tassert.New(t), actual) }) } }
explode_data.jsonl/75513
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1085 }
[ 2830, 3393, 11066, 641, 10891, 26653, 1155, 353, 8840, 836, 8, 341, 18185, 8295, 291, 28678, 1669, 2473, 22404, 64507, 28678, 515, 197, 197, 28678, 675, 25, 330, 2258, 12697, 28678, 91, 23, 15, 91, 2438, 756, 197, 197, 8295, 25, 414, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClusterValidateAndAssignIDs(t *testing.T) { tests := []struct { clmembs []*Member membs []*Member wids []types.ID }{ { []*Member{ newTestMember(1, []string{"http://127.0.0.1:2379"}, "", nil), newTestMember(2, []string{"http://127.0.0.2:2379"}, "", nil), }, []*Member{ newTestMember(3, []string{"http://127.0.0.1:2379"}, "", nil), newTestMember(4, []string{"http://127.0.0.2:2379"}, "", nil), }, []types.ID{3, 4}, }, } for i, tt := range tests { lcl := newTestCluster(tt.clmembs) ecl := newTestCluster(tt.membs) if err := ValidateClusterAndAssignIDs(lcl, ecl); err != nil { t.Errorf("#%d: unexpect update error: %v", i, err) } if !reflect.DeepEqual(lcl.MemberIDs(), tt.wids) { t.Errorf("#%d: ids = %v, want %v", i, lcl.MemberIDs(), tt.wids) } } }
explode_data.jsonl/52335
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 401 }
[ 2830, 3393, 28678, 17926, 3036, 28933, 30466, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 39407, 10536, 1279, 29838, 9366, 198, 197, 14145, 1279, 256, 29838, 9366, 198, 197, 6692, 3365, 262, 3056, 9242, 9910, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHaGroupDeleteParams_WithTimeout(t *testing.T) { p := NewHaGroupDeleteParams() p = p.WithTimeout(time.Minute * 5) require.NotNil(t, p.timeout) assert.Equal(t, time.Minute*5, p.timeout) }
explode_data.jsonl/7752
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 32942, 2808, 6435, 4870, 62, 2354, 7636, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 32942, 2808, 6435, 4870, 741, 3223, 284, 281, 26124, 7636, 9730, 75770, 353, 220, 20, 340, 17957, 93882, 1155, 11, 281, 36110, 340, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestRPC_QueryAccountTXs(t *testing.T) { codec.UpgradeHeight = 7000 var tx *types.TxResponse _, _, cleanup := NewInMemoryTendermintNode(t, oneValTwoNodeGenesisState()) memCLI, _, evtChan := subscribeTo(t, tmTypes.EventNewBlock) <-evtChan // Wait for block var err error _, stopCli, evtChan := subscribeTo(t, tmTypes.EventTx) kb := getInMemoryKeybase() cb, err := kb.GetCoinbase() assert.Nil(t, err) tx, err = nodes.Send(memCodec(), memCLI, kb, cb.GetAddress(), cb.GetAddress(), "test", types.NewInt(100), true) assert.Nil(t, err) assert.NotNil(t, tx) <-evtChan // Wait for tx kb = getInMemoryKeybase() cb, err = kb.GetCoinbase() assert.Nil(t, err) var params = PaginateAddrParams{ Address: cb.GetAddress().String(), } q := newQueryRequest("accounttxs", newBody(params)) rec := httptest.NewRecorder() AccountTxs(rec, q, httprouter.Params{}) resp := getJSONResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) var resTXs core_types.ResultTxSearch unmarshalErr := json.Unmarshal([]byte(resp), &resTXs) assert.Nil(t, unmarshalErr) assert.NotEmpty(t, resTXs.Txs) assert.NotZero(t, resTXs.TotalCount) _, _, evtChan = subscribeTo(t, tmTypes.EventNewBlock) <-evtChan // Wait for block q = newQueryRequest("accounttxs", newBody(params)) rec = httptest.NewRecorder() AccountTxs(rec, q, httprouter.Params{}) resp = getJSONResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) var resTXs2 core_types.ResultTxSearch unmarshalErr = json.Unmarshal([]byte(resp), &resTXs2) assert.Nil(t, unmarshalErr) assert.NotEmpty(t, resTXs2.Txs) assert.NotZero(t, resTXs2.TotalCount) cleanup() stopCli() }
explode_data.jsonl/44708
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 681 }
[ 2830, 3393, 29528, 48042, 7365, 22867, 82, 1155, 353, 8840, 836, 8, 341, 43343, 66, 13, 43861, 3640, 284, 220, 22, 15, 15, 15, 198, 2405, 9854, 353, 9242, 81362, 2582, 198, 197, 6878, 8358, 21290, 1669, 1532, 641, 10642, 51, 1659, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUniqueKeyAddFunc(t *testing.T) { myRowID := EncodeSequence(1) presetKeyPart := []byte("my-preset-key") presetKey := append(AddLengthPrefix(presetKeyPart), myRowID...) specs := map[string]struct { srcKey []byte expErr *sdkerrors.Error expExistingEntry []byte }{ "create when not exists": { srcKey: []byte("my-index-key"), expExistingEntry: append(AddLengthPrefix([]byte("my-index-key")), myRowID...), }, "error when exists already": { srcKey: presetKeyPart, expErr: errors.ErrORMUniqueConstraint, }, "nil key not allowed": { srcKey: nil, expErr: errors.ErrORMInvalidArgument, }, "empty key not allowed": { srcKey: []byte{}, expErr: errors.ErrORMInvalidArgument, }, } for msg, spec := range specs { t.Run(msg, func(t *testing.T) { storeKey := sdk.NewKVStoreKey("test") store := NewMockContext().KVStore(storeKey) store.Set(presetKey, []byte{}) err := uniqueKeysAddFunc(store, spec.srcKey, myRowID) require.True(t, spec.expErr.Is(err)) if spec.expErr != nil { return } assert.True(t, store.Has(spec.expExistingEntry), "not found") }) } }
explode_data.jsonl/42811
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 512 }
[ 2830, 3393, 22811, 1592, 2212, 9626, 1155, 353, 8840, 836, 8, 341, 13624, 3102, 915, 1669, 56562, 14076, 7, 16, 340, 3223, 9716, 1592, 5800, 1669, 3056, 3782, 445, 2408, 2268, 9716, 16173, 1138, 3223, 9716, 1592, 1669, 8737, 7, 2212, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStringToUint32(t *testing.T) { type args struct { ip string } tests := []struct { name string args args want uint32 }{ {name: "normal", args: args{ip: "1.2.3.4"}, want: 67305985}, {name: "illegal", args: args{ip: "1.2"}, want: 0}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := StringToUint32(tt.args.ip); got != tt.want { t.Errorf("StringToUint32() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/13149
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 703, 1249, 21570, 18, 17, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 46531, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 2622, 18, 17, 198, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInfluxdbQueryParser(t *testing.T) { Convey("Influxdb query parser", t, func() { parser := &InfluxdbQueryParser{} dsInfo := &models.DataSource{ JsonData: simplejson.New(), } Convey("can parse influxdb json model", func() { json := ` { "dsType": "influxdb", "groupBy": [ { "params": [ "$interval" ], "type": "time" }, { "params": [ "datacenter" ], "type": "tag" }, { "params": [ "none" ], "type": "fill" } ], "measurement": "logins.count", "policy": "default", "refId": "B", "resultFormat": "time_series", "select": [ [ { "type": "field", "params": [ "value" ] }, { "type": "count", "params": [] } ], [ { "type": "field", "params": [ "value" ] }, { "type": "bottom", "params": [ 3 ] } ], [ { "type": "field", "params": [ "value" ] }, { "type": "mean", "params": [] }, { "type": "math", "params": [ " / 100" ] } ] ], "alias": "serie alias", "tags": [ { "key": "datacenter", "operator": "=", "value": "America" }, { "condition": "OR", "key": "hostname", "operator": "=", "value": "server1" } ] } ` dsInfo.JsonData.Set("timeInterval", ">20s") modelJson, err := simplejson.NewJson([]byte(json)) So(err, ShouldBeNil) res, err := parser.Parse(modelJson, dsInfo) So(err, ShouldBeNil) So(len(res.GroupBy), ShouldEqual, 3) So(len(res.Selects), ShouldEqual, 3) So(len(res.Tags), ShouldEqual, 2) So(res.Interval, ShouldEqual, ">20s") So(res.Alias, ShouldEqual, "serie alias") }) Convey("can part raw query json model", func() { json := ` { "dsType": "influxdb", "groupBy": [ { "params": [ "$interval" ], "type": "time" }, { "params": [ "null" ], "type": "fill" } ], "interval": ">10s", "policy": "default", "query": "RawDummieQuery", "rawQuery": true, "refId": "A", "resultFormat": "time_series", "select": [ [ { "params": [ "value" ], "type": "field" }, { "params": [ ], "type": "mean" } ] ], "tags": [ ] } ` modelJson, err := simplejson.NewJson([]byte(json)) So(err, ShouldBeNil) res, err := parser.Parse(modelJson, dsInfo) So(err, ShouldBeNil) So(res.RawQuery, ShouldEqual, "RawDummieQuery") 
So(len(res.GroupBy), ShouldEqual, 2) So(len(res.Selects), ShouldEqual, 1) So(len(res.Tags), ShouldEqual, 0) So(res.Interval, ShouldEqual, ">10s") }) }) }
explode_data.jsonl/606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2357 }
[ 2830, 3393, 641, 36706, 1999, 2859, 6570, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 641, 36706, 1999, 3239, 6729, 497, 259, 11, 2915, 368, 1476, 197, 55804, 1669, 609, 641, 36706, 1999, 2859, 6570, 16094, 197, 83336, 1731, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecodeTcp_splitRequest(t *testing.T) { stream := &DnsStream{rawData: sophosTxtTcp.request[:10], message: new(DnsMessage)} _, err := decodeDnsData(TransportTcp, stream.rawData) assert.NotNil(t, err, "Not expecting a complete message yet") stream.rawData = append(stream.rawData, sophosTxtTcp.request[10:]...) _, err = decodeDnsData(TransportTcp, stream.rawData) assert.Nil(t, err, "Message should be complete") }
explode_data.jsonl/68697
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 156 }
[ 2830, 3393, 32564, 77536, 17052, 1900, 1155, 353, 8840, 836, 8, 341, 44440, 1669, 609, 35, 4412, 3027, 90, 1041, 1043, 25, 18701, 436, 35629, 77536, 8223, 3447, 16, 15, 1125, 1943, 25, 501, 5432, 4412, 2052, 10569, 197, 6878, 1848, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBaseMetricListFlushBeforeStale(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() var ( targetNanosFn = standardMetricTargetNanos isEarlierThanFn = isStandardMetricEarlierThan timestampNanosFn = standardMetricTimestampNanos opts = testOptions(ctrl) ) l, err := newBaseMetricList(testShard, 0, targetNanosFn, isEarlierThanFn, timestampNanosFn, opts) require.NoError(t, err) l.lastFlushedNanos = 1234 l.flushBefore(1000, 0, discardType) require.Equal(t, int64(1234), l.LastFlushedNanos()) }
explode_data.jsonl/43584
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 3978, 54310, 852, 46874, 10227, 623, 1574, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 2405, 2399, 197, 28861, 45, 43605, 24911, 262, 284, 5297, 54310, 63...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBlockBreak(t *testing.T) { const SCRIPT = ` var rv = 0; B1: { rv = 1; B2: { rv = 2; break B1; } rv = 3; } ` testScript(SCRIPT, intToValue(2), t) }
explode_data.jsonl/75221
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 102 }
[ 2830, 3393, 4713, 22524, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 2405, 17570, 284, 220, 15, 280, 12791, 16, 25, 341, 197, 78484, 284, 220, 16, 280, 197, 12791, 17, 25, 341, 298, 78484, 284, 220, 17, 280, 298, 3388, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPushExporterServicer_Submit_Untyped(t *testing.T) { srv, exp := makeTestCustomPushExporter(t) err := submitNewMetric(exp, prometheus_models.MetricType_UNTYPED, sampleGatewayContext) assert.NoError(t, err) assert.Equal(t, 1, totalMetricCount(srv)) err = submitNewMetric(exp, prometheus_models.MetricType_UNTYPED, sampleGatewayContext) assert.NoError(t, err) assert.Equal(t, 2, totalMetricCount(srv)) assert.Equal(t, len(srv.FamiliesByName), 1) for _, fam := range srv.FamiliesByName { assert.Equal(t, prometheus_models.MetricType_GAUGE, *fam.Type) for _, metric := range fam.Metric { assert.True(t, tests.HasLabel(metric.Label, "testLabel", "testValue")) } } }
explode_data.jsonl/61836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 16644, 88025, 39159, 12999, 36359, 1763, 6665, 406, 32501, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 11, 1343, 1669, 1281, 2271, 10268, 16644, 88025, 1155, 340, 9859, 1669, 9318, 3564, 54310, 25865, 11, 2706, 39705, 30792, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuffer(t *testing.T) { pb := proto.NewBuffer(nil) pb.EncodeVarint(10) pos1 := len(pb.Bytes()) pb.EncodeRawBytes([]byte("JunkText")) pos2 := len(pb.Bytes()) pb.EncodeRawBytes([]byte("YetAnotherJunkText")) pos3 := len(pb.Bytes()) pb.EncodeVarint(1000000) pos4 := len(pb.Bytes()) b := NewBuffer(pb.Bytes()) b.DecodeVarint() assert.Equal(t, pos1, b.GetBytesConsumed()) b.DecodeRawBytes(false) assert.Equal(t, pos2, b.GetBytesConsumed()) b.DecodeRawBytes(false) assert.Equal(t, pos3, b.GetBytesConsumed()) b.DecodeVarint() assert.Equal(t, pos4, b.GetBytesConsumed()) }
explode_data.jsonl/17834
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 4095, 1155, 353, 8840, 836, 8, 341, 3223, 65, 1669, 18433, 7121, 4095, 27907, 340, 3223, 65, 50217, 3962, 396, 7, 16, 15, 340, 28164, 16, 1669, 2422, 76878, 36868, 2398, 3223, 65, 50217, 20015, 7078, 10556, 3782, 445, 41, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateCharts(t *testing.T) { charts := []Chart{ Chart{ Title: "toplevel", Location: "/foo", WaitUntilDeployment: "toplevel", DeploymentHealthIndication: IgnorePodHealth, DependencyList: []string{"someservice", "anotherthing", "redis"}, }, Chart{ Title: "someservice", Location: "/foo", WaitUntilDeployment: "someservice", DeploymentHealthIndication: IgnorePodHealth, }, Chart{ Title: "anotherthing", Location: "/foo", WaitUntilDeployment: "anotherthing", DeploymentHealthIndication: AllPodsHealthy, WaitTimeout: 2 * time.Second, DependencyList: []string{"redis"}, }, Chart{ Title: "redis", Location: "/foo", DeploymentHealthIndication: IgnorePodHealth, }, } if err := ValidateCharts(charts); err != nil { t.Fatalf("should have succeeded: %v", err) } charts[3].DependencyList = []string{"anotherthing"} if err := ValidateCharts(charts); err == nil { t.Fatalf("should have failed with dependency cycle") } charts[3].DependencyList = nil charts[3].Title = "" if err := ValidateCharts(charts); err == nil { t.Fatalf("should have failed with empty title") } charts[3].Title = "redis" charts[3].Location = "" if err := ValidateCharts(charts); err == nil { t.Fatalf("should have failed with empty location") } charts[3].Location = "/foo" charts[3].DeploymentHealthIndication = 9999 if err := ValidateCharts(charts); err == nil { t.Fatalf("should have failed with invalid DeploymentHealthIndication") } charts[3].DeploymentHealthIndication = IgnorePodHealth charts[3].DependencyList = []string{"doesntexist"} if err := ValidateCharts(charts); err == nil { t.Fatalf("should have failed with unknown dependency") } }
explode_data.jsonl/69446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 853 }
[ 2830, 3393, 17926, 64878, 1155, 353, 8840, 836, 8, 341, 23049, 7038, 1669, 3056, 14488, 515, 197, 197, 14488, 515, 298, 92233, 25, 2549, 330, 83, 67780, 756, 298, 197, 4707, 25, 4293, 3521, 7975, 756, 298, 197, 14190, 24493, 75286, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetRegion(t *testing.T) { zoneName := "us-central1-b" regionName, err := GetGCERegion(zoneName) if err != nil { t.Fatalf("unexpected error from GetGCERegion: %v", err) } if regionName != "us-central1" { t.Errorf("Unexpected region from GetGCERegion: %s", regionName) } gce := &GCECloud{ localZone: zoneName, region: regionName, } zones, ok := gce.Zones() if !ok { t.Fatalf("Unexpected missing zones impl") } zone, err := zones.GetZone() if err != nil { t.Fatalf("unexpected error %v", err) } if zone.Region != "us-central1" { t.Errorf("Unexpected region: %s", zone.Region) } }
explode_data.jsonl/28871
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 258 }
[ 2830, 3393, 1949, 14091, 1155, 353, 8840, 836, 8, 341, 197, 8684, 675, 1669, 330, 355, 84081, 16, 1455, 698, 197, 3943, 675, 11, 1848, 1669, 2126, 22863, 640, 791, 290, 74228, 675, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestGenModel_Issue1409(t *testing.T) { specDoc, err := loads.Spec("../fixtures/bugs/1409/fixture-1409.yaml") require.NoError(t, err) definitions := specDoc.Spec().Definitions k := "Graph" schema := definitions[k] opts := opts() genModel, err := makeGenDefinition(k, "models", schema, specDoc, opts) require.NoError(t, err) buf := bytes.NewBuffer(nil) require.NoError(t, opts.templates.MustGet("model").Execute(buf, genModel)) ct, err := opts.LanguageOpts.FormatContent("foo.go", buf.Bytes()) require.NoError(t, err) res := string(ct) // Just verify that the validation call is generated with proper format assertInCode(t, `nodes, err := UnmarshalNodeSlice(bytes.NewBuffer(data.Nodes), runtime.JSONConsumer())`, res) assertInCode(t, `if err := json.Unmarshal(raw, &rawProps); err != nil {`, res) assertInCode(t, `m.GraphAdditionalProperties[k] = toadd`, res) assertInCode(t, `b3, err = json.Marshal(m.GraphAdditionalProperties)`, res) }
explode_data.jsonl/2565
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 9967, 1712, 7959, 83890, 16, 19, 15, 24, 1155, 353, 8840, 836, 8, 341, 98100, 9550, 11, 1848, 1669, 20907, 36473, 17409, 45247, 14, 56176, 14, 16, 19, 15, 24, 14, 59612, 12, 16, 19, 15, 24, 33406, 1138, 17957, 35699, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateTaskRunsState(t *testing.T) { // TestUpdateTaskRunsState runs "getTaskRunsStatus" and verifies how it updates a PipelineRun status // from a TaskRun associated to the PipelineRun pr := &v1beta1.PipelineRun{ ObjectMeta: baseObjectMeta("test-pipeline-run", "foo"), Spec: v1beta1.PipelineRunSpec{ PipelineRef: &v1beta1.PipelineRef{Name: "test-pipeline"}, }, } pipelineTask := v1beta1.PipelineTask{ Name: "unit-test-1", WhenExpressions: []v1beta1.WhenExpression{{ Input: "foo", Operator: selection.In, Values: []string{"foo", "bar"}, }}, TaskRef: &v1beta1.TaskRef{Name: "unit-test-task"}, } task := &v1beta1.Task{ ObjectMeta: baseObjectMeta("unit-test-task", "foo"), Spec: v1beta1.TaskSpec{ Resources: &v1beta1.TaskResources{ Inputs: []v1beta1.TaskResource{{ ResourceDeclaration: v1beta1.ResourceDeclaration{ Name: "workspace", Type: resourcev1alpha1.PipelineResourceTypeGit, }, }}, }, }, } taskrun := &v1beta1.TaskRun{ ObjectMeta: baseObjectMeta("test-pipeline-run-success-unit-test-1", "foo"), Spec: v1beta1.TaskRunSpec{ TaskRef: &v1beta1.TaskRef{Name: "unit-test-task"}, ServiceAccountName: "test-sa", Resources: &v1beta1.TaskRunResources{}, Timeout: &metav1.Duration{Duration: config.DefaultTimeoutMinutes * time.Minute}, }, Status: v1beta1.TaskRunStatus{ Status: duckv1beta1.Status{ Conditions: duckv1beta1.Conditions{ apis.Condition{ Type: apis.ConditionSucceeded, }, }, }, TaskRunStatusFields: v1beta1.TaskRunStatusFields{ Steps: []v1beta1.StepState{{ ContainerState: corev1.ContainerState{ Terminated: &corev1.ContainerStateTerminated{ExitCode: int32(0)}, }, }}, }, }, } expectedTaskRunsStatus := make(map[string]*v1beta1.PipelineRunTaskRunStatus) expectedTaskRunsStatus["test-pipeline-run-success-unit-test-1"] = &v1beta1.PipelineRunTaskRunStatus{ PipelineTaskName: "unit-test-1", Status: &v1beta1.TaskRunStatus{ TaskRunStatusFields: v1beta1.TaskRunStatusFields{ Steps: []v1beta1.StepState{{ ContainerState: corev1.ContainerState{ Terminated: 
&corev1.ContainerStateTerminated{ExitCode: 0}, }, }}}, Status: duckv1beta1.Status{ Conditions: []apis.Condition{{Type: apis.ConditionSucceeded}}, }, }, WhenExpressions: []v1beta1.WhenExpression{{ Input: "foo", Operator: selection.In, Values: []string{"foo", "bar"}, }}, } expectedPipelineRunStatus := v1beta1.PipelineRunStatus{ PipelineRunStatusFields: v1beta1.PipelineRunStatusFields{ TaskRuns: expectedTaskRunsStatus, }, } state := resources.PipelineRunState{{ PipelineTask: &pipelineTask, TaskRunName: "test-pipeline-run-success-unit-test-1", TaskRun: taskrun, ResolvedTaskResources: &taskrunresources.ResolvedTaskResources{ TaskSpec: &task.Spec, }, }} pr.Status.InitializeConditions() status := state.GetTaskRunsStatus(pr) if d := cmp.Diff(expectedPipelineRunStatus.TaskRuns, status); d != "" { t.Fatalf("Expected PipelineRun status to match TaskRun(s) status, but got a mismatch: %s", diff.PrintWantGot(d)) } }
explode_data.jsonl/68253
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1358 }
[ 2830, 3393, 4289, 6262, 73920, 1397, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 4289, 6262, 73920, 1397, 8473, 330, 455, 6262, 73920, 2522, 1, 323, 87856, 1246, 432, 8837, 264, 40907, 6727, 2639, 198, 197, 322, 504, 264, 5430, 6727...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInt8Slice(t *testing.T) { val := int8(1) m := map[string]interface{}{"value": []int8{val}, "nothing": nil} assert.Equal(t, val, New(m).Get("value").Int8Slice()[0]) assert.Equal(t, val, New(m).Get("value").MustInt8Slice()[0]) assert.Equal(t, []int8(nil), New(m).Get("nothing").Int8Slice()) assert.Equal(t, val, New(m).Get("nothing").Int8Slice([]int8{int8(1)})[0]) assert.Panics(t, func() { New(m).Get("nothing").MustInt8Slice() }) }
explode_data.jsonl/23425
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 1072, 23, 33236, 1155, 353, 8840, 836, 8, 1476, 19302, 1669, 526, 23, 7, 16, 340, 2109, 1669, 2415, 14032, 31344, 6257, 4913, 957, 788, 3056, 396, 23, 90, 831, 2137, 330, 41212, 788, 2092, 532, 6948, 12808, 1155, 11, 104...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOnlyWriteTimeout(t *testing.T) { if runtime.GOOS == "plan9" { t.Skip("skipping test; see https://golang.org/issue/7237") } defer afterTest(t) var conn net.Conn var afterTimeoutErrc = make(chan error, 1) ts := httptest.NewUnstartedServer(HandlerFunc(func(w ResponseWriter, req *Request) { buf := make([]byte, 512<<10) _, err := w.Write(buf) if err != nil { t.Errorf("handler Write error: %v", err) return } conn.SetWriteDeadline(time.Now().Add(-30 * time.Second)) _, err = w.Write(buf) afterTimeoutErrc <- err })) ts.Listener = trackLastConnListener{ts.Listener, &conn} ts.Start() defer ts.Close() tr := &Transport{DisableKeepAlives: false} defer tr.CloseIdleConnections() c := &Client{Transport: tr} errc := make(chan error) go func() { res, err := c.Get(ts.URL) if err != nil { errc <- err return } _, err = io.Copy(ioutil.Discard, res.Body) errc <- err }() select { case err := <-errc: if err == nil { t.Errorf("expected an error from Get request") } case <-time.After(5 * time.Second): t.Fatal("timeout waiting for Get error") } if err := <-afterTimeoutErrc; err == nil { t.Error("expected write error after timeout") } }
explode_data.jsonl/22397
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 488 }
[ 2830, 3393, 7308, 7985, 7636, 1155, 353, 8840, 836, 8, 341, 743, 15592, 97574, 3126, 621, 330, 10393, 24, 1, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 26, 1490, 3703, 1110, 70, 37287, 2659, 14, 11159, 14, 22, 17, 18, 22, 1138, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOrdersDataDynamicIndex(t *testing.T) { if checkSkipTest(t) { return } indexesById := map[string]*Index{} initIndexer := func(indexer *Indexer) (*Indexer, errors.Error) { if indexer.IndexesById == nil { indexer.IndexesById = initIndexesById(t, map[string]*Index{ "ftsIdx": { Parent: indexer, IdStr: "ftsIdx", NameStr: "ftsIdx", IndexMapping: &mapping.IndexMappingImpl{ DefaultAnalyzer: "keyword", DefaultDateTimeParser: "disabled", DefaultMapping: &mapping.DocumentMapping{ Enabled: true, Dynamic: true, }, IndexDynamic: true, }, }, }) for id, v := range indexer.IndexesById { indexesById[id] = v } } return indexer, nil } c := MakeWrapCallbacksForIndexType(datastore.IndexType("FTS"), initIndexer) s, err := NewServer("./", c) if err != nil { t.Fatalf("did not expect err: %v", err) } testOrdersData(t, s, indexesById, []testOrdersDataCase{ { `SELECT * FROM data:orders as o UNNEST o.orderlines as orderline WHERE orderline.productId = "sugar22"`, 3, flex.FieldTracks{ flex.FieldTrack("orderlines.productId"): 1, }, false, `{"field":"orderlines.productId","term":"sugar22"}`, }, { `SELECT * FROM data:orders as o UNNEST o.orderlines as orderline WHERE orderline.productId = "sugar22" AND (o.custId = "ccc" OR o.custId = "abc")`, 3, flex.FieldTracks{ flex.FieldTrack("orderlines.productId"): 1, flex.FieldTrack("custId"): 2, }, false, `{"conjuncts":[{"field":"orderlines.productId","term":"sugar22"},{"disjuncts":[{"field":"custId","term":"ccc"},{"field":"custId","term":"abc"}]}]}`, }, { `SELECT * FROM data:orders as o UNNEST orderlines as orderline LEFT OUTER JOIN [] as o2 ON o.id = o2.id WHERE o.custId = "ccc" OR o.custId = "abc"`, 6, flex.FieldTracks{ flex.FieldTrack("custId"): 2, }, false, `{"disjuncts":[{"field":"custId","term":"ccc"},{"field":"custId","term":"abc"}]}`, }, { `SELECT * FROM data:orders as o LEFT OUTER JOIN [] as o2 ON o.id = o2.id UNNEST o.orderlines as orderline LET c = o.custId WHERE c = "ccc" OR c = "abc"`, 6, flex.FieldTracks{ flex.FieldTrack("custId"): 2, 
}, false, `{"disjuncts":[{"field":"custId","term":"ccc"},{"field":"custId","term":"abc"}]}`, }, // --------------------------------------------------------------- { `SELECT * FROM data:orders as o WHERE ANY ol IN o.orderlines SATISFIES ol.instructions = "expedite" END`, 0, flex.FieldTracks{ flex.FieldTrack("orderlines.instructions"): 1, }, false, `{"field":"orderlines.instructions","term":"expedite"}`, }, { `SELECT * FROM data:orders as o WHERE ANY ol IN o.orderlines SATISFIES ol.qty = 100 END`, 0, flex.FieldTracks{}, false, ``, }, { `SELECT * FROM data:orders as o WHERE ANY ol IN o.orderlines SATISFIES ol.instructions = "expedite" AND ol.qty = 100 END`, 0, flex.FieldTracks{}, false, ``, }, { `SELECT * FROM data:orders as o UNNEST o.orderlines as ol WHERE ol.qty = 100`, 0, flex.FieldTracks{}, false, ``, }, }) }
explode_data.jsonl/45731
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1788 }
[ 2830, 3393, 24898, 1043, 21752, 1552, 1155, 353, 8840, 836, 8, 341, 743, 1779, 35134, 2271, 1155, 8, 341, 197, 853, 198, 197, 630, 26327, 288, 2720, 1669, 2415, 14032, 8465, 1552, 31483, 28248, 1552, 261, 1669, 2915, 7195, 261, 353, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_randIntegerByLength1(t *testing.T) { type args struct { length string } tests := []struct { name string args args want int }{ // TODO: Add test cases. } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := randIntegerByLength(tt.args.length); got != tt.want { t.Errorf("randIntegerByLength() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/47734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 33864, 3486, 1359, 4373, 16, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 49046, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 526, 198, 197, 59403...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCheckMountPoint(t *testing.T) { node, err := ioutil.TempDir("", "dir") if err != nil { t.Fatalf("Failed to create folder: %s", err.Error()) } defer os.RemoveAll(node) if err = CheckMountPoint(node); err != nil { t.Errorf("Function returned error: %s", err.Error()) } }
explode_data.jsonl/18651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 3973, 16284, 2609, 1155, 353, 8840, 836, 8, 341, 20831, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 3741, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 311, 1855, 8527, 25, 1018, 82, 497, 1848, 6141, 23...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPathNewIPv4(t *testing.T) { peerP := PathCreatePeer() pathP := PathCreatePath(peerP) ipv4p := NewPath(pathP[0].GetSource(), pathP[0].GetNlri(), true, pathP[0].GetPathAttrs(), time.Now(), false) assert.NotNil(t, ipv4p) }
explode_data.jsonl/57480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 1820, 3564, 58056, 19, 1155, 353, 8840, 836, 8, 341, 197, 16537, 47, 1669, 7933, 4021, 30888, 741, 26781, 47, 1669, 7933, 4021, 1820, 63372, 47, 340, 197, 42676, 19, 79, 1669, 1532, 1820, 5581, 47, 58, 15, 936, 1949, 360...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_incrementNonce(t *testing.T) { opts := &bind.TransactOpts{From: common.HexToAddress("0x45B9c4798999FFa52e1ff1eFce9d3e45819E4158")} gc := &gethClient{ config: cfg, } // noncer failed mockClient := &MockEthCl{} mockClient.On("PendingNonceAt", mock.Anything, opts.From).Return(uint64(0), errors.New("error")).Once() gc.client = mockClient err := gc.setNonce(opts) mockClient.AssertExpectations(t) assert.Error(t, err) assert.Contains(t, err.Error(), "failed to get chain nonce") // noncer success mockClient.On("PendingNonceAt", mock.Anything, opts.From).Return(uint64(1), nil).Once() gc.client = mockClient err = gc.setNonce(opts) mockClient.AssertExpectations(t) assert.NoError(t, err) }
explode_data.jsonl/53157
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 51482, 90528, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 609, 7666, 11815, 531, 43451, 90, 3830, 25, 4185, 91538, 1249, 4286, 445, 15, 87, 19, 20, 33, 24, 66, 19, 22, 24, 23, 24, 24, 24, 1748, 64, 20, 17, 68, 16, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIndexDatabase_GetOrCreateSeriesID(t *testing.T) { ctrl := gomock.NewController(t) defer func() { _ = fileutil.RemoveDir(testPath) ctrl.Finish() }() meta := metadb.NewMockMetadata(ctrl) meta.EXPECT().DatabaseName().Return("test").AnyTimes() db, err := NewIndexDatabase(context.TODO(), testPath, meta, nil, nil) assert.NoError(t, err) // case 1: generate new series id and create new metric id mapping seriesID, isCreated, err := db.GetOrCreateSeriesID(1, 10) assert.NoError(t, err) assert.True(t, isCreated) assert.Equal(t, uint32(1), seriesID) // case 2: get series id from memory seriesID, isCreated, err = db.GetOrCreateSeriesID(1, 10) assert.NoError(t, err) assert.False(t, isCreated) assert.Equal(t, uint32(1), seriesID) // case 3: generate new series id from memory seriesID, isCreated, err = db.GetOrCreateSeriesID(1, 20) assert.NoError(t, err) assert.True(t, isCreated) assert.Equal(t, uint32(2), seriesID) // close db err = db.Close() assert.NoError(t, err) // reopen db, err = NewIndexDatabase(context.TODO(), testPath, meta, nil, nil) assert.NoError(t, err) // case 4: get series id from backend seriesID, isCreated, err = db.GetOrCreateSeriesID(1, 20) assert.NoError(t, err) assert.False(t, isCreated) assert.Equal(t, uint32(2), seriesID) // case 5: gen series id, id sequence reset from backend seriesID, isCreated, err = db.GetOrCreateSeriesID(1, 30) assert.NoError(t, err) assert.True(t, isCreated) assert.Equal(t, uint32(3), seriesID) // case 6: append series wal err, need rollback new series id mockSeriesWAl := wal.NewMockSeriesWAL(ctrl) db1 := db.(*indexDatabase) oldWAL := db1.seriesWAL db1.seriesWAL = mockSeriesWAl mockSeriesWAl.EXPECT().Append(uint32(1), uint64(50), uint32(4)).Return(fmt.Errorf("err")) seriesID, isCreated, err = db.GetOrCreateSeriesID(1, 50) assert.Error(t, err) assert.False(t, isCreated) assert.Equal(t, uint32(0), seriesID) // add use series id => 4 db1.seriesWAL = oldWAL seriesID, isCreated, err = db.GetOrCreateSeriesID(1, 50) 
assert.NoError(t, err) assert.True(t, isCreated) assert.Equal(t, uint32(4), seriesID) // close db err = db.Close() assert.NoError(t, err) }
explode_data.jsonl/33825
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 845 }
[ 2830, 3393, 1552, 5988, 13614, 57111, 25544, 915, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 2915, 368, 341, 197, 197, 62, 284, 1034, 1314, 13270, 6184, 8623, 1820, 692, 197, 84381, 991, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Ordering(t *testing.T) { closeNotify := make(chan struct{}) metricsRegistry := metrics.NewUsageRegistry("test", map[string]string{}, closeNotify) InitPayloadIngester(closeNotify) InitMetrics(metricsRegistry) InitAcker(&noopForwarder{}, metricsRegistry, closeNotify) conn := &testConn{ ch: make(chan uint64, 1), closeNotify: make(chan struct{}), } x := NewXgress(&identity.TokenId{Token: "test"}, "test", conn, Initiator, DefaultOptions()) x.receiveHandler = noopReceiveHandler{} go x.tx() defer x.Close() msgCount := 100000 errorCh := make(chan error, 1) go func() { for i := 0; i < msgCount; i++ { data := make([]byte, 8) binary.LittleEndian.PutUint64(data, uint64(i)) payload := &Payload{ Header: Header{ CircuitId: "test", Flags: 0, RecvBufferSize: 16000, RTT: 0, }, Sequence: int32(i), Headers: nil, Data: data, } if err := x.SendPayload(payload); err != nil { errorCh <- err x.Close() return } } }() timeout := time.After(20 * time.Second) req := require.New(t) for i := 0; i < msgCount; i++ { select { case next := <-conn.ch: req.Equal(uint64(i), next) case <-conn.closeNotify: req.Fail("test failed with count at %v", i) case err := <-errorCh: req.NoError(err) case <-timeout: req.Failf("timed out", "count at %v", i) } } }
explode_data.jsonl/14772
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 634 }
[ 2830, 3393, 53267, 287, 1155, 353, 8840, 836, 8, 341, 27873, 28962, 1669, 1281, 35190, 2036, 37790, 2109, 13468, 15603, 1669, 16734, 7121, 14783, 15603, 445, 1944, 497, 2415, 14032, 30953, 22655, 3265, 28962, 340, 98762, 29683, 25416, 5191,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMultiPath(t *testing.T) { peerP := PathCreatePeer() origin := bgp.NewPathAttributeOrigin(0) aspathParam := []bgp.AsPathParamInterface{bgp.NewAs4PathParam(2, []uint32{64512, 64513, 1, 2})} aspath := bgp.NewPathAttributeAsPath(aspathParam) nlri := bgp.NewIPAddrPrefix(24, "30.30.30.0") localPref100 := bgp.NewPathAttributeLocalPref(100) localPref110 := bgp.NewPathAttributeLocalPref(110) med100 := bgp.NewPathAttributeMultiExitDisc(100) med110 := bgp.NewPathAttributeMultiExitDisc(110) path100 := NewPath(peerP[0], nlri, false, []bgp.PathAttributeInterface{origin, aspath, localPref100}, time.Now(), false) path110 := NewPath(peerP[0], nlri, false, []bgp.PathAttributeInterface{origin, aspath, localPref110}, time.Now(), false) assert.Equal(t, path110.CompareMultiPath(path100), 10) pathMed100 := NewPath(peerP[0], nlri, false, []bgp.PathAttributeInterface{origin, aspath, localPref100, med100}, time.Now(), false) pathMed110 := NewPath(peerP[0], nlri, false, []bgp.PathAttributeInterface{origin, aspath, localPref100, med110}, time.Now(), false) assert.Equal(t, pathMed110.CompareMultiPath(pathMed100), 0) }
explode_data.jsonl/57495
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 412 }
[ 2830, 3393, 20358, 1820, 1155, 353, 8840, 836, 8, 341, 197, 16537, 47, 1669, 7933, 4021, 30888, 741, 197, 8611, 1669, 8951, 79, 7121, 1820, 3907, 13298, 7, 15, 340, 60451, 2343, 2001, 1669, 3056, 12220, 79, 20242, 93492, 5051, 90, 122...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFactoryVerifyChequebookInvalid(t *testing.T) { factoryAddress := common.HexToAddress("0xabcd") chequebookAddress := common.HexToAddress("0xefff") factory := chequebook.NewFactory( backendmock.New(), transactionmock.New( transactionmock.WithABICall( &factoryABI, common.Hex2Bytes("0000000000000000000000000000000000000000000000000000000000000000"), "deployedContracts", chequebookAddress, ), ), factoryAddress, ) err := factory.VerifyChequebook(context.Background(), chequebookAddress) if err == nil { t.Fatal("verified invalid chequebook") } if !errors.Is(err, chequebook.ErrNotDeployedByFactory) { t.Fatalf("wrong error. wanted %v, got %v", chequebook.ErrNotDeployedByFactory, err) } }
explode_data.jsonl/50291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 4153, 32627, 26843, 591, 2190, 7928, 1155, 353, 8840, 836, 8, 341, 1166, 2919, 4286, 1669, 4185, 91538, 1249, 4286, 445, 15, 52616, 4385, 1138, 197, 1528, 591, 2190, 4286, 1669, 4185, 91538, 1249, 4286, 445, 15, 87, 6445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOut(t *testing.T) { module := "LoggerConfig" funcname := "WithOut()" _ = module _ = funcname type test struct { name string outs []io.Writer wants *LCOut } var tests = []test{ { name: "test defaults", outs: []io.Writer{}, wants: &LCOut{out: os.Stderr}, }, { name: "test single writer", outs: []io.Writer{os.Stdout}, wants: &LCOut{out: os.Stdout}, }, { name: "test multi writers", outs: []io.Writer{os.Stdout, os.Stderr}, wants: &LCOut{out: io.MultiWriter(os.Stdout, os.Stderr)}, }, } var init = func(test test) LoggerConfig { return WithOut(test.outs...) } var verify = func(idx int, test test) { conf := init(test) if !reflect.DeepEqual(conf.(*LCOut).out, test.wants.out) { t.Errorf( "#%v -- FAILED -- [%s] [%s] output mismatch error: wanted %v ; got %v -- action: %s", idx, module, funcname, *test.wants, *conf.(*LCOut), test.name, ) } t.Logf( "#%v -- PASSED -- [%s] [%s] -- action: %s", idx, module, funcname, test.name, ) } for idx, test := range tests { verify(idx, test) } }
explode_data.jsonl/2591
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 554 }
[ 2830, 3393, 2662, 1155, 353, 8840, 836, 8, 341, 54020, 1669, 330, 7395, 2648, 698, 29244, 606, 1669, 330, 2354, 2662, 368, 1837, 197, 62, 284, 4688, 198, 197, 62, 284, 2915, 606, 271, 13158, 1273, 2036, 341, 197, 11609, 220, 914, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGitCommandGetCommits(t *testing.T) { type scenario struct { testName string command func(string, ...string) *exec.Cmd test func([]*Commit) } scenarios := []scenario{ { "No data found", func(cmd string, args ...string) *exec.Cmd { assert.EqualValues(t, "git", cmd) switch args[0] { case "rev-list": assert.EqualValues(t, []string{"rev-list", "@{u}..head", "--abbrev-commit"}, args) return exec.Command("echo") case "log": assert.EqualValues(t, []string{"log", "--oneline", "-30"}, args) return exec.Command("echo") } return nil }, func(commits []*Commit) { assert.Len(t, commits, 0) }, }, { "GetCommits returns 2 commits, 1 pushed the other not", func(cmd string, args ...string) *exec.Cmd { assert.EqualValues(t, "git", cmd) switch args[0] { case "rev-list": assert.EqualValues(t, []string{"rev-list", "@{u}..head", "--abbrev-commit"}, args) return exec.Command("echo", "8a2bb0e") case "log": assert.EqualValues(t, []string{"log", "--oneline", "-30"}, args) return exec.Command("echo", "8a2bb0e commit 1\n78976bc commit 2") } return nil }, func(commits []*Commit) { assert.Len(t, commits, 2) assert.EqualValues(t, []*Commit{ { Sha: "8a2bb0e", Name: "commit 1", Pushed: true, DisplayString: "8a2bb0e commit 1", }, { Sha: "78976bc", Name: "commit 2", Pushed: false, DisplayString: "78976bc commit 2", }, }, commits) }, }, } for _, s := range scenarios { t.Run(s.testName, func(t *testing.T) { gitCmd := newDummyGitCommand() gitCmd.OSCommand.command = s.command s.test(gitCmd.GetCommits()) }) } }
explode_data.jsonl/38380
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 877 }
[ 2830, 3393, 46562, 4062, 1949, 17977, 1199, 1155, 353, 8840, 836, 8, 341, 13158, 15048, 2036, 341, 197, 18185, 675, 914, 198, 197, 45566, 220, 2915, 3609, 11, 2503, 917, 8, 353, 11748, 64512, 198, 197, 18185, 257, 2915, 85288, 33441, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMVCCComputeStatsError(t *testing.T) { defer leaktest.AfterTest(t)() engine := createTestEngine() defer engine.Close() // Write a MVCC metadata key where the value is not an encoded MVCCMetadata // protobuf. if err := engine.Put(mvccKey(roachpb.Key("garbage")), []byte("garbage")); err != nil { t.Fatal(err) } iter := engine.NewIterator(IterOptions{UpperBound: roachpb.KeyMax}) defer iter.Close() for _, mvccStatsTest := range mvccStatsTests { t.Run(mvccStatsTest.name, func(t *testing.T) { _, err := mvccStatsTest.fn(iter, mvccKey(roachpb.KeyMin), mvccKey(roachpb.KeyMax), 100) if e := "unable to decode MVCCMetadata"; !testutils.IsError(err, e) { t.Fatalf("expected %s, got %v", e, err) } }) } }
explode_data.jsonl/41651
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 301 }
[ 2830, 3393, 66626, 3706, 46254, 16635, 1454, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 80118, 1669, 1855, 2271, 4571, 741, 16867, 4712, 10421, 2822, 197, 322, 9645, 264, 42271, 3706, 11160, 1376, 1380, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGenerate(t *testing.T) { testTable := []struct { Name string Rect image.Rectangle Points []image.Point ExpectRes *L ExpectErr error }{ { Name: "errors if centerPoints is nil", Points: nil, ExpectRes: nil, ExpectErr: errors.New("centerPoints must have at least one element"), }, { Name: "errors if centerPoints is empty", Points: make([]image.Point, 0), ExpectRes: nil, ExpectErr: errors.New("centerPoints must have at least one element"), }, { Name: "succeeds under normal circumstances", Rect: image.Rectangle{Max: image.Point{X: 3, Y: 3}}, Points: []image.Point{{X: 0, Y: 0}, {X: 2, Y: 2}}, ExpectRes: &L{ Bounds: image.Rectangle{Max: image.Point{X: 3, Y: 3}}, Points: map[image.Point][]image.Point{ {X: 0, Y: 0}: { {X: 0, Y: 0}, {X: 0, Y: 1}, {X: 0, Y: 2}, {X: 1, Y: 0}, {X: 1, Y: 1}, {X: 2, Y: 0}, }, {X: 2, Y: 2}: { {X: 1, Y: 2}, {X: 2, Y: 1}, {X: 2, Y: 2}, }, }, }, }, } for _, entry := range testTable { entry := entry t.Run(entry.Name, func(t *testing.T) { t.Parallel() res, err := Generate(entry.Rect, entry.Points) if entry.ExpectErr != nil { assert.Nil(t, res) assert.EqualError(t, err, entry.ExpectErr.Error()) } else { assert.NoError(t, err) assertEqualL(t, entry.ExpectRes, res) } }) } }
explode_data.jsonl/33939
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 724 }
[ 2830, 3393, 31115, 1155, 353, 8840, 836, 8, 341, 18185, 2556, 1669, 3056, 1235, 341, 197, 21297, 914, 271, 197, 97014, 256, 2168, 55014, 198, 197, 197, 11411, 3056, 1805, 3775, 271, 197, 35911, 1061, 353, 43, 198, 197, 35911, 7747, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInit(t *testing.T) { // No t.Parallel() since the cfg.RedirectURL is set in Init() ab := authboss.New() oauth := &OAuth2{} router := &mocks.Router{} ab.Config.Modules.OAuth2Providers = testProviders ab.Config.Core.Router = router ab.Config.Core.ErrorHandler = &mocks.ErrorHandler{} ab.Config.Paths.Mount = "/auth" ab.Config.Paths.RootURL = "https://www.example.com" if err := oauth.Init(ab); err != nil { t.Fatal(err) } gets := []string{ "/oauth2/facebook", "/oauth2/callback/facebook", "/oauth2/google", "/oauth2/callback/google", } if err := router.HasGets(gets...); err != nil { t.Error(err) } }
explode_data.jsonl/70896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 261 }
[ 2830, 3393, 3803, 1155, 353, 8840, 836, 8, 341, 197, 322, 2308, 259, 41288, 7957, 368, 2474, 279, 13286, 38869, 3144, 374, 738, 304, 15690, 2822, 197, 370, 1669, 4166, 33314, 7121, 741, 22229, 3242, 1669, 609, 57850, 17, 31483, 67009, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestListR(t *testing.T) { objects := fs.DirEntries{ mockobject.Object("a"), mockobject.Object("b"), mockdir.New("dir"), mockobject.Object("dir/a"), mockobject.Object("dir/b"), mockobject.Object("dir/c"), } f := mockfs.NewFs("mock", "/") var got []string clearCallback := func() { got = nil } callback := func(entries fs.DirEntries) error { for _, entry := range entries { got = append(got, entry.Remote()) } return nil } doListR := func(ctx context.Context, dir string, callback fs.ListRCallback) error { var os fs.DirEntries for _, o := range objects { if dir == "" || strings.HasPrefix(o.Remote(), dir+"/") { os = append(os, o) } } return callback(os) } // Setup filter oldFilter := filter.Active defer func() { filter.Active = oldFilter }() var err error filter.Active, err = filter.NewFilter(nil) require.NoError(t, err) require.NoError(t, filter.Active.AddRule("+ b")) require.NoError(t, filter.Active.AddRule("- *")) // Base case clearCallback() err = listR(context.Background(), f, "", true, ListAll, callback, doListR, false) require.NoError(t, err) require.Equal(t, []string{"a", "b", "dir", "dir/a", "dir/b", "dir/c"}, got) // Base case - with Objects clearCallback() err = listR(context.Background(), f, "", true, ListObjects, callback, doListR, false) require.NoError(t, err) require.Equal(t, []string{"a", "b", "dir/a", "dir/b", "dir/c"}, got) // Base case - with Dirs clearCallback() err = listR(context.Background(), f, "", true, ListDirs, callback, doListR, false) require.NoError(t, err) require.Equal(t, []string{"dir"}, got) // With filter clearCallback() err = listR(context.Background(), f, "", false, ListAll, callback, doListR, false) require.NoError(t, err) require.Equal(t, []string{"b", "dir", "dir/b"}, got) // With filter - with Objects clearCallback() err = listR(context.Background(), f, "", false, ListObjects, callback, doListR, false) require.NoError(t, err) require.Equal(t, []string{"b", "dir/b"}, got) // With filter - with Dir clearCallback() err = 
listR(context.Background(), f, "", false, ListDirs, callback, doListR, false) require.NoError(t, err) require.Equal(t, []string{"dir"}, got) // With filter and subdir clearCallback() err = listR(context.Background(), f, "dir", false, ListAll, callback, doListR, false) require.NoError(t, err) require.Equal(t, []string{"dir/b"}, got) // Now bucket based objects = fs.DirEntries{ mockobject.Object("a"), mockobject.Object("b"), mockobject.Object("dir/a"), mockobject.Object("dir/b"), mockobject.Object("dir/subdir/c"), mockdir.New("dir/subdir"), } // Base case clearCallback() err = listR(context.Background(), f, "", true, ListAll, callback, doListR, true) require.NoError(t, err) require.Equal(t, []string{"a", "b", "dir/a", "dir/b", "dir/subdir/c", "dir/subdir", "dir"}, got) // With filter clearCallback() err = listR(context.Background(), f, "", false, ListAll, callback, doListR, true) require.NoError(t, err) require.Equal(t, []string{"b", "dir/b", "dir/subdir", "dir"}, got) // With filter and subdir clearCallback() err = listR(context.Background(), f, "dir", false, ListAll, callback, doListR, true) require.NoError(t, err) require.Equal(t, []string{"dir/b", "dir/subdir"}, got) // With filter and subdir - with Objects clearCallback() err = listR(context.Background(), f, "dir", false, ListObjects, callback, doListR, true) require.NoError(t, err) require.Equal(t, []string{"dir/b"}, got) // With filter and subdir - with Dirs clearCallback() err = listR(context.Background(), f, "dir", false, ListDirs, callback, doListR, true) require.NoError(t, err) require.Equal(t, []string{"dir/subdir"}, got) }
explode_data.jsonl/72573
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1427 }
[ 2830, 3393, 852, 49, 1155, 353, 8840, 836, 8, 341, 197, 19210, 1669, 8619, 83757, 24533, 515, 197, 77333, 1700, 8348, 445, 64, 4461, 197, 77333, 1700, 8348, 445, 65, 4461, 197, 77333, 3741, 7121, 445, 3741, 4461, 197, 77333, 1700, 834...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewURL(t *testing.T) { testCases := []struct { name, url, expect, host string setup func() func() }{{ name: "https", // Does nothing when the URL has scheme part url: "https://github.com/motemen/pusheen-explorer", expect: "https://github.com/motemen/pusheen-explorer", host: "github.com", }, { name: "scp", // Convert SCP-like URL to SSH URL url: "git@github.com:motemen/pusheen-explorer.git", expect: "ssh://git@github.com/motemen/pusheen-explorer.git", host: "github.com", }, { name: "scp with root", url: "git@github.com:/motemen/pusheen-explorer.git", expect: "ssh://git@github.com/motemen/pusheen-explorer.git", host: "github.com", }, { name: "scp without user", url: "github.com:motemen/pusheen-explorer.git", expect: "ssh://github.com/motemen/pusheen-explorer.git", host: "github.com", }, { name: "different name repository", url: "motemen/ghq", expect: "https://github.com/motemen/ghq", host: "github.com", }, { name: "with authority repository", url: "github.com/motemen/gore", expect: "https://github.com/motemen/gore", host: "github.com", }, { name: "with authority repository and go-import", url: "golang.org/x/crypto", expect: "https://golang.org/x/crypto", host: "golang.org", }, { name: "fill username", setup: func() func() { key := "GITHUB_USER" orig := os.Getenv(key) os.Setenv(key, "ghq-test") return func() { os.Setenv(key, orig) } }, url: "same-name-ghq", expect: "https://github.com/ghq-test/same-name-ghq", host: "github.com", }, { name: "same name repository", setup: func() func() { return gitconfig.WithConfig(t, `[ghq] completeUser = false`) }, url: "peco", expect: "https://github.com/peco/peco", host: "github.com", }} for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { if tc.setup != nil { defer tc.setup()() } repo, err := newURL(tc.url, false, false) if err != nil { t.Errorf("error should be nil but: %s", err) } if repo.String() != tc.expect { t.Errorf("url: got: %s, expect: %s", repo.String(), tc.expect) } if repo.Host != tc.host { 
t.Errorf("host: got: %s, expect: %s", repo.Host, tc.host) } }) } }
explode_data.jsonl/27132
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1066 }
[ 2830, 3393, 3564, 3144, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 11, 2515, 11, 1720, 11, 3468, 914, 198, 197, 84571, 4293, 2915, 368, 2915, 741, 197, 15170, 515, 197, 11609, 25, 256, 330, 2428, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLatencySpike(t *testing.T) { e := getTestExporter() setupLatency(t, os.Getenv("TEST_REDIS_URI")) defer resetLatency(t, os.Getenv("TEST_REDIS_URI")) chM := make(chan prometheus.Metric) go func() { e.Collect(chM) close(chM) }() for m := range chM { if strings.Contains(m.Desc().String(), "latency_spike_duration_seconds") { got := &dto.Metric{} m.Write(got) // The metric value is in seconds, but our sleep interval is specified // in milliseconds, so we need to convert val := got.GetGauge().GetValue() * 1000 // Because we're dealing with latency, there might be a slight delay // even after sleeping for a specific amount of time so checking // to see if we're between +-5 of our expected value if math.Abs(float64(TimeToSleep)-val) > 5 { t.Errorf("values not matching, %f != %f", float64(TimeToSleep), val) } } } resetLatency(t, os.Getenv("TEST_REDIS_URI")) chM = make(chan prometheus.Metric) go func() { e.Collect(chM) close(chM) }() for m := range chM { switch m := m.(type) { case prometheus.Gauge: if strings.Contains(m.Desc().String(), "latency_spike_duration_seconds") { t.Errorf("latency threshold was not reset") } } } }
explode_data.jsonl/46979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 485 }
[ 2830, 3393, 23140, 2251, 50, 64446, 1155, 353, 8840, 836, 8, 341, 7727, 1669, 633, 2271, 88025, 2822, 84571, 23140, 2251, 1155, 11, 2643, 64883, 445, 10033, 2192, 21202, 23116, 5455, 16867, 7585, 23140, 2251, 1155, 11, 2643, 64883, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInt64ArrayScanNil(t *testing.T) { arr := Int64Array{5, 5, 5} err := arr.Scan(nil) if err != nil { t.Fatalf("Expected no error, got %v", err) } if arr != nil { t.Errorf("Expected nil, got %+v", arr) } }
explode_data.jsonl/5327
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 1072, 21, 19, 1857, 26570, 19064, 1155, 353, 8840, 836, 8, 341, 36511, 1669, 1333, 21, 19, 1857, 90, 20, 11, 220, 20, 11, 220, 20, 532, 9859, 1669, 2890, 54874, 27907, 692, 743, 1848, 961, 2092, 341, 197, 3244, 30762, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestProposeValidBlock(t *testing.T) { cs1, vss := randState(4) vs2, vs3, vs4 := vss[1], vss[2], vss[3] height, round := cs1.Height, cs1.Round partSize := types.BlockPartSizeBytes proposalCh := subscribe(cs1.eventBus, types.EventQueryCompleteProposal) timeoutWaitCh := subscribe(cs1.eventBus, types.EventQueryTimeoutWait) timeoutProposeCh := subscribe(cs1.eventBus, types.EventQueryTimeoutPropose) newRoundCh := subscribe(cs1.eventBus, types.EventQueryNewRound) unlockCh := subscribe(cs1.eventBus, types.EventQueryUnlock) pv1, err := cs1.privValidator.GetPubKey() require.NoError(t, err) addr := pv1.Address() voteCh := subscribeToVoter(cs1, addr) // start round and wait for propose and prevote startTestRound(cs1, cs1.Height, round) ensureNewRound(newRoundCh, height, round) ensureNewProposal(proposalCh, height, round) rs := cs1.GetRoundState() propBlock := rs.ProposalBlock propBlockHash := propBlock.Hash() ensurePrevote(voteCh, height, round) validatePrevote(t, cs1, round, vss[0], propBlockHash) // the others sign a polka signAddVotes(cs1, types.PrevoteType, propBlockHash, propBlock.MakePartSet(partSize).Header(), vs2, vs3, vs4) ensurePrecommit(voteCh, height, round) // we should have precommitted validatePrecommit(t, cs1, round, round, vss[0], propBlockHash, propBlockHash) signAddVotes(cs1, types.PrecommitType, nil, types.PartSetHeader{}, vs2, vs3, vs4) ensureNewTimeout(timeoutWaitCh, height, round, cs1.config.Precommit(round).Nanoseconds()) incrementRound(vs2, vs3, vs4) round++ // moving to the next round ensureNewRound(newRoundCh, height, round) t.Log("### ONTO ROUND 2") // timeout of propose ensureNewTimeout(timeoutProposeCh, height, round, cs1.config.Propose(round).Nanoseconds()) ensurePrevote(voteCh, height, round) validatePrevote(t, cs1, round, vss[0], propBlockHash) signAddVotes(cs1, types.PrevoteType, nil, types.PartSetHeader{}, vs2, vs3, vs4) ensureNewUnlock(unlockCh, height, round) ensurePrecommit(voteCh, height, round) // we should have precommitted 
validatePrecommit(t, cs1, round, -1, vss[0], nil, nil) incrementRound(vs2, vs3, vs4) incrementRound(vs2, vs3, vs4) signAddVotes(cs1, types.PrecommitType, nil, types.PartSetHeader{}, vs2, vs3, vs4) round += 2 // moving to the next round ensureNewRound(newRoundCh, height, round) t.Log("### ONTO ROUND 3") ensureNewTimeout(timeoutWaitCh, height, round, cs1.config.Precommit(round).Nanoseconds()) round++ // moving to the next round ensureNewRound(newRoundCh, height, round) t.Log("### ONTO ROUND 4") ensureNewProposal(proposalCh, height, round) rs = cs1.GetRoundState() assert.True(t, bytes.Equal(rs.ProposalBlock.Hash(), propBlockHash)) assert.True(t, bytes.Equal(rs.ProposalBlock.Hash(), rs.ValidBlock.Hash())) assert.True(t, rs.Proposal.POLRound == rs.ValidRound) assert.True(t, bytes.Equal(rs.Proposal.BlockID.Hash, rs.ValidBlock.Hash())) }
explode_data.jsonl/81653
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1071 }
[ 2830, 3393, 2008, 960, 4088, 4713, 1155, 353, 8840, 836, 8, 341, 71899, 16, 11, 348, 778, 1669, 10382, 1397, 7, 19, 340, 5195, 82, 17, 11, 6165, 18, 11, 6165, 19, 1669, 348, 778, 58, 16, 1125, 348, 778, 58, 17, 1125, 348, 778, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNilStructPointerValidation(t *testing.T) { type Inner struct { Data string } type Outer struct { Inner *Inner `validate:"omitempty"` } inner := &Inner{ Data: "test", } outer := &Outer{ Inner: inner, } validate := New() errs := validate.Struct(outer) Equal(t, errs, nil) outer = &Outer{ Inner: nil, } errs = validate.Struct(outer) Equal(t, errs, nil) type Inner2 struct { Data string } type Outer2 struct { Inner2 *Inner2 `validate:"required"` } inner2 := &Inner2{ Data: "test", } outer2 := &Outer2{ Inner2: inner2, } errs = validate.Struct(outer2) Equal(t, errs, nil) outer2 = &Outer2{ Inner2: nil, } errs = validate.Struct(outer2) NotEqual(t, errs, nil) AssertError(t, errs, "Outer2.Inner2", "Outer2.Inner2", "Inner2", "Inner2", "required") type Inner3 struct { Data string } type Outer3 struct { Inner3 *Inner3 } inner3 := &Inner3{ Data: "test", } outer3 := &Outer3{ Inner3: inner3, } errs = validate.Struct(outer3) Equal(t, errs, nil) type Inner4 struct { Data string } type Outer4 struct { Inner4 *Inner4 `validate:"-"` } inner4 := &Inner4{ Data: "test", } outer4 := &Outer4{ Inner4: inner4, } errs = validate.Struct(outer4) Equal(t, errs, nil) }
explode_data.jsonl/77259
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 580 }
[ 2830, 3393, 19064, 9422, 9084, 13799, 1155, 353, 8840, 836, 8, 341, 13158, 36356, 2036, 341, 197, 40927, 914, 198, 197, 630, 13158, 55197, 2036, 341, 197, 197, 31597, 353, 31597, 1565, 7067, 2974, 19967, 8805, 197, 630, 197, 4382, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMemory_String(t *testing.T) { tests := []struct { in Memory out string }{ {1, "1"}, {500, "500"}, {Memory(KB), "1.00kb"}, {Memory(2 * KB), "2.00kb"}, {Memory(KB + 512), "1.50kb"}, {Memory(MB), "1.00mb"}, {Memory(2 * MB), "2.00mb"}, {Memory(MB + (512 * KB)), "1.50mb"}, {Memory(GB), "1.00gb"}, {Memory(2 * GB), "2.00gb"}, {Memory(GB + (512 * MB)), "1.50gb"}, } for _, tt := range tests { out := tt.in.String() if got, want := out, tt.out; got != want { t.Fatalf("Memory.String() => %s; want %s", got, want) } } }
explode_data.jsonl/64518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 275 }
[ 2830, 3393, 10642, 31777, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 220, 13850, 198, 197, 13967, 914, 198, 197, 59403, 197, 197, 90, 16, 11, 330, 16, 7115, 197, 197, 90, 20, 15, 15, 11, 330, 20, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3