text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestImportImportRole(t *testing.T) { th := Setup(t) defer th.TearDown() // Try importing an invalid role in dryRun mode. rid1 := model.NewId() data := RoleImportData{ Name: &rid1, } err := th.App.importRole(&data, true, false) require.NotNil(t, err, "Should have failed to import.") _, err = th.App.Srv.Store.Role().GetByName(rid1) require.NotNil(t, err, "Should have failed to import.") // Try importing the valid role in dryRun mode. data.DisplayName = ptrStr("display name") err = th.App.importRole(&data, true, false) require.Nil(t, err, "Should have succeeded.") _, err = th.App.Srv.Store.Role().GetByName(rid1) require.NotNil(t, err, "Role should not have imported as we are in dry run mode.") // Try importing an invalid role. data.DisplayName = nil err = th.App.importRole(&data, false, false) require.NotNil(t, err, "Should have failed to import.") _, err = th.App.Srv.Store.Role().GetByName(rid1) require.NotNil(t, err, "Role should not have imported.") // Try importing a valid role with all params set. data.DisplayName = ptrStr("display name") data.Description = ptrStr("description") data.Permissions = &[]string{"invite_user", "add_user_to_team"} err = th.App.importRole(&data, false, false) require.Nil(t, err, "Should have succeeded.") role, err := th.App.Srv.Store.Role().GetByName(rid1) require.Nil(t, err, "Should have found the imported role.") assert.Equal(t, *data.Name, role.Name) assert.Equal(t, *data.DisplayName, role.DisplayName) assert.Equal(t, *data.Description, role.Description) assert.Equal(t, *data.Permissions, role.Permissions) assert.False(t, role.BuiltIn) assert.False(t, role.SchemeManaged) // Try changing all the params and reimporting. data.DisplayName = ptrStr("new display name") data.Description = ptrStr("description") data.Permissions = &[]string{"use_slash_commands"} err = th.App.importRole(&data, false, true) require.Nil(t, err, "Should have succeeded. 
%v", err) role, err = th.App.Srv.Store.Role().GetByName(rid1) require.Nil(t, err, "Should have found the imported role.") assert.Equal(t, *data.Name, role.Name) assert.Equal(t, *data.DisplayName, role.DisplayName) assert.Equal(t, *data.Description, role.Description) assert.Equal(t, *data.Permissions, role.Permissions) assert.False(t, role.BuiltIn) assert.True(t, role.SchemeManaged) // Check that re-importing with only required fields doesn't update the others. data2 := RoleImportData{ Name: &rid1, DisplayName: ptrStr("new display name again"), } err = th.App.importRole(&data2, false, false) require.Nil(t, err, "Should have succeeded.") role, err = th.App.Srv.Store.Role().GetByName(rid1) require.Nil(t, err, "Should have found the imported role.") assert.Equal(t, *data2.Name, role.Name) assert.Equal(t, *data2.DisplayName, role.DisplayName) assert.Equal(t, *data.Description, role.Description) assert.Equal(t, *data.Permissions, role.Permissions) assert.False(t, role.BuiltIn) assert.False(t, role.SchemeManaged) }
explode_data.jsonl/67135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1094 }
[ 2830, 3393, 11511, 11511, 9030, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 340, 16867, 270, 836, 682, 4454, 2822, 197, 322, 9735, 49895, 458, 8318, 3476, 304, 9058, 6727, 3856, 624, 197, 1869, 16, 1669, 1614, 7121, 764, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSnapgeneGenbankRegression(t *testing.T) { snapgene := Read("../../data/puc19_snapgene.gb") if snapgene.Sequence == "" { t.Errorf("Parsing snapgene returned an empty string") } }
explode_data.jsonl/74812
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 61871, 42371, 9967, 17033, 45200, 1155, 353, 8840, 836, 8, 341, 1903, 6861, 42371, 1669, 4457, 36800, 691, 4322, 1754, 16, 24, 74175, 42371, 77262, 5130, 743, 10658, 42371, 63537, 621, 1591, 341, 197, 3244, 13080, 445, 68839, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestEngine_DeleteSeries(t *testing.T) { for _, index := range tsdb.RegisteredIndexes() { t.Run(index, func(t *testing.T) { // Create a few points. p1 := MustParsePointString("cpu,host=A value=1.1 1000000000") p2 := MustParsePointString("cpu,host=B value=1.2 2000000000") p3 := MustParsePointString("cpu,host=A sum=1.3 3000000000") e, err := NewEngine(index) if err != nil { t.Fatal(err) } // mock the planner so compactions don't run during the test e.CompactionPlan = &mockPlanner{} if err := e.Open(); err != nil { t.Fatal(err) } defer e.Close() if err := e.writePoints(p1, p2, p3); err != nil { t.Fatalf("failed to write points: %s", err.Error()) } if err := e.WriteSnapshot(); err != nil { t.Fatalf("failed to snapshot: %s", err.Error()) } keys := e.FileStore.Keys() if exp, got := 3, len(keys); exp != got { t.Fatalf("series count mismatch: exp %v, got %v", exp, got) } itr := &seriesIterator{keys: [][]byte{[]byte("cpu,host=A")}} if err := e.DeleteSeriesRange(itr, math.MinInt64, math.MaxInt64); err != nil { t.Fatalf("failed to delete series: %v", err) } keys = e.FileStore.Keys() if exp, got := 1, len(keys); exp != got { t.Fatalf("series count mismatch: exp %v, got %v", exp, got) } exp := "cpu,host=B#!~#value" if _, ok := keys[exp]; !ok { t.Fatalf("wrong series deleted: exp %v, got %v", exp, keys) } }) } }
explode_data.jsonl/28090
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 632 }
[ 2830, 3393, 4571, 57418, 25544, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1922, 1669, 2088, 10591, 1999, 19983, 291, 62229, 368, 341, 197, 3244, 16708, 7195, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 197, 322, 4230, 264, 2421, 3501...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestNewDefaultManifest(t *testing.T) { manifest := newDefaultManifest() assert.Equal(t, "Go", manifest.Template.Format) assert.Equal(t, "snake_case", manifest.Output.FileNaming.Style) assert.Equal(t, "service.tmpl", manifest.TemplateFiles["service"].FilePath) assert.Equal(t, "sub_service.tmpl", manifest.TemplateFiles["sub_service"].FilePath) assert.Equal(t, "types.tmpl", manifest.TemplateFiles["types"].FilePath) assert.Nil(t, manifest.TemplateFiles["other"]) }
explode_data.jsonl/48936
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 3564, 3675, 38495, 1155, 353, 8840, 836, 8, 341, 197, 42315, 1669, 501, 3675, 38495, 2822, 6948, 12808, 1155, 11, 330, 10850, 497, 14455, 52530, 9978, 340, 6948, 12808, 1155, 11, 330, 72139, 19096, 497, 14455, 34246, 8576, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceCreateInvalidDataJSON(t *testing.T) { tempDir, err := ioutil.TempDir("", "kn-file") defer os.RemoveAll(tempDir) assert.NilError(t, err) tempFile := filepath.Join(tempDir, "invalid.json") // Double curly bracket at the beginning of file invalidData := strings.Replace(serviceJSON, "{\n", "{{\n", 1) err = ioutil.WriteFile(tempFile, []byte(invalidData), os.FileMode(0666)) assert.NilError(t, err) _, _, _, err = fakeServiceCreate([]string{"service", "create", "foo", "--filename", tempFile}, false) assert.Assert(t, util.ContainsAll(err.Error(), "invalid", "character", "'{'", "beginning")) // Remove closing quote on key invalidData = strings.Replace(serviceJSON, "metadata\"", "metadata", 1) err = ioutil.WriteFile(tempFile, []byte(invalidData), os.FileMode(0666)) assert.NilError(t, err) _, _, _, err = fakeServiceCreate([]string{"service", "create", "foo", "--filename", tempFile}, false) assert.Assert(t, util.ContainsAll(err.Error(), "invalid", "character", "'\\n'", "string", "literal")) // Remove opening square bracket invalidData = strings.Replace(serviceJSON, " [", "", 1) err = ioutil.WriteFile(tempFile, []byte(invalidData), os.FileMode(0666)) assert.NilError(t, err) _, _, _, err = fakeServiceCreate([]string{"service", "create", "foo", "--filename", tempFile}, false) assert.Assert(t, util.ContainsAll(err.Error(), "invalid", "character", "']'", "after", "key:value")) }
explode_data.jsonl/42463
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 510 }
[ 2830, 3393, 1860, 4021, 7928, 1043, 5370, 1155, 353, 8840, 836, 8, 341, 16280, 6184, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 19095, 14203, 1138, 16867, 2643, 84427, 9758, 6184, 340, 6948, 59678, 1454, 1155, 11, 1848, 340, 16280, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_etcdRepository_CreateCoordinates(t *testing.T) { r := &etcdRepository{} assert.Panics(t, func() { _ = r.CreateCoordinates(context.Background(), "a", 1, 1) }) }
explode_data.jsonl/11285
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 66 }
[ 2830, 3393, 45668, 4385, 4624, 34325, 43876, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 609, 295, 4385, 4624, 16094, 6948, 1069, 276, 1211, 1155, 11, 2915, 368, 314, 716, 284, 435, 7251, 43876, 5378, 19047, 1507, 330, 64, 497, 220, 16,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestMultipleGroupSnapshots(t *testing.T) { defer resetTest() name1 := "test-group-snap-1" name2 := "test-group-snap-2" namespace := "default" selectors := map[string]string{"app": "mysql"} _, err := core.Instance().CreateNamespace(&v1.Namespace{ObjectMeta: metav1.ObjectMeta{Name: namespace}}) require.NoError(t, err, "Error creating namespace") createGroupSnapshotAndVerify(t, name1, namespace, selectors, "", "", nil, nil, 0) createGroupSnapshotAndVerify(t, name2, namespace, selectors, "", "", nil, nil, 0) expected := fmt.Sprintf("NAME STATUS STAGE SNAPSHOTS CREATED\n"+ "%v 0 \n"+ "%v 0 \n", name1, name2) cmdArgs := []string{"get", "groupsnapshots", "-n", namespace, name1, name2} testCommon(t, cmdArgs, nil, expected, false) // Should get all group snapshots if no name given cmdArgs = []string{"get", "groupsnapshots"} testCommon(t, cmdArgs, nil, expected, false) name3 := "test-group-snap-3" customNamespace := "ns1" _, err = core.Instance().CreateNamespace(&v1.Namespace{ObjectMeta: metav1.ObjectMeta{Name: customNamespace}}) require.NoError(t, err, "Error creating namespace") createGroupSnapshotAndVerify(t, name3, customNamespace, selectors, "", "", nil, nil, 0) // get from all namespaces expected = fmt.Sprintf("NAMESPACE NAME STATUS STAGE SNAPSHOTS CREATED\n"+ "%v %v 0 \n"+ "%v %v 0 \n"+ "%v %v 0 \n", namespace, name1, namespace, name2, customNamespace, name3) cmdArgs = []string{"get", "groupsnapshots", "--all-namespaces"} testCommon(t, cmdArgs, nil, expected, false) }
explode_data.jsonl/20431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 771 }
[ 2830, 3393, 32089, 2808, 61871, 27634, 1155, 353, 8840, 836, 8, 341, 16867, 7585, 2271, 2822, 11609, 16, 1669, 330, 1944, 4351, 1331, 6861, 12, 16, 698, 11609, 17, 1669, 330, 1944, 4351, 1331, 6861, 12, 17, 698, 56623, 1669, 330, 2258...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateTask(t *testing.T) { serv, client := setUp(t) defer tearDown(t, serv) createdQueue := createTestQueue(t, client) createTaskRequest := taskspb.CreateTaskRequest{ Parent: createdQueue.GetName(), Task: &taskspb.Task{ MessageType: &taskspb.Task_HttpRequest{ HttpRequest: &taskspb.HttpRequest{ Url: "http://www.google.com", }, }, }, } createdTask, err := client.CreateTask(context.Background(), &createTaskRequest) require.NoError(t, err) assert.NotEmpty(t, createdTask.GetName()) assert.Contains(t, createdTask.GetName(), "projects/TestProject/locations/TestLocation/queues/test/tasks/") assert.Equal(t, "http://www.google.com", createdTask.GetHttpRequest().GetUrl()) assert.Equal(t, taskspb.HttpMethod_POST, createdTask.GetHttpRequest().GetHttpMethod()) assert.EqualValues(t, 0, createdTask.GetDispatchCount()) }
explode_data.jsonl/72416
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 311 }
[ 2830, 3393, 4021, 6262, 1155, 353, 8840, 836, 8, 341, 1903, 648, 11, 2943, 1669, 18620, 1155, 340, 16867, 32825, 1155, 11, 4853, 692, 197, 7120, 7554, 1669, 1855, 2271, 7554, 1155, 11, 2943, 692, 39263, 6262, 1900, 1669, 3383, 43467, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStream_ReadWriteTo(t *testing.T) { addr := genAddr(t) done := make(chan struct{}) want := "HELLO" go func() { defer close(done) stream := makeReadStream(addr, t) reader := stream.Reader() read := 0 buf := make([]byte, 1) res := bytes.NewBuffer(nil) for { n, err := reader.Read(buf) if err != nil { break } res.Write(buf[:n]) read += n } assert.Equal(t, len(want), read) assert.Equal(t, want, res.String()) assert.Nil(t, reader.Close()) }() time.Sleep(time.Second) // write stream := makeWriteStream(addr, t) n, err := stream.ReadFrom(bytes.NewReader([]byte(want))) assert.Nil(t, err) assert.Equal(t, len(want), int(n)) <-done }
explode_data.jsonl/58209
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 3027, 38381, 7985, 1249, 1155, 353, 8840, 836, 8, 341, 53183, 1669, 4081, 13986, 1155, 340, 40495, 1669, 1281, 35190, 2036, 37790, 50780, 1669, 330, 50712, 1593, 1837, 30680, 2915, 368, 341, 197, 16867, 3265, 34232, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRemoveBillingManager(t *testing.T) { Convey("Remove billing manager", t, func() { setup(MockRoute{"DELETE", "/v2/organizations/bc7b4caf-f4b8-4d85-b126-0729b9351e56/billing_managers/user-guid", []string{""}, "", 204, "", nil}, t) defer teardown() c := &Config{ ApiAddress: server.URL, Token: "foobar", } client, err := NewClient(c) So(err, ShouldBeNil) org := &Org{ Guid: "bc7b4caf-f4b8-4d85-b126-0729b9351e56", c: client, } err = org.RemoveBillingManager("user-guid") So(err, ShouldBeNil) }) }
explode_data.jsonl/4453
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 13021, 82007, 2043, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 13021, 33531, 6645, 497, 259, 11, 2915, 368, 341, 197, 84571, 66436, 4899, 4913, 14424, 497, 3521, 85, 17, 14, 69253, 3470, 66, 22, 65, 19, 68796, 2220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestResultOutputs_GetGeneratedByBundle(t *testing.T) { testcases := []struct { value string wantValue bool wantOK bool }{ {value: "true", wantValue: true, wantOK: true}, {value: "false", wantValue: false, wantOK: true}, {value: "invalid", wantValue: false, wantOK: false}, } for _, tc := range testcases { t.Run("existing metadata", func(t *testing.T) { outputs := OutputMetadata{} err := outputs.SetMetadata("test1", OutputGeneratedByBundle, tc.value) require.NoError(t, err, "SetMetadata failed") generatedByBundle, ok := outputs.GetGeneratedByBundle("test1") require.Equal(t, tc.wantOK, ok, "GetGeneratedByBundle did not return the expected ok value") assert.Equal(t, tc.wantValue, generatedByBundle, "GetGeneratedByBundle did not return the expected value") }) } }
explode_data.jsonl/70429
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 303 }
[ 2830, 3393, 2077, 61438, 13614, 15741, 1359, 8409, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 16309, 257, 914, 198, 197, 50780, 1130, 1807, 198, 197, 50780, 3925, 262, 1807, 198, 197, 59403, 197, 197, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExpressionLessThanEqualToUnsatisfiable(t *testing.T) { x := NewVariable("x") solver := NewSolver() err := solver.AddConstraint(NewExpression(100).LessThanOrEqualToVariable(x)) assert.NoError(t, err) solver.UpdateVariables() assert.True(t, x.Value <= 100) err = solver.AddConstraint(x.EqualsFloat(10)) assert.Error(t, err) solver.UpdateVariables() assert.InDelta(t, 10, x.Value, Epsilon) }
explode_data.jsonl/36237
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 162 }
[ 2830, 3393, 9595, 27451, 26067, 22759, 1806, 82, 7478, 2156, 1155, 353, 8840, 836, 8, 341, 10225, 1669, 1532, 7827, 445, 87, 1138, 1903, 7921, 1669, 1532, 63830, 741, 9859, 1669, 28961, 1904, 17890, 35063, 9595, 7, 16, 15, 15, 568, 27...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDelete(t *testing.T) { vec := NewGaugeVec( GaugeOpts{ Name: "test", Help: "helpless", }, []string{"l1", "l2"}, ) testDelete(t, vec) }
explode_data.jsonl/14618
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 6435, 1155, 353, 8840, 836, 8, 341, 40213, 1669, 1532, 38, 19392, 10050, 1006, 197, 9600, 19392, 43451, 515, 298, 21297, 25, 330, 1944, 756, 298, 197, 12689, 25, 330, 8653, 1717, 756, 197, 197, 1583, 197, 197, 1294, 917, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestName(t *testing.T) { l, err := New() if err != nil { t.Fatal(err) } if l.String() != "zap" { t.Errorf("name is error %s", l.String()) } t.Logf("test logger name: %s", l.String()) }
explode_data.jsonl/1944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 675, 1155, 353, 8840, 836, 8, 341, 8810, 11, 1848, 1669, 1532, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 743, 326, 6431, 368, 961, 330, 92371, 1, 341, 197, 3244, 13080, 445, 606, 374, 1465, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMap(t *testing.T) { m := map[string][]int{} m["b"] = []int{100} r := m["a"] for k, v := range m { fmt.Println(fmt.Sprintf("k: %s, v: %+v", k, v)) } for k := range m { fmt.Println(fmt.Sprintf("k: %s", k)) } assert.True(t, r == nil) }
explode_data.jsonl/77879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 2227, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 2415, 14032, 45725, 396, 16094, 2109, 1183, 65, 1341, 284, 3056, 396, 90, 16, 15, 15, 532, 7000, 1669, 296, 1183, 64, 25912, 2023, 595, 11, 348, 1669, 2088, 296, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGovimModReadonly(t *testing.T) { const mod = ` -- go.mod -- module mod.com go 1.13 -- main.go -- package main import "example.com/blah" func main() { println(blah.Name) } ` withOptions( EditorConfig{ Env: map[string]string{ "GOFLAGS": "-mod=readonly", }, }, WithProxyFiles(proxy), WithModes(WithoutExperiments), ).run(t, mod, func(t *testing.T, env *Env) { env.OpenFile("main.go") original := env.ReadWorkspaceFile("go.mod") env.Await( env.DiagnosticAtRegexp("main.go", `"example.com/blah"`), ) got := env.ReadWorkspaceFile("go.mod") if got != original { t.Fatalf("go.mod file modified:\n%s", tests.Diff(original, got)) } env.RunGoCommand("get", "example.com/blah@v1.2.3") env.RunGoCommand("mod", "tidy") env.Await( EmptyDiagnostics("main.go"), ) }) }
explode_data.jsonl/3747
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 66901, 318, 4459, 4418, 3243, 1155, 353, 8840, 836, 8, 341, 4777, 1463, 284, 22074, 313, 728, 10929, 39514, 4352, 1463, 905, 271, 3346, 220, 16, 13, 16, 18, 198, 313, 1887, 18002, 39514, 1722, 1887, 271, 474, 330, 8687, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWalkNotFoundSkipError(t *testing.T) { // this doesn't work for WalkR newListDirs(t, nil, true, listResults{ "": {err: errDirNotFound}, }, errorMap{ "": ErrorSkipDir, }, nil, ).Walk() }
explode_data.jsonl/65747
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 48849, 10372, 35134, 1454, 1155, 353, 8840, 836, 8, 341, 197, 322, 419, 3171, 944, 975, 369, 12554, 49, 198, 8638, 852, 97384, 1155, 11, 2092, 11, 830, 345, 197, 14440, 9801, 515, 298, 197, 28796, 314, 615, 25, 1848, 618...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClosingWithEnqueuedSegments(t *testing.T) { c := context.New(t, defaultMTU) defer c.Cleanup() c.CreateConnected(context.TestInitialSequenceNumber, 30000, -1 /* epRcvBuf */) ep := c.EP c.EP = nil if got, want := tcp.EndpointState(ep.State()), tcp.StateEstablished; got != want { t.Errorf("unexpected endpoint state: want %d, got %d", want, got) } // Send a FIN for ESTABLISHED --> CLOSED-WAIT iss := seqnum.Value(context.TestInitialSequenceNumber).Add(1) c.SendPacket(nil, &context.Headers{ SrcPort: context.TestPort, DstPort: c.Port, Flags: header.TCPFlagFin | header.TCPFlagAck, SeqNum: iss, AckNum: c.IRS.Add(1), RcvWnd: 30000, }) // Get the ACK for the FIN we sent. checker.IPv4(t, c.GetPacket(), checker.TCP( checker.DstPort(context.TestPort), checker.TCPSeqNum(uint32(c.IRS)+1), checker.TCPAckNum(uint32(iss)+1), checker.TCPFlags(header.TCPFlagAck), ), ) // Give the stack a few ms to transition the endpoint out of ESTABLISHED // state. time.Sleep(10 * time.Millisecond) if got, want := tcp.EndpointState(ep.State()), tcp.StateCloseWait; got != want { t.Errorf("unexpected endpoint state: want %d, got %d", want, got) } // Close the application endpoint for CLOSE_WAIT --> LAST_ACK ep.Close() // Get the FIN checker.IPv4(t, c.GetPacket(), checker.TCP( checker.DstPort(context.TestPort), checker.TCPSeqNum(uint32(c.IRS)+1), checker.TCPAckNum(uint32(iss)+1), checker.TCPFlags(header.TCPFlagAck|header.TCPFlagFin), ), ) if got, want := tcp.EndpointState(ep.State()), tcp.StateLastAck; got != want { t.Errorf("unexpected endpoint state: want %s, got %s", want, got) } // Pause the endpoint`s protocolMainLoop. 
ep.(interface{ StopWork() }).StopWork() // Enqueue last ACK followed by an ACK matching the endpoint // // Send Last ACK for LAST_ACK --> CLOSED c.SendPacket(nil, &context.Headers{ SrcPort: context.TestPort, DstPort: c.Port, Flags: header.TCPFlagAck, SeqNum: iss.Add(1), AckNum: c.IRS.Add(2), RcvWnd: 30000, }) // Send a packet with ACK set, this would generate RST when // not using SYN cookies as in this test. c.SendPacket(nil, &context.Headers{ SrcPort: context.TestPort, DstPort: c.Port, Flags: header.TCPFlagAck | header.TCPFlagFin, SeqNum: iss.Add(2), AckNum: c.IRS.Add(2), RcvWnd: 30000, }) // Unpause endpoint`s protocolMainLoop. ep.(interface{ ResumeWork() }).ResumeWork() // Wait for the protocolMainLoop to resume and update state. time.Sleep(10 * time.Millisecond) // Expect the endpoint to be closed. if got, want := tcp.EndpointState(ep.State()), tcp.StateClose; got != want { t.Errorf("unexpected endpoint state: want %s, got %s", want, got) } if got := c.Stack().Stats().TCP.EstablishedClosed.Value(); got != 1 { t.Errorf("got c.Stack().Stats().TCP.EstablishedClosed = %d, want = 1", got) } if got := c.Stack().Stats().TCP.CurrentEstablished.Value(); got != 0 { t.Errorf("got stats.TCP.CurrentEstablished.Value() = %d, want = 0", got) } // Check if the endpoint was moved to CLOSED and netstack a reset in // response to the ACK packet that we sent after last-ACK. checker.IPv4(t, c.GetPacket(), checker.TCP( checker.DstPort(context.TestPort), checker.TCPSeqNum(uint32(c.IRS)+2), checker.TCPAckNum(0), checker.TCPFlags(header.TCPFlagRst), ), ) }
explode_data.jsonl/75931
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1378 }
[ 2830, 3393, 36294, 2354, 1702, 66547, 64813, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2266, 7121, 1155, 11, 1638, 8505, 52, 340, 16867, 272, 727, 60639, 2822, 1444, 7251, 21146, 5378, 8787, 6341, 14076, 2833, 11, 220, 18, 15, 15, 15,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestDataDump(t *testing.T) { AllConnections = NewConnectionsMap() s := CreateAndPopulateTestState() ds, err := state.DeepStateDisplayCopy(s) if err != nil { t.Error(err) } DisplayState = *ds d := GetDataDumps() if len(d) == 0 { t.Error("No data") } }
explode_data.jsonl/66527
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 93200, 51056, 1155, 353, 8840, 836, 8, 341, 197, 2403, 54751, 284, 1532, 54751, 2227, 741, 1903, 1669, 4230, 3036, 11598, 6334, 2271, 1397, 741, 83336, 11, 1848, 1669, 1584, 55602, 1397, 7020, 12106, 1141, 340, 743, 1848, 961, 209...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSAR_UnknownIMSI(t *testing.T) { hss := getTestHSSDiameterServer(t) swxProxy := getTestSwxProxy(t, hss, false, true, true) sar := &fegprotos.RegistrationRequest{UserName: "sub_unknown"} _, err := swxProxy.Register(context.Background(), sar) assert.EqualError(t, err, "rpc error: code = Code(5001) desc = Diameter Error: 5001 (USER_UNKNOWN)") }
explode_data.jsonl/73021
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 50, 934, 62, 13790, 1791, 13817, 1155, 353, 8840, 836, 8, 341, 9598, 778, 1669, 633, 2271, 39, 1220, 35, 36044, 5475, 1155, 340, 1903, 20984, 16219, 1669, 633, 2271, 13218, 87, 16219, 1155, 11, 305, 778, 11, 895, 11, 830...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnsureWindowsUserDataScriptPersists(t *testing.T) { t.Run("NoopsForNonWindowsHosts", func(t *testing.T) { h := &host.Host{ Distro: distro.Distro{Arch: evergreen.ArchLinuxAmd64}, } in := &userData{ Options: userdata.Options{ Directive: userdata.ShellScript + "/bin/bash", Content: "echo foo", }, } out := ensureWindowsUserDataScriptPersists(h, in) assert.Equal(t, in.Directive, out.Directive) assert.Equal(t, in.Content, out.Content) assert.Equal(t, in.Persist, out.Persist) }) t.Run("WithWindowsHost", func(t *testing.T) { for testName, testCase := range map[string]func(t *testing.T, h *host.Host){ "SetsPersistForScripts": func(t *testing.T, h *host.Host) { in := &userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo foo", }, } out := ensureWindowsUserDataScriptPersists(h, in) assert.Equal(t, in.Directive, out.Directive) assert.Equal(t, in.Content, out.Content) assert.True(t, out.Persist) }, "NoopsIfAlreadyPersisted": func(t *testing.T, h *host.Host) { in := &userData{ Options: userdata.Options{ Directive: userdata.PowerShellScript, Content: "echo foo", Persist: true, }, } out := ensureWindowsUserDataScriptPersists(h, in) assert.Equal(t, in.Directive, out.Directive) assert.Equal(t, in.Content, out.Content) assert.Equal(t, in.Persist, out.Persist) }, "NoopsForUnpersistable": func(t *testing.T, h *host.Host) { in := &userData{ Options: userdata.Options{ Directive: userdata.CloudConfig, Content: "echo foo", }, } out := ensureWindowsUserDataScriptPersists(h, in) assert.Equal(t, in.Directive, out.Directive) assert.Equal(t, in.Content, out.Content) assert.False(t, out.Persist) }, "RemovesPersistForUnpersistable": func(t *testing.T, h *host.Host) { in := &userData{ Options: userdata.Options{ Directive: userdata.CloudConfig, Content: "echo foo", Persist: true, }, } out := ensureWindowsUserDataScriptPersists(h, in) assert.Equal(t, in.Directive, out.Directive) assert.Equal(t, in.Content, out.Content) assert.False(t, out.Persist) }, } { 
t.Run(testName, func(t *testing.T) { h := &host.Host{ Distro: distro.Distro{Arch: evergreen.ArchWindowsAmd64}, } testCase(t, h) }) } }) }
explode_data.jsonl/3806
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1110 }
[ 2830, 3393, 64439, 13164, 39485, 5910, 58642, 1671, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 2753, 3721, 2461, 8121, 13164, 9296, 82, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 9598, 1669, 609, 3790, 29840, 515, 298, 10957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRejectBadNameStartWithNumber(t *testing.T) { g := gomega.NewGomegaWithT(t) isvc := makeTestInferenceService() isvc.Name = "1abcde" g.Expect(isvc.validate(c)).ShouldNot(gomega.Succeed()) }
explode_data.jsonl/1501
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 78413, 17082, 675, 3479, 2354, 2833, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 342, 32696, 7121, 38, 32696, 2354, 51, 1155, 340, 19907, 7362, 1669, 1281, 2271, 641, 2202, 1860, 741, 19907, 7362, 2967, 284, 330, 16, 13683, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPad(t *testing.T) { t.Parallel() var padTests = []padTest{ // Simple cases {"wow", "-", 5, "-wow-"}, {"pow", " ", 4, "pow "}, // Even-length str {"wow", "-", 10, "---wow----"}, // Input same length as n {"pow", " ", 3, "pow"}, // Input longer than n {"powwow", " ", 3, "powwow"}, } for _, td := range padTests { td := td t.Run(td.str, func(t *testing.T) { t.Parallel() assert.Equal(t, td.x, Pad(td.str, td.pad, td.n), "unexpected output") }) } }
explode_data.jsonl/57938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 230 }
[ 2830, 3393, 13731, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2405, 11016, 18200, 284, 3056, 13242, 2271, 515, 197, 197, 322, 8993, 5048, 198, 197, 197, 4913, 57454, 497, 77223, 220, 20, 11, 6523, 57454, 12, 7115, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParser_ParseGeneralAPIInfoMarkdown(t *testing.T) { p := New(SetMarkdownFileDirectory("testdata")) mainAPIFile := "testdata/markdown.go" err := p.ParseGeneralAPIInfo(mainAPIFile) assert.NoError(t, err) expected := `{ "swagger": "2.0", "info": { "description": "Swagger Example API Markdown Description", "title": "Swagger Example API", "termsOfService": "http://swagger.io/terms/", "contact": {}, "version": "1.0" }, "paths": {}, "tags": [ { "description": "Users Tag Markdown Description", "name": "users" } ] }` b, _ := json.MarshalIndent(p.swagger, "", " ") assert.Equal(t, expected, string(b)) p = New() err = p.ParseGeneralAPIInfo(mainAPIFile) assert.Error(t, err) }
explode_data.jsonl/63548
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 360 }
[ 2830, 3393, 6570, 77337, 15415, 7082, 1731, 68005, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 52474, 68005, 1703, 9310, 445, 92425, 5455, 36641, 7082, 1703, 1669, 330, 92425, 73598, 2923, 18002, 698, 9859, 1669, 281, 8937, 15415, 708...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_getCronCompensateInterval(t *testing.T) { type args struct { interval int64 } tests := []struct { name string args args want time.Duration }{ { args: args{interval: 5}, name: "normal", want: 5 * time.Minute, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := getCronCompensateInterval(tt.args.interval); got != tt.want { t.Errorf("getCronCompensateInterval() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/18877
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 3062, 34, 2248, 13552, 724, 349, 10256, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 2084, 6152, 526, 21, 19, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCacheQueryer_IngressesForService checks that IngressesForService returns
// only the ingresses whose default backend or rule paths reference the given
// service (ingress1 and ingress2 here, not ingress3), that a nil service is an
// error, and that a store listing failure is propagated as an error.
func TestCacheQueryer_IngressesForService(t *testing.T) { service := &corev1.Service{ TypeMeta: metav1.TypeMeta{APIVersion: "v1", Kind: "Service"}, ObjectMeta: metav1.ObjectMeta{Name: "service", Namespace: "default"}, } ingress1 := &extv1beta1.Ingress{ TypeMeta: metav1.TypeMeta{APIVersion: "extensions/v1beta1", Kind: "Ingress"}, ObjectMeta: metav1.ObjectMeta{Name: "ingress1", Namespace: "default"}, Spec: extv1beta1.IngressSpec{ Backend: &extv1beta1.IngressBackend{ ServiceName: "service", }, }, } ingress2 := &extv1beta1.Ingress{ TypeMeta: metav1.TypeMeta{APIVersion: "extensions/v1beta1", Kind: "Ingress"}, ObjectMeta: metav1.ObjectMeta{Name: "ingress2", Namespace: "default"}, Spec: extv1beta1.IngressSpec{ Rules: []extv1beta1.IngressRule{ { IngressRuleValue: extv1beta1.IngressRuleValue{ HTTP: &extv1beta1.HTTPIngressRuleValue{ Paths: []extv1beta1.HTTPIngressPath{ { Backend: extv1beta1.IngressBackend{ ServiceName: "service", }, }, { Backend: extv1beta1.IngressBackend{ ServiceName: "", }, }, }, }, }, }, { IngressRuleValue: extv1beta1.IngressRuleValue{}, }, }, }, } ingress3 := &extv1beta1.Ingress{ TypeMeta: metav1.TypeMeta{APIVersion: "extensions/v1beta1", Kind: "Ingress"}, ObjectMeta: metav1.ObjectMeta{Name: "ingress2", Namespace: "default"}, } cases := []struct { name string service *corev1.Service setup func(t *testing.T, o *storeFake.MockStore) expected []*extv1beta1.Ingress isErr bool }{ { name: "in general", service: service, setup: func(t *testing.T, o *storeFake.MockStore) { ingressesKey := store.Key{ Namespace: "default", APIVersion: "extensions/v1beta1", Kind: "Ingress", } o.EXPECT(). List(gomock.Any(), gomock.Eq(ingressesKey)). 
// Each case builds a fresh mock store; setup is optional and only wires
// expectations when the query is expected to reach the store.
Return(testutil.ToUnstructuredList(t, ingress1, ingress2, ingress3), false, nil) }, expected: []*extv1beta1.Ingress{ ingress1, ingress2, }, }, { name: "service is nil", service: nil, isErr: true, }, { name: "ingress list failure", service: service, setup: func(t *testing.T, o *storeFake.MockStore) { ingressesKey := store.Key{ Namespace: "default", APIVersion: "extensions/v1beta1", Kind: "Ingress", } o.EXPECT(). List(gomock.Any(), gomock.Eq(ingressesKey)). Return(nil, false, errors.New("failed")) }, isErr: true, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { controller := gomock.NewController(t) defer controller.Finish() o := storeFake.NewMockStore(controller) discovery := queryerFake.NewMockDiscoveryInterface(controller) if tc.setup != nil { tc.setup(t, o) } oq := New(o, discovery) ctx := context.Background() got, err := oq.IngressesForService(ctx, tc.service) if tc.isErr { require.Error(t, err) return } require.NoError(t, err) assert.Equal(t, tc.expected, got) }) } }
explode_data.jsonl/45668
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1481 }
[ 2830, 3393, 8233, 2859, 261, 25972, 2483, 288, 2461, 1860, 1155, 353, 8840, 836, 8, 341, 52934, 1669, 609, 98645, 16, 13860, 515, 197, 27725, 12175, 25, 256, 77520, 16, 10184, 12175, 90, 7082, 5637, 25, 330, 85, 16, 497, 16840, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStringNorm(t *testing.T) { want := []struct { s string norm string }{ {s: "A", norm: "a"}, {s: "0a", norm: "a"}, {s: "00a", norm: "a"}, {s: "a0", norm: "a0"}, {s: "a.0", norm: "a"}, {s: "a0a", norm: "a0a"}, {s: "a.0a", norm: "aa"}, {s: "10", norm: "10"}, {s: "01", norm: "1"}, {s: ".a", norm: "a"}, {s: "..a", norm: "a"}, } for _, v := range want { n := stringNorm(v.s) if n != v.norm { t.Errorf(`Got "%v", want norm("%v")=="%v"`, n, v.s, v.norm) } } }
explode_data.jsonl/68143
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 283 }
[ 2830, 3393, 703, 24993, 1155, 353, 8840, 836, 8, 341, 50780, 1669, 3056, 1235, 341, 197, 1903, 262, 914, 198, 197, 9038, 493, 914, 198, 197, 59403, 197, 197, 84386, 25, 330, 32, 497, 7465, 25, 330, 64, 7115, 197, 197, 84386, 25, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSingleIngressRuleSingleNs(t *testing.T) { gomega.RegisterTestingT(t) logger := logrus.DefaultLogger() logger.SetLevel(logging.DebugLevel) logger.Debug("TestSingleIngressRuleSingleNs") // Prepare input data. const ( nsIndex = 10 podIP = "192.168.2.1" ) rule := newContivRule("allow-http", renderer.ActionPermit, &net.IPNet{}, ipNetwork("192.168.1.0/24"), renderer.TCP, 80) ingress := []*renderer.ContivRule{rule} egress := []*renderer.ContivRule{} // Create an instance of SessionRuleCache ruleCache := &SessionRuleCache{ Deps: Deps{ Log: logger, }, } ruleCache.Init(func() ([]*SessionRule, error) { return []*SessionRule{}, nil }, tagPrefix) checkNamespaces(ruleCache) // Run single transaction. txn := ruleCache.NewTxn(false) added, removed, err := txn.Changes() gomega.Expect(err).To(gomega.BeNil()) gomega.Expect(added).To(gomega.BeEmpty()) gomega.Expect(removed).To(gomega.BeEmpty()) // Change config for one namespace txn.Update(nsIndex, GetOneHostSubnet(podIP), ingress, egress) checkNamespaces(ruleCache) // not yet committed added, removed, err = txn.Changes() gomega.Expect(err).To(gomega.BeNil()) gomega.Expect(len(added)).To(gomega.BeEquivalentTo(1)) gomega.Expect(len(removed)).To(gomega.BeEquivalentTo(0)) checkSessionRule(added, "LOCAL", nsIndex, "", 0, "192.168.1.0/24", 80, "TCP", "ALLOW") // Commit the transaction. txn.Commit() checkNamespaces(ruleCache, 10) // Verify cache content. cacheIngress, cacheEgress := ruleCache.LookupByNamespace(10) checkContivRules(cacheIngress, ingress) checkContivRules(cacheEgress, egress) }
explode_data.jsonl/74318
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 10888, 641, 2483, 11337, 10888, 47360, 1155, 353, 8840, 836, 8, 341, 3174, 32696, 19983, 16451, 51, 1155, 340, 17060, 1669, 1487, 20341, 13275, 7395, 741, 17060, 4202, 4449, 51687, 20345, 4449, 340, 17060, 20345, 445, 2271, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_hook(t *testing.T) { m := testModule(t, "plan-good") h := new(MockHook) p := testProvider("aws") p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Config: m, Hooks: []Hook{h}, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), }) _, diags := ctx.Plan() if diags.HasErrors() { t.Fatalf("unexpected errors: %s", diags.Err()) } if !h.PreDiffCalled { t.Fatal("should be called") } if !h.PostDiffCalled { t.Fatal("should be called") } }
explode_data.jsonl/28695
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 1972, 17, 20485, 32005, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 71700, 1138, 9598, 1669, 501, 66436, 31679, 340, 3223, 1669, 1273, 5179, 445, 8635, 1138, 3223, 98063, 24911, 284, 1273, 217...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestListImages(t *testing.T) { manager, fakeDocker := newTestDockerManager() dockerImages := []dockertypes.Image{{ID: "1111"}, {ID: "2222"}, {ID: "3333"}} expected := sets.NewString([]string{"1111", "2222", "3333"}...) fakeDocker.Images = dockerImages actualImages, err := manager.ListImages() if err != nil { t.Fatalf("unexpected error %v", err) } actual := sets.NewString() for _, i := range actualImages { actual.Insert(i.ID) } // We can compare the two sets directly because util.StringSet.List() // returns a "sorted" list. if !reflect.DeepEqual(expected.List(), actual.List()) { t.Errorf("expected %#v, got %#v", expected.List(), actual.List()) } }
explode_data.jsonl/31154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 852, 14228, 1155, 353, 8840, 836, 8, 341, 92272, 11, 12418, 35, 13659, 1669, 501, 2271, 35, 13659, 2043, 741, 2698, 13659, 14228, 1669, 3056, 77055, 529, 1804, 7528, 2979, 915, 25, 330, 16, 16, 16, 16, 14345, 314, 915, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestSqlEqOrder(t *testing.T) { b := Eq{"a": 1, "b": 2, "c": 3} sql, args, err := b.ToSql() assert.NoError(t, err) expectedSql := "a = ? AND b = ? AND c = ?" assert.Equal(t, expectedSql, sql) expectedArgs := []interface{}{1, 2, 3} assert.Equal(t, expectedArgs, args) }
explode_data.jsonl/44181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 8269, 27312, 4431, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 33122, 4913, 64, 788, 220, 16, 11, 330, 65, 788, 220, 17, 11, 330, 66, 788, 220, 18, 532, 30633, 11, 2827, 11, 1848, 1669, 293, 3274, 8269, 741, 6948, 35699,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInsertShardedUnownedVerify checks inserts into a sharded keyspace whose
// table has unowned lookup vindexes (twocol, onecol): before routing, each
// unowned lookup value must be verified with a select against its lookup
// table, and only then are the rows routed by the hash vindex. The expected
// log asserts the exact verification queries (first column of each lookup
// only) and the final ExecuteMultiShard grouping by shard.
func TestInsertShardedUnownedVerify(t *testing.T) { invschema := &vschemapb.SrvVSchema{ Keyspaces: map[string]*vschemapb.Keyspace{ "sharded": { Sharded: true, Vindexes: map[string]*vschemapb.Vindex{ "hash": { Type: "hash", }, "twocol": { Type: "lookup", Params: map[string]string{ "table": "lkp2", "from": "from1,from2", "to": "toc", }, }, "onecol": { Type: "lookup", Params: map[string]string{ "table": "lkp1", "from": "from", "to": "toc", }, }, }, Tables: map[string]*vschemapb.Table{ "t1": { ColumnVindexes: []*vschemapb.ColumnVindex{{ Name: "hash", Columns: []string{"id"}, }, { Name: "twocol", Columns: []string{"c1", "c2"}, }, { Name: "onecol", Columns: []string{"c3"}, }}, }, }, }, }, } vs, err := vindexes.BuildVSchema(invschema) if err != nil { t.Fatal(err) } ks := vs.Keyspaces["sharded"] ins := NewInsert( InsertSharded, ks.Keyspace, []sqltypes.PlanValue{{ // colVindex columns: id Values: []sqltypes.PlanValue{{ // rows for id Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(1), }, { Value: sqltypes.NewInt64(2), }, { Value: sqltypes.NewInt64(3), }}, }}, }, { // colVindex columns: c1, c2 Values: []sqltypes.PlanValue{{ // rows for c1 Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(4), }, { Value: sqltypes.NewInt64(5), }, { Value: sqltypes.NewInt64(6), }}, }, { // rows for c2 Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(7), }, { Value: sqltypes.NewInt64(8), }, { Value: sqltypes.NewInt64(9), }}, }}, }, { // colVindex columns: c3 Values: []sqltypes.PlanValue{{ // rows for c3 Values: []sqltypes.PlanValue{{ Value: sqltypes.NewInt64(10), }, { Value: sqltypes.NewInt64(11), }, { Value: sqltypes.NewInt64(12), }}, }}, }}, ks.Tables["t1"], "prefix", []string{" mid1", " mid2", " mid3"}, " suffix", ) // nonemptyResult will cause the lookup verify queries to succeed. 
nonemptyResult := sqltypes.MakeTestResult( sqltypes.MakeTestFields( "c1", "int64", ), "1", ) vc := &loggingVCursor{ shards: []string{"-20", "20-"}, shardForKsid: []string{"20-", "-20", "20-"}, results: []*sqltypes.Result{ nonemptyResult, nonemptyResult, nonemptyResult, nonemptyResult, nonemptyResult, nonemptyResult, }, } _, err = ins.Execute(vc, map[string]*querypb.BindVariable{}, false) if err != nil { t.Fatal(err) } vc.ExpectLog(t, []string{ // Perform verification for each colvindex. // Note that only first column of each colvindex is used. `Execute select from1 from lkp2 where from1 = :from1 and toc = :toc from1: type:INT64 value:"4" toc: type:VARBINARY value:"\026k@\264J\272K\326" false`, `Execute select from1 from lkp2 where from1 = :from1 and toc = :toc from1: type:INT64 value:"5" toc: type:VARBINARY value:"\006\347\352\"\316\222p\217" false`, `Execute select from1 from lkp2 where from1 = :from1 and toc = :toc from1: type:INT64 value:"6" toc: type:VARBINARY value:"N\261\220\311\242\372\026\234" false`, `Execute select from from lkp1 where from = :from and toc = :toc from: type:INT64 value:"10" toc: type:VARBINARY value:"\026k@\264J\272K\326" false`, `Execute select from from lkp1 where from = :from and toc = :toc from: type:INT64 value:"11" toc: type:VARBINARY value:"\006\347\352\"\316\222p\217" false`, `Execute select from from lkp1 where from = :from and toc = :toc from: type:INT64 value:"12" toc: type:VARBINARY value:"N\261\220\311\242\372\026\234" false`, // Based on shardForKsid, values returned will be 20-, -20, 20-. 
`ResolveDestinations sharded [value:"0" value:"1" value:"2" ] Destinations:DestinationKeyspaceID(166b40b44aba4bd6),DestinationKeyspaceID(06e7ea22ce92708f),DestinationKeyspaceID(4eb190c9a2fa169c)`, `ExecuteMultiShard ` + `sharded.20-: prefix mid1, mid3 suffix /* vtgate:: keyspace_id:166b40b44aba4bd6,4eb190c9a2fa169c */ ` + `{_c10: type:INT64 value:"4" _c11: type:INT64 value:"5" _c12: type:INT64 value:"6" ` + `_c20: type:INT64 value:"7" _c21: type:INT64 value:"8" _c22: type:INT64 value:"9" ` + `_c30: type:INT64 value:"10" _c31: type:INT64 value:"11" _c32: type:INT64 value:"12" ` + `_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `sharded.-20: prefix mid2 suffix /* vtgate:: keyspace_id:06e7ea22ce92708f */ ` + `{_c10: type:INT64 value:"4" _c11: type:INT64 value:"5" _c12: type:INT64 value:"6" ` + `_c20: type:INT64 value:"7" _c21: type:INT64 value:"8" _c22: type:INT64 value:"9" ` + `_c30: type:INT64 value:"10" _c31: type:INT64 value:"11" _c32: type:INT64 value:"12" ` + `_id0: type:INT64 value:"1" _id1: type:INT64 value:"2" _id2: type:INT64 value:"3" } ` + `true false`, }) }
explode_data.jsonl/55994
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2412 }
[ 2830, 3393, 13780, 2016, 20958, 1806, 18332, 32627, 1155, 353, 8840, 836, 8, 341, 17430, 11562, 3416, 1669, 609, 11562, 2407, 391, 65, 808, 10553, 53, 8632, 515, 197, 197, 8850, 27338, 25, 2415, 14032, 8465, 11562, 2407, 391, 65, 37863,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRunner(t *testing.T) { store := NewMockStoreIface() ticker := glock.NewMockTicker(time.Second) refreshTicker := glock.NewMockTicker(time.Second * 30) store.ListFunc.SetDefaultReturn([]Migration{ {ID: 1, Progress: 0.5}, }, nil) runner := newRunner(store, refreshTicker, &observation.TestContext) migrator := NewMockMigrator() migrator.ProgressFunc.SetDefaultReturn(0.5, nil) if err := runner.Register(1, migrator, MigratorOptions{ticker: ticker}); err != nil { t.Fatalf("unexpected error registering migrator: %s", err) } go runner.Start() tickN(ticker, 3) runner.Stop() if callCount := len(migrator.UpFunc.History()); callCount != 3 { t.Errorf("unexpected number of calls to Up. want=%d have=%d", 3, callCount) } if callCount := len(migrator.DownFunc.History()); callCount != 0 { t.Errorf("unexpected number of calls to Down. want=%d have=%d", 0, callCount) } }
explode_data.jsonl/11529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 19486, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 11571, 6093, 40, 1564, 741, 3244, 5215, 1669, 342, 1023, 7121, 11571, 87278, 9730, 32435, 340, 197, 17168, 87278, 1669, 342, 1023, 7121, 11571, 87278, 9730, 32435, 353, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRefreshableAccessTokens(t *testing.T) { initAccessTest(t, "") server := &config.ServerDetails{Url: *tests.JfrogUrl, AccessToken: *tests.JfrogAccessToken} err := coreenvsetup.GenerateNewLongTermRefreshableAccessToken(server) assert.NoError(t, err) assert.NotEmpty(t, server.RefreshToken) configCmd := commands.NewConfigCommand(commands.AddOrEdit, tests.ServerId).SetDetails(server).SetInteractive(false) assert.NoError(t, configCmd.Run()) defer deleteServerConfig(t) // Upload a file and assert the refreshable tokens were generated. artifactoryCommandExecutor := tests.NewJfrogCli(execMain, "jfrog rt", "") uploadedFiles := 1 err = uploadWithSpecificServerAndVerify(t, artifactoryCommandExecutor, tests.ServerId, "testdata/a/a1.in", uploadedFiles) if err != nil { return } curAccessToken, curRefreshToken, err := getAccessTokensFromConfig(t, tests.ServerId) if err != nil { return } assert.NotEmpty(t, curAccessToken) assert.NotEmpty(t, curRefreshToken) // Make the token always refresh. auth.InviteRefreshBeforeExpiryMinutes = 365 * 24 * 60 // Upload a file and assert tokens were refreshed. uploadedFiles++ err = uploadWithSpecificServerAndVerify(t, artifactoryCommandExecutor, tests.ServerId, "testdata/a/a2.in", uploadedFiles) if err != nil { return } curAccessToken, curRefreshToken, err = assertTokensChanged(t, tests.ServerId, curAccessToken, curRefreshToken) if err != nil { return } // Make the token not refresh. Verify Tokens did not refresh. auth.InviteRefreshBeforeExpiryMinutes = 0 uploadedFiles++ err = uploadWithSpecificServerAndVerify(t, artifactoryCommandExecutor, tests.ServerId, "testdata/a/b/b2.in", uploadedFiles) if err != nil { return } newAccessToken, newRefreshToken, err := getArtifactoryTokensFromConfig(t, tests.ServerId) if err != nil { return } assert.Equal(t, curAccessToken, newAccessToken) assert.Equal(t, curRefreshToken, newRefreshToken) // Cleanup cleanArtifactoryTest() }
explode_data.jsonl/35685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 662 }
[ 2830, 3393, 14567, 480, 6054, 29300, 1155, 353, 8840, 836, 8, 341, 28248, 6054, 2271, 1155, 11, 85617, 41057, 1669, 609, 1676, 22997, 7799, 90, 2864, 25, 353, 23841, 3503, 85982, 2864, 11, 94341, 25, 353, 23841, 3503, 85982, 37649, 532,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestMetaWithTimestamp checks the snapshot-backed meta table's time-travel
// semantics: two collections are added at timestamps t1 and t2 (t1 < t2), and
// Has/Get/List/GetPartition lookups at 0 (latest), t2, t1, and tsoStart must
// see exactly the state that existed at that timestamp — both collections at
// t2 and latest, only the first at t1, and nothing at tsoStart.
func TestMetaWithTimestamp(t *testing.T) { const ( collID1 = typeutil.UniqueID(1) collID2 = typeutil.UniqueID(2) collName1 = "t1" collName2 = "t2" partID1 = 11 partID2 = 12 partName1 = "p1" partName2 = "p2" ) rand.Seed(time.Now().UnixNano()) randVal := rand.Int() Params.Init() rootPath := fmt.Sprintf("/test/meta/%d", randVal) var tsoStart typeutil.Timestamp = 100 vtso := tsoStart ftso := func() typeutil.Timestamp { vtso++ return vtso } etcdCli, err := clientv3.New(clientv3.Config{Endpoints: Params.EtcdEndpoints}) assert.Nil(t, err) defer etcdCli.Close() skv, err := newMetaSnapshot(etcdCli, rootPath, TimestampPrefix, 7) assert.Nil(t, err) assert.NotNil(t, skv) mt, err := NewMetaTable(skv) assert.Nil(t, err) collInfo := &pb.CollectionInfo{ ID: 1, Schema: &schemapb.CollectionSchema{ Name: collName1, }, } collInfo.PartitionIDs = []int64{partID1} collInfo.PartitionNames = []string{partName1} collInfo.PartitionCreatedTimestamps = []uint64{ftso()} t1 := ftso() err = mt.AddCollection(collInfo, t1, nil, nil) assert.Nil(t, err) collInfo.ID = 2 collInfo.PartitionIDs = []int64{partID2} collInfo.PartitionNames = []string{partName2} collInfo.PartitionCreatedTimestamps = []uint64{ftso()} collInfo.Schema.Name = collName2 t2 := ftso() err = mt.AddCollection(collInfo, t2, nil, nil) assert.Nil(t, err) assert.True(t, mt.HasCollection(collID1, 0)) assert.True(t, mt.HasCollection(collID2, 0)) assert.True(t, mt.HasCollection(collID1, t2)) assert.True(t, mt.HasCollection(collID2, t2)) assert.True(t, mt.HasCollection(collID1, t1)) assert.False(t, mt.HasCollection(collID2, t1)) assert.False(t, mt.HasCollection(collID1, tsoStart)) assert.False(t, mt.HasCollection(collID2, tsoStart)) c1, err := mt.GetCollectionByID(collID1, 0) assert.Nil(t, err) c2, err := mt.GetCollectionByID(collID2, 0) assert.Nil(t, err) assert.Equal(t, collID1, c1.ID) assert.Equal(t, collID2, c2.ID) c1, err = mt.GetCollectionByID(collID1, t2) assert.Nil(t, err) c2, err = mt.GetCollectionByID(collID2, t2) assert.Nil(t, 
err) assert.Equal(t, collID1, c1.ID) assert.Equal(t, collID2, c2.ID) c1, err = mt.GetCollectionByID(collID1, t1) assert.Nil(t, err) c2, err = mt.GetCollectionByID(collID2, t1) assert.NotNil(t, err) assert.Equal(t, int64(1), c1.ID) c1, err = mt.GetCollectionByID(collID1, tsoStart) assert.NotNil(t, err) c2, err = mt.GetCollectionByID(collID2, tsoStart) assert.NotNil(t, err) c1, err = mt.GetCollectionByName(collName1, 0) assert.Nil(t, err) c2, err = mt.GetCollectionByName(collName2, 0) assert.Nil(t, err) assert.Equal(t, int64(1), c1.ID) assert.Equal(t, int64(2), c2.ID) c1, err = mt.GetCollectionByName(collName1, t2) assert.Nil(t, err) c2, err = mt.GetCollectionByName(collName2, t2) assert.Nil(t, err) assert.Equal(t, int64(1), c1.ID) assert.Equal(t, int64(2), c2.ID) c1, err = mt.GetCollectionByName(collName1, t1) assert.Nil(t, err) c2, err = mt.GetCollectionByName(collName2, t1) assert.NotNil(t, err) assert.Equal(t, int64(1), c1.ID) c1, err = mt.GetCollectionByName(collName1, tsoStart) assert.NotNil(t, err) c2, err = mt.GetCollectionByName(collName2, tsoStart) assert.NotNil(t, err) getKeys := func(m map[string]*pb.CollectionInfo) []string { keys := make([]string, 0, len(m)) for key := range m { keys = append(keys, key) } return keys } s1, err := mt.ListCollections(0) assert.Nil(t, err) assert.Equal(t, 2, len(s1)) assert.ElementsMatch(t, getKeys(s1), []string{collName1, collName2}) s1, err = mt.ListCollections(t2) assert.Nil(t, err) assert.Equal(t, 2, len(s1)) assert.ElementsMatch(t, getKeys(s1), []string{collName1, collName2}) s1, err = mt.ListCollections(t1) assert.Nil(t, err) assert.Equal(t, 1, len(s1)) assert.ElementsMatch(t, getKeys(s1), []string{collName1}) s1, err = mt.ListCollections(tsoStart) assert.Nil(t, err) assert.Equal(t, 0, len(s1)) p1, err := mt.GetPartitionByName(1, partName1, 0) assert.Nil(t, err) p2, err := mt.GetPartitionByName(2, partName2, 0) assert.Nil(t, err) assert.Equal(t, int64(11), p1) assert.Equal(t, int64(12), p2) assert.Nil(t, err) p1, err 
= mt.GetPartitionByName(1, partName1, t2) assert.Nil(t, err) p2, err = mt.GetPartitionByName(2, partName2, t2) assert.Nil(t, err) assert.Equal(t, int64(11), p1) assert.Equal(t, int64(12), p2) p1, err = mt.GetPartitionByName(1, partName1, t1) assert.Nil(t, err) _, err = mt.GetPartitionByName(2, partName2, t1) assert.NotNil(t, err) assert.Equal(t, int64(11), p1) _, err = mt.GetPartitionByName(1, partName1, tsoStart) assert.NotNil(t, err) _, err = mt.GetPartitionByName(2, partName2, tsoStart) assert.NotNil(t, err) }
explode_data.jsonl/67323
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2158 }
[ 2830, 3393, 12175, 2354, 20812, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 1444, 965, 915, 16, 256, 284, 943, 1314, 87443, 915, 7, 16, 340, 197, 1444, 965, 915, 17, 256, 284, 943, 1314, 87443, 915, 7, 17, 340, 197, 1444, 965, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestListener_Server checks the AMQP consumer listener: the channel is closed
// cleanly on Close (and a second Close returns ErrAlreadyClosed), the success
// callback fires when the handler returns nil, and the error callback fires
// with the wrapped failure when the handler returns an error. Each subtest
// bounds its waiting with a one-second timeout.
func TestListener_Server(t *testing.T) { t.Run("it closes the channel successfully", func(t *testing.T) { listener := consumer.Listen( "test-queue", consumer.OnSuccess(func(amqp.Delivery) { assert.Fail(t, "called success callback") }), consumer.OnError(func(_ amqp.Delivery, err error) { assert.Fail(t, "called error callback", err) }), ) sink := make(chan amqp.Delivery) var sinkCloser sync.Once ch := new(mocks.Channel) ch. On("Consume", "test-queue", "test-queue", false, false, false, false, amqp.Table(nil)). Return((<-chan amqp.Delivery)(sink), nil) ch. On("Close"). Run(func(args mock.Arguments) { sinkCloser.Do(func() { close(sink) }) }). Return(nil) closer, err := listener.Listen(nil, ch, handler.Func(func(context.Context, amqp.Delivery) error { return errors.New("should not be called") })) assert.NoError(t, err) assert.NotNil(t, closer) go func() { assert.NoError(t, closer.Close(context.Background())) }() select { case err := <-closer.Closed(): assert.NoError(t, err) case <-time.After(1 * time.Second): assert.Fail(t, "did not finish after 1 second") } // Closing a second time will return an error assert.Equal(t, consumer.ErrAlreadyClosed, closer.Close(context.Background())) }) t.Run("it calls the success callback when the message is being handled correctly", func(t *testing.T) { // Use this channel to mark completeness by sending a value from the // server callbacks. done := make(chan bool) defer close(done) received := false delivery := amqp.Delivery{ ConsumerTag: "test-queue", DeliveryTag: 1, } listener := consumer.Listen( "test-queue", consumer.OnSuccess(func(success amqp.Delivery) { received = true assert.Equal(t, delivery, success) done <- true }), consumer.OnError(func(_ amqp.Delivery, err error) { assert.Fail(t, "called error callback", err) done <- true }), ) sink := make(chan amqp.Delivery) defer close(sink) ch := new(mocks.Channel) ch. On("Consume", "test-queue", "test-queue", false, false, false, false, amqp.Table(nil)). 
Return((<-chan amqp.Delivery)(sink), nil) closer, err := listener.Listen(nil, ch, handler.Func(func(context.Context, amqp.Delivery) error { return nil })) assert.NoError(t, err) assert.NotNil(t, closer) sink <- delivery select { case <-done: assert.True(t, received) case <-time.After(1 * time.Second): assert.Fail(t, "did not finish after 1 second") } }) t.Run("it calls the error callback when the message handling failed", func(t *testing.T) { // Use this channel to mark completeness by sending a value from the // server callbacks. done := make(chan bool) defer close(done) received := false delivery := amqp.Delivery{ ConsumerTag: "test-queue", DeliveryTag: 1, } failure := errors.New("failed message") listener := consumer.Listen( "test-queue", consumer.OnSuccess(func(amqp.Delivery) { assert.Fail(t, "called success callback") done <- true }), consumer.OnError(func(failed amqp.Delivery, err error) { received = true assert.Equal(t, delivery, failed) assert.True(t, errors.Is(err, failure)) done <- true }), ) sink := make(chan amqp.Delivery) defer close(sink) ch := new(mocks.Channel) ch. On("Consume", "test-queue", "test-queue", false, false, false, false, amqp.Table(nil)). Return((<-chan amqp.Delivery)(sink), nil) closer, err := listener.Listen(nil, ch, handler.Func(func(context.Context, amqp.Delivery) error { // Fail message handling return failure })) assert.NoError(t, err) assert.NotNil(t, closer) sink <- delivery select { case <-done: assert.True(t, received) case <-time.After(1 * time.Second): assert.Fail(t, "did not finish after 1 second") } }) }
explode_data.jsonl/60537
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1525 }
[ 2830, 3393, 2743, 62320, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 275, 33250, 279, 5496, 7790, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 14440, 798, 1669, 11502, 68334, 1006, 298, 197, 1, 1944, 12, 4584, 756, 298, 37203, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetGoFunc(t *testing.T) { const SCRIPT = ` f(40, 2) ` r := New() r.Set("f", func(a, b int) int { return a + b }) v, err := r.RunString(SCRIPT) if err != nil { t.Fatal(err) } if v.ToInteger() != 42 { t.Fatalf("Unexpected result: %v", v) } }
explode_data.jsonl/10463
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 1649, 10850, 9626, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 1166, 7, 19, 15, 11, 220, 17, 340, 197, 3989, 7000, 1669, 1532, 741, 7000, 4202, 445, 69, 497, 2915, 2877, 11, 293, 526, 8, 526, 341, 197, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestService_SendOTP exercises the OTP service end-to-end against mocked
// repository and SMS backends: issuing an OTP, verifying it with the
// correct / an incorrect pin, verifying an expired OTP, and issuing with
// the Google automatic-SMS-verification template.
//
// NOTE(review): the mocks appear to record the last stored entity / last
// sent message in repo.LastItem and smsSvc.LastMessage — the subtests rely
// on that to recover the pin code. The same repo/smsSvc instances are
// shared across all subtests, so mock expectations accumulate.
func TestService_SendOTP(t *testing.T) {
	repo := new(MockRepository)
	smsSvc := new(MockSMSService)
	ctx := context.Background()
	phoneNumber := "+1234567890"
	t.Run("Default", func(t *testing.T) {
		svc := otpsvc.New(repo, smsSvc)
		repo.On("Create", ctx, mock.Anything).Return(nil)
		smsSvc.On("SendMessage", ctx, phoneNumber, mock.Anything).Return(nil)
		res, err := svc.SendOTP(ctx, otpsvc.SendOTPRequest{PhoneNumber: phoneNumber})
		assert.NoError(t, err)
		assert.NotNil(t, res)
		// Lookup by the UUID returned from SendOTP must yield the stored item.
		repo.On("Find", ctx, res.OTPUUID).Return(&repo.LastItem, nil)
		// The pin delivered by SMS (captured by the mock) must verify.
		res2, err := svc.VerifyOTP(ctx, otpsvc.VerifyOTPRequest{
			OTPUUID: res.OTPUUID,
			PinCode: smsSvc.LastMessage,
		})
		assert.NoError(t, err)
		assert.Equal(t, phoneNumber, res2.PhoneNumber)
	})
	t.Run("Invalid Pin Code", func(t *testing.T) {
		svc := otpsvc.New(repo, smsSvc)
		repo.On("Create", ctx, mock.Anything).Return(nil)
		smsSvc.On("SendMessage", ctx, phoneNumber, mock.Anything).Return(nil)
		res, err := svc.SendOTP(ctx, otpsvc.SendOTPRequest{PhoneNumber: phoneNumber})
		assert.NoError(t, err)
		assert.NotNil(t, res)
		repo.On("Find", ctx, res.OTPUUID).Return(&repo.LastItem, nil)
		// A wrong pin must surface ErrInvalidPinCode and no result.
		res2, err := svc.VerifyOTP(ctx, otpsvc.VerifyOTPRequest{
			OTPUUID: res.OTPUUID,
			PinCode: "Invalid",
		})
		var terr otpsvc.ErrInvalidPinCode
		assert.True(t, errors.As(err, &terr))
		assert.Nil(t, res2)
	})
	t.Run("Send Expired OTP", func(t *testing.T) {
		svc := otpsvc.New(repo, smsSvc)
		repo.On("Create", ctx, mock.Anything).Return(nil)
		smsSvc.On("SendMessage", ctx, phoneNumber, mock.Anything).Return(nil)
		res, err := svc.SendOTP(ctx, otpsvc.SendOTPRequest{PhoneNumber: phoneNumber})
		assert.NoError(t, err)
		assert.NotNil(t, res)
		// Force expiry by backdating the stored entity to "now" before Find.
		entity := repo.LastItem
		entity.ExpiresAt = time.Now()
		repo.On("Find", ctx, res.OTPUUID).Return(&entity, nil)
		res2, err := svc.VerifyOTP(ctx, otpsvc.VerifyOTPRequest{
			OTPUUID: res.OTPUUID,
			PinCode: "Invalid",
		})
		// Expired OTPs are reported as not-found-or-expired, regardless of pin.
		var terr otpsvc.ErrOTPNotFoundOrExpired
		assert.True(t, errors.As(err, &terr))
		assert.Nil(t, res2)
	})
	t.Run("WithGoogleAutomaticSMSVerificationTemplate", func(t *testing.T) {
		svc := otpsvc.New(repo, smsSvc, otpsvc.WithGoogleAutomaticSMSVerificationTemplate())
		repo.On("Create", ctx, mock.Anything).Return(nil)
		smsSvc.On("SendMessage", ctx, phoneNumber, mock.Anything).Return(nil)
		hash := "FA+9qCX9VSu"
		res, err := svc.SendOTP(
			ctx,
			otpsvc.SendOTPRequest{PhoneNumber: phoneNumber},
			otpsvc.WithGoogleAutomaticSMSVerification(hash),
		)
		assert.NoError(t, err)
		assert.NotNil(t, res)
		repo.On("Find", ctx, res.OTPUUID).Return(&repo.LastItem, nil)
		// The Google template puts the pin on the first line and the app
		// hash on the last line of the SMS body.
		msg := strings.Split(smsSvc.LastMessage, "\n")
		pincode := msg[0]
		hash2 := msg[len(msg)-1]
		assert.Equal(t, hash, hash2)
		res2, err := svc.VerifyOTP(ctx, otpsvc.VerifyOTPRequest{
			OTPUUID: res.OTPUUID,
			PinCode: pincode,
		})
		assert.NoError(t, err)
		assert.Equal(t, phoneNumber, res2.PhoneNumber)
	})
}
explode_data.jsonl/16532
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1297 }
[ 2830, 3393, 1860, 46267, 90146, 1155, 353, 8840, 836, 8, 341, 17200, 5368, 1669, 501, 66436, 4624, 340, 1903, 1011, 92766, 1669, 501, 66436, 9501, 1220, 1017, 340, 20985, 1669, 2266, 19047, 741, 197, 58173, 1669, 6630, 16, 17, 18, 19, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFileExistenceWithWrappedError(t *testing.T) { fileCheck := &ipb.FileCheck{ FilesToCheck: []*ipb.FileSet{testconfigcreator.SingleFileWithPath(nonExistentFilePath)}, CheckType: &ipb.FileCheck_Existence{Existence: &ipb.ExistenceCheck{ShouldExist: false}}, } openFileFunc := func(ctx context.Context, filePath string) (io.ReadCloser, error) { return nil, fmt.Errorf("Error: %w", os.ErrNotExist) } expectedResult := &apb.ComplianceResult{Id: "id", ComplianceOccurrence: &cpb.ComplianceOccurrence{}} check := createFileCheckBatch(t, "id", []*ipb.FileCheck{fileCheck}, newFakeAPI(withOpenFileFunc(openFileFunc))) resultMap, err := check.Exec() if err != nil { t.Fatalf("check.Exec() returned an error: %v", err) } result, gotSingleton := singleComplianceResult(resultMap) if !gotSingleton { t.Fatalf("check.Exec() expected to return 1 result, got %d", len(resultMap)) } if diff := cmp.Diff(expectedResult, result, protocmp.Transform()); diff != "" { t.Errorf("check.Exec() returned unexpected diff (-want +got):\n%s", diff) } }
explode_data.jsonl/24472
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 1703, 25613, 763, 2354, 67795, 1454, 1155, 353, 8840, 836, 8, 341, 17661, 3973, 1669, 609, 573, 65, 8576, 3973, 515, 197, 197, 10809, 1249, 3973, 25, 29838, 573, 65, 8576, 1649, 90, 1944, 1676, 32398, 23119, 1703, 89534, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMsgBufferThrottler verifies the per-node message-buffer throttler
// with a limit of 3: the first three Acquires per node return immediately,
// later ones block until a Release for that node, and releases on one node
// never unblock waiters on another.
//
// NOTE(review): blocking is asserted via 50ms timeouts, so this test is
// timing-sensitive and could flake under heavy load.
func TestMsgBufferThrottler(t *testing.T) {
	assert := assert.New(t)
	throttler, err := newInboundMsgBufferThrottler("", prometheus.NewRegistry(), 3)
	assert.NoError(err)
	nodeID1, nodeID2 := ids.GenerateTestShortID(), ids.GenerateTestShortID()
	// Acquire shouldn't block for first 3
	throttler.Acquire(nodeID1)
	throttler.Acquire(nodeID1)
	throttler.Acquire(nodeID1)
	assert.Len(throttler.nodeToNumProcessingMsgs, 1)
	assert.EqualValues(3, throttler.nodeToNumProcessingMsgs[nodeID1])
	// Acquire shouldn't block for other node
	throttler.Acquire(nodeID2)
	throttler.Acquire(nodeID2)
	throttler.Acquire(nodeID2)
	assert.Len(throttler.nodeToNumProcessingMsgs, 2)
	assert.EqualValues(3, throttler.nodeToNumProcessingMsgs[nodeID1])
	assert.EqualValues(3, throttler.nodeToNumProcessingMsgs[nodeID2])
	// Acquire should block for 4th acquire
	done := make(chan struct{})
	go func() {
		throttler.Acquire(nodeID1)
		done <- struct{}{}
	}()
	select {
	case <-done:
		t.Fatal("should block on acquiring")
	case <-time.After(50 * time.Millisecond):
	}
	// Acquire should block for 5th acquire
	done2 := make(chan struct{})
	go func() {
		throttler.Acquire(nodeID1)
		done2 <- struct{}{}
	}()
	select {
	case <-done2:
		t.Fatal("should block on acquiring")
	case <-time.After(50 * time.Millisecond):
	}
	throttler.Release(nodeID1)
	// fourth acquire should be unblocked
	<-done
	assert.Len(throttler.nodeToNumProcessingMsgs, 2)
	assert.EqualValues(3, throttler.nodeToNumProcessingMsgs[nodeID2])
	// But not the other (fifth) acquire, which still waits for capacity.
	select {
	case <-done2:
		t.Fatal("should be blocked")
	case <-time.After(50 * time.Millisecond):
	}
	// Releasing from other node should have no effect
	throttler.Release(nodeID2)
	throttler.Release(nodeID2)
	throttler.Release(nodeID2)
	select {
	case <-done2:
		t.Fatal("should be blocked")
	case <-time.After(50 * time.Millisecond):
	}
	// Unblock fifth acquire
	throttler.Release(nodeID1)
	<-done2
	// Release remaining 3 acquires; the node's entry should be removed
	// from the tracking map once its count reaches zero.
	throttler.Release(nodeID1)
	throttler.Release(nodeID1)
	throttler.Release(nodeID1)
	assert.Len(throttler.nodeToNumProcessingMsgs, 0)
}
explode_data.jsonl/3364
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 803 }
[ 2830, 3393, 6611, 4095, 1001, 46689, 1536, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 70479, 46689, 1536, 11, 1848, 1669, 501, 641, 10891, 6611, 4095, 1001, 46689, 1536, 19814, 2706, 39705, 7121, 15603, 1507, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCollection_RemoveAll is an integration test for RemoveAll against a
// live collection: it seeds four documents, then checks multi-document
// deletion by filter, a non-matching filter, an empty filter (delete all),
// and invalid filters (nil and a non-document value).
//
// NOTE(review): assertions are order-dependent — each step relies on the
// collection state left by the previous one.
func TestCollection_RemoveAll(t *testing.T) {
	ast := require.New(t)
	cli := initClient("test")
	defer cli.Close(context.Background())
	defer cli.DropCollection(context.Background())
	cli.EnsureIndexes(context.Background(), nil, []string{"name"})
	// Seed: two "Alice" docs, one "Lucas", one "Rocket".
	id1 := primitive.NewObjectID()
	id2 := primitive.NewObjectID()
	id3 := primitive.NewObjectID()
	id4 := primitive.NewObjectID()
	docs := []interface{}{
		bson.D{{Key: "_id", Value: id1}, {Key: "name", Value: "Alice"}, {Key: "age", Value: 18}},
		bson.D{{Key: "_id", Value: id2}, {Key: "name", Value: "Alice"}, {Key: "age", Value: 19}},
		bson.D{{Key: "_id", Value: id3}, {Key: "name", Value: "Lucas"}, {Key: "age", Value: 20}},
		bson.D{{Key: "_id", Value: id4}, {Key: "name", Value: "Rocket"}, {Key: "age", Value: 23}},
	}
	_, _ = cli.InsertMany(context.Background(), docs)
	var err error
	// delete record: name = "Alice" ,after that, expect - record : name = "Alice"
	filter1 := bson.M{
		"name": "Alice",
	}
	// Also exercises passing explicit delete options through RemoveOptions.
	opts := options.RemoveOptions{}
	opts.DeleteOptions = officialOpts.Delete()
	res, err := cli.RemoveAll(context.Background(), filter1, opts)
	ast.NoError(err)
	ast.NotNil(res)
	ast.Equal(int64(2), res.DeletedCount)
	cnt, err := cli.Find(context.Background(), filter1).Count()
	ast.NoError(err)
	ast.Equal(int64(0), cnt)
	// delete with not match filter, DeletedCount in res is 0
	filter2 := bson.M{
		"name": "Lily",
	}
	res, err = cli.RemoveAll(context.Background(), filter2)
	ast.NoError(err)
	ast.NotNil(res)
	ast.Equal(int64(0), res.DeletedCount)
	// filter is bson.M{},delete all docs
	filter3 := bson.M{}
	preCnt, err := cli.Find(context.Background(), filter3).Count()
	ast.NoError(err)
	// Two docs remain (Lucas, Rocket) after the "Alice" deletions above.
	ast.Equal(int64(2), preCnt)
	res, err = cli.RemoveAll(context.Background(), filter3)
	ast.NoError(err)
	ast.NotNil(res)
	ast.Equal(preCnt, res.DeletedCount)
	afterCnt, err := cli.Find(context.Background(), filter3).Count()
	ast.NoError(err)
	ast.Equal(int64(0), afterCnt)
	// filter is nil or wrong BSON Document format
	res, err = cli.RemoveAll(context.Background(), nil)
	ast.Error(err)
	ast.Nil(res)
	res, err = cli.RemoveAll(context.Background(), 1)
	ast.Error(err)
	ast.Nil(res)
}
explode_data.jsonl/18380
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 853 }
[ 2830, 3393, 6482, 66843, 2403, 1155, 353, 8840, 836, 8, 341, 88836, 1669, 1373, 7121, 1155, 340, 86448, 1669, 2930, 2959, 445, 1944, 1138, 16867, 21348, 10421, 5378, 19047, 2398, 16867, 21348, 21688, 6482, 5378, 19047, 2398, 86448, 22834, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKafkaClient_decodeMetadataValueHeader(t *testing.T) { buf := bytes.NewBuffer([]byte("\x00\x08testtype\x00\x00\x00\x01\x00\x0ctestprotocol\x00\x0atestleader")) result, errorAt := decodeMetadataValueHeader(buf) assert.Equalf(t, "testtype", result.ProtocolType, "Expected ProtocolType to be testtype, not %v", result.ProtocolType) assert.Equalf(t, int32(1), result.Generation, "Expected Generation to be 1, not %v", result.Generation) assert.Equalf(t, "testprotocol", result.Protocol, "Expected Protocol to be testprotocol, not %v", result.Protocol) assert.Equalf(t, "testleader", result.Leader, "Expected Leader to be testleader, not %v", result.Leader) assert.Equalf(t, "", errorAt, "Expected decodeMetadataValueHeader to return empty errorAt, not %v", errorAt) }
explode_data.jsonl/34259
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 272 }
[ 2830, 3393, 42, 21883, 2959, 15227, 14610, 1130, 4047, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 5820, 7121, 4095, 10556, 3782, 4921, 87, 15, 15, 3462, 15, 23, 1944, 1313, 3462, 15, 15, 3462, 15, 15, 3462, 15, 15, 3462, 15, 16, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestServicesDiff verifies servicesDiff classification of ServiceEntry
// hosts into added/deleted/updated/unchanged buckets when comparing two
// configs: identical configs, metadata-only changes, resolution changes,
// host add/remove, port additions, and endpoint-only additions.
func TestServicesDiff(t *testing.T) {
	// Baseline: a DNS-resolved, mesh-external ServiceEntry with two hosts,
	// two ports, and three workload endpoints.
	var updatedHTTPDNS = &model.Config{
		ConfigMeta: model.ConfigMeta{
			GroupVersionKind:  collections.IstioNetworkingV1Alpha3Serviceentries.Resource().GroupVersionKind(),
			Name:              "httpDNS",
			Namespace:         "httpDNS",
			CreationTimestamp: GlobalTime,
			Labels:            map[string]string{label.TLSMode: model.IstioMutualTLSModeLabel},
		},
		Spec: &networking.ServiceEntry{
			Hosts: []string{"*.google.com", "*.mail.com"},
			Ports: []*networking.Port{
				{Number: 80, Name: "http-port", Protocol: "http"},
				{Number: 8080, Name: "http-alt-port", Protocol: "http"},
			},
			Endpoints: []*networking.WorkloadEntry{
				{
					Address: "us.google.com",
					Ports:   map[string]uint32{"http-port": 7080, "http-alt-port": 18080},
					Labels:  map[string]string{label.TLSMode: model.IstioMutualTLSModeLabel},
				},
				{
					Address: "uk.google.com",
					Ports:   map[string]uint32{"http-port": 1080},
					Labels:  map[string]string{label.TLSMode: model.IstioMutualTLSModeLabel},
				},
				{
					Address: "de.google.com",
					Labels:  map[string]string{"foo": "bar", label.TLSMode: model.IstioMutualTLSModeLabel},
				},
			},
			Location:   networking.ServiceEntry_MESH_EXTERNAL,
			Resolution: networking.ServiceEntry_DNS,
		},
	}
	// Variant: baseline plus one extra port (a service-level change).
	var updatedHTTPDNSPort = func() *model.Config {
		c := updatedHTTPDNS.DeepCopy()
		se := c.Spec.(*networking.ServiceEntry)
		var ports []*networking.Port
		ports = append(ports, se.Ports...)
		ports = append(ports, &networking.Port{Number: 9090, Name: "http-new-port", Protocol: "http"})
		se.Ports = ports
		return &c
	}()
	// Variant: baseline plus one extra endpoint (an endpoint-only change).
	var updatedEndpoint = func() *model.Config {
		c := updatedHTTPDNS.DeepCopy()
		se := c.Spec.(*networking.ServiceEntry)
		var endpoints []*networking.WorkloadEntry
		endpoints = append(endpoints, se.Endpoints...)
		endpoints = append(endpoints, &networking.WorkloadEntry{
			Address: "in.google.com",
			Labels:  map[string]string{"foo": "bar", label.TLSMode: model.IstioMutualTLSModeLabel},
		})
		se.Endpoints = endpoints
		return &c
	}()
	// stringsToHosts converts raw host strings to typed host.Name values.
	stringsToHosts := func(hosts []string) []host.Name {
		ret := make([]host.Name, len(hosts))
		for i, hostname := range hosts {
			ret[i] = host.Name(hostname)
		}
		return ret
	}
	cases := []struct {
		name      string
		a         *model.Config
		b         *model.Config
		added     []host.Name
		deleted   []host.Name
		updated   []host.Name
		unchanged []host.Name
	}{
		{
			name:      "same config",
			a:         updatedHTTPDNS,
			b:         updatedHTTPDNS,
			unchanged: stringsToHosts(updatedHTTPDNS.Spec.(*networking.ServiceEntry).Hosts),
		},
		{
			// Metadata-only change (name) leaves all hosts unchanged.
			name: "different config",
			a:    updatedHTTPDNS,
			b: func() *model.Config {
				c := updatedHTTPDNS.DeepCopy()
				c.Name = "httpDNS1"
				return &c
			}(),
			unchanged: stringsToHosts(updatedHTTPDNS.Spec.(*networking.ServiceEntry).Hosts),
		},
		{
			// A resolution change marks every host as updated.
			name: "different resolution",
			a:    updatedHTTPDNS,
			b: func() *model.Config {
				c := updatedHTTPDNS.DeepCopy()
				c.Spec.(*networking.ServiceEntry).Resolution = networking.ServiceEntry_NONE
				return &c
			}(),
			updated: stringsToHosts(updatedHTTPDNS.Spec.(*networking.ServiceEntry).Hosts),
		},
		{
			name: "config modified with added/deleted host",
			a:    updatedHTTPDNS,
			b: func() *model.Config {
				c := updatedHTTPDNS.DeepCopy()
				se := c.Spec.(*networking.ServiceEntry)
				se.Hosts = []string{"*.google.com", "host.com"}
				return &c
			}(),
			added:     []host.Name{"host.com"},
			deleted:   []host.Name{"*.mail.com"},
			unchanged: []host.Name{"*.google.com"},
		},
		{
			name:    "config modified with additional port",
			a:       updatedHTTPDNS,
			b:       updatedHTTPDNSPort,
			updated: stringsToHosts(updatedHTTPDNS.Spec.(*networking.ServiceEntry).Hosts),
		},
		{
			// Endpoint additions alone do not count as a service change.
			name:      "same config with additional endpoint",
			a:         updatedHTTPDNS,
			b:         updatedEndpoint,
			unchanged: stringsToHosts(updatedHTTPDNS.Spec.(*networking.ServiceEntry).Hosts),
		},
	}
	// servicesHostnames collects the hostname set of a service list.
	servicesHostnames := func(services []*model.Service) map[host.Name]struct{} {
		ret := make(map[host.Name]struct{})
		for _, svc := range services {
			ret[svc.Hostname] = struct{}{}
		}
		return ret
	}
	// hostnamesToMap converts an expected host list to a set for comparison.
	hostnamesToMap := func(hostnames []host.Name) map[host.Name]struct{} {
		ret := make(map[host.Name]struct{})
		for _, hostname := range hostnames {
			ret[hostname] = struct{}{}
		}
		return ret
	}
	for _, tt := range cases {
		t.Run(tt.name, func(t *testing.T) {
			as := convertServices(*tt.a)
			bs := convertServices(*tt.b)
			added, deleted, updated, unchanged := servicesDiff(as, bs)
			// Compare each diff bucket as an unordered hostname set.
			for i, item := range []struct {
				hostnames []host.Name
				services  []*model.Service
			}{
				{tt.added, added},
				{tt.deleted, deleted},
				{tt.updated, updated},
				{tt.unchanged, unchanged},
			} {
				if !reflect.DeepEqual(servicesHostnames(item.services), hostnamesToMap(item.hostnames)) {
					t.Errorf("ServicesChanged %d got %v, want %v", i, servicesHostnames(item.services), hostnamesToMap(item.hostnames))
				}
			}
		})
	}
}
explode_data.jsonl/12842
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2170 }
[ 2830, 3393, 11025, 21751, 1155, 353, 8840, 836, 8, 341, 2405, 6049, 9230, 61088, 284, 609, 2528, 10753, 515, 197, 66156, 12175, 25, 1614, 10753, 12175, 515, 298, 197, 2808, 5637, 10629, 25, 220, 15302, 2447, 267, 815, 78007, 53, 16, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_starToGlob(t *testing.T) { type args struct { star string } tests := []struct { name string args args want string }{ // TODO: Add test cases. } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := starToGlob(tt.args.star); got != tt.want { t.Errorf("starToGlob() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/73554
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 31681, 1249, 38, 1684, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 197, 11870, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 914, 198, 197, 59403,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEC2CredentialsBuildSignatureV4(t *testing.T) { opts := ec2tokens.AuthOptions{ Verb: "GET", Path: "/", Headers: map[string]string{ "Host": "localhost", }, Params: map[string]string{ "Action": "foo", "Value": "bar", }, } expected := "6a5febe41427bf601f0ae7c34dbb0fd67094776138b03fb8e65783d733d302a5" date := time.Time{} stringToSign := ec2tokens.EC2CredentialsBuildStringToSignV4(opts, "host", "foo", date) key := ec2tokens.EC2CredentialsBuildSignatureKeyV4("", "", "", date) testhelper.CheckEquals(t, expected, ec2tokens.EC2CredentialsBuildSignatureV4(key, stringToSign)) }
explode_data.jsonl/68589
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 271 }
[ 2830, 3393, 7498, 17, 27025, 11066, 25088, 53, 19, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 11942, 17, 30566, 25233, 3798, 515, 197, 197, 66946, 25, 330, 3806, 756, 197, 69640, 25, 3521, 756, 197, 197, 10574, 25, 2415, 14032, 30953,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCSNegativeLimit(t *testing.T) { for _, st := range testStores { st := st t.Run(st.name, func(t *testing.T) { t.Parallel() defer endTest(t, st) s := startTest(t, st) defer s.Close() limits := DefaultStoreLimits checkLimitError := func() { if err := s.SetLimits(&limits); err == nil { stackFatalf(t, "Setting negative limit should have failed") } } limits.MaxAge, _ = time.ParseDuration("-1.5h") checkLimitError() limits = DefaultStoreLimits limits.MaxBytes = -1000 checkLimitError() limits = DefaultStoreLimits limits.MaxChannels = -1000 checkLimitError() limits = DefaultStoreLimits limits.MaxMsgs = -1000 checkLimitError() limits = DefaultStoreLimits limits.MaxSubscriptions = -1000 checkLimitError() }) } }
explode_data.jsonl/28301
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 329 }
[ 2830, 3393, 6412, 38489, 16527, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 357, 1669, 2088, 1273, 69026, 341, 197, 18388, 1669, 357, 198, 197, 3244, 16708, 5895, 2644, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 3244, 41288, 7957, 741...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLicenseService_CheckLicense(t *testing.T) { services := InitMockEnvironment(t) ls, err := NewLicenseService(services.conf) assert.NoError(t, err) services.license.EXPECT().CheckLicense().Return(nil) err = ls.CheckLicense() assert.NoError(t, err) }
explode_data.jsonl/46593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 9827, 1860, 28188, 9827, 1155, 353, 8840, 836, 8, 341, 1903, 2161, 1669, 15690, 11571, 12723, 1155, 340, 197, 4730, 11, 1848, 1669, 1532, 9827, 1860, 1141, 2161, 13937, 340, 6948, 35699, 1155, 11, 1848, 692, 1903, 2161, 13, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRequest_MarshalJSON_jsonrpc(t *testing.T) { b, err := json.Marshal(&jsonrpc2.Request{}) if err != nil { t.Fatal(err) } if want := `{"method":"","id":0,"jsonrpc":"2.0"}`; string(b) != want { t.Errorf("got %q, want %q", b, want) } }
explode_data.jsonl/50177
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 1900, 1245, 28423, 5370, 9455, 29414, 1155, 353, 8840, 836, 8, 341, 2233, 11, 1848, 1669, 2951, 37271, 2099, 2236, 29414, 17, 9659, 37790, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 743, 1366, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWithdrawalRequestsBad(t *testing.T) { _, err := client.WithdrawalRequest(&WithdrawalRequest{ Amount: "5000000", Description: "a test withdrawal request", InternalID: "testd", CallbackURL: "https://example.com/callback", }) if err == nil { t.Errorf(".WithdrawalRequest() should have returned an error") } }
explode_data.jsonl/42389
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 92261, 278, 35295, 17082, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 2943, 26124, 7633, 278, 1900, 2099, 92261, 278, 1900, 515, 197, 197, 10093, 25, 414, 330, 20, 15, 15, 15, 15, 15, 15, 756, 197, 47414, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// Test_formatTime is a table-driven test for formatTime, covering HH:MM and
// HH:MM:SS inputs plus a range of malformed strings (non-numeric parts,
// out-of-range hour/minute, too many components).
//
// NOTE(review): for error cases only wantErr is asserted — the wantHour /
// wantMin values in those rows (e.g. 19 and 11) are never checked.
func Test_formatTime(t *testing.T) {
	tests := []struct {
		name     string
		args     string
		wantHour int
		wantMin  int
		wantSec  int
		wantErr  bool
	}{
		{
			name:     "normal",
			args:     "16:18",
			wantHour: 16,
			wantMin:  18,
			wantErr:  false,
		},
		{
			name:     "normal_with_second",
			args:     "6:18:01",
			wantHour: 6,
			wantMin:  18,
			wantSec:  1,
			wantErr:  false,
		},
		{
			name:     "not_a_number",
			args:     "e:18",
			wantHour: 0,
			wantMin:  0,
			wantErr:  true,
		},
		{
			name:     "out_of_range_hour",
			args:     "25:18",
			wantHour: 0,
			wantMin:  0,
			wantErr:  true,
		},
		{
			name:     "out_of_range_minute",
			args:     "23:60",
			wantHour: 0,
			wantMin:  0,
			wantErr:  true,
		},
		{
			name:     "wrong_format",
			args:     "19:18:17:17",
			wantHour: 0,
			wantMin:  0,
			wantErr:  true,
		},
		{
			name:     "wrong_minute",
			args:     "19:1e",
			wantHour: 19,
			wantMin:  0,
			wantErr:  true,
		},
		{
			name:     "wrong_hour",
			args:     "1e:10",
			wantHour: 11,
			wantMin:  0,
			wantErr:  true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			gotHour, gotMin, gotSec, err := formatTime(tt.args)
			if tt.wantErr {
				// Error cases: only the presence of an error is checked.
				assert.NotEqual(t, nil, err, tt.args)
				return
			}
			assert.Equal(t, tt.wantHour, gotHour, tt.args)
			assert.Equal(t, tt.wantMin, gotMin, tt.args)
			// Seconds are optional in the input; absent seconds must be zero.
			if tt.wantSec != 0 {
				assert.Equal(t, tt.wantSec, gotSec)
			} else {
				assert.Zero(t, gotSec)
			}
		})
	}
}
explode_data.jsonl/63498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 861 }
[ 2830, 3393, 8955, 1462, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 31215, 257, 914, 198, 197, 50780, 30254, 526, 198, 197, 50780, 6217, 220, 526, 198, 197, 50780, 8430, 220, 526, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestDelegate exercises the VRF job delegate end to end with mocked
// broadcaster, tx manager, and eth client: it registers a log listener,
// then checks that a well-formed VRF request log produces a pipeline run
// and an eth transaction, while a log whose key hash does not match the
// job produces neither.
//
// NOTE(review): the raw log Data layout (key hash | seed | sender | fee |
// requestID, 32 bytes each) must match the on-chain event encoding —
// confirm against the coordinator contract ABI if it changes.
func TestDelegate(t *testing.T) {
	cfg, cfgcleanup := cltest.NewConfig(t)
	t.Cleanup(cfgcleanup)
	store, cleanup := cltest.NewStoreWithConfig(t, cfg)
	t.Cleanup(cleanup)
	vuni := setup(t, store.DB, cfg)
	vd := vrf.NewDelegate(
		store.DB,
		vuni.txm,
		vuni.ks,
		vuni.jpv2.Pr,
		vuni.jpv2.Prm,
		vuni.lb,
		vuni.ec,
		cfg)
	vs := testspecs.GenerateVRFSpec(testspecs.VRFSpecParams{PublicKey: vuni.vrfkey.String()})
	t.Log(vs)
	jb, err := vrf.ValidatedVRFSpec(vs.Toml())
	require.NoError(t, err)
	require.NoError(t, vuni.jpv2.Jrm.CreateJob(context.Background(), &jb, *pipeline.NewTaskDAG()))
	vl, err := vd.ServicesForSpec(jb)
	require.NoError(t, err)
	require.Len(t, vl, 1)
	listener := vl[0]
	// Capture the log.Listener the service registers so the test can feed
	// it raw logs directly; the returned unsubscribe func flags completion.
	unsubscribeAwaiter := cltest.NewAwaiter()
	unsubscribe := func() { unsubscribeAwaiter.ItHappened() }
	var logListener log.Listener
	vuni.lb.On("Register", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
		logListener = args.Get(0).(log.Listener)
	}).Return(unsubscribe)
	require.NoError(t, listener.Start())
	t.Run("valid log", func(t *testing.T) {
		txHash := cltest.NewHash()
		reqID := cltest.NewHash()
		vuni.lb.On("WasAlreadyConsumed", mock.Anything, mock.Anything).Return(false, nil)
		a := cltest.NewAwaiter()
		vuni.lb.On("MarkConsumed", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
			a.ItHappened()
		}).Return(nil).Once()
		// Expect a call to check if the req is already fulfilled.
		vuni.ec.On("CallContract", mock.Anything, mock.Anything, mock.Anything).Return(generateCallbackReturnValues(t), nil)
		// Ensure we queue up a valid eth transaction
		// Linked to requestID
		vuni.txm.On("CreateEthTransaction",
			mock.AnythingOfType("*gorm.DB"),
			vuni.submitter,
			common.HexToAddress(vs.CoordinatorAddress),
			mock.Anything,
			uint64(500000),
			mock.MatchedBy(func(meta *models.EthTxMetaV2) bool {
				return meta.JobID > 0 && meta.RequestID == reqID && meta.RequestTxHash == txHash
			})).Once().Return(models.EthTx{}, nil)
		// Send a valid log
		pk, err := secp256k1.NewPublicKeyFromHex(vs.PublicKey)
		require.NoError(t, err)
		logListener.HandleLog(log.NewLogBroadcast(types.Log{
			// Data has all the NON-indexed parameters
			Data: append(append(append(append(
				pk.MustHash().Bytes(), // key hash
				common.BigToHash(big.NewInt(42)).Bytes()...), // seed
				cltest.NewHash().Bytes()...), // sender
				cltest.NewHash().Bytes()...), // fee
				reqID.Bytes()...), // requestID
			// JobID is indexed, thats why it lives in the Topics.
			Topics:      []common.Hash{{}, jb.ExternalIDToTopicHash()}, // jobID
			Address:     common.Address{},
			BlockNumber: 0,
			TxHash:      txHash,
			TxIndex:     0,
			BlockHash:   common.Hash{},
			Index:       0,
			Removed:     false,
		}))
		a.AwaitOrFail(t)
		// Ensure we created a successful run.
		runs, err := vuni.jpv2.Prm.GetAllRuns()
		require.NoError(t, err)
		require.Equal(t, 1, len(runs))
		assert.False(t, runs[0].Errors.HasError())
		m, ok := runs[0].Meta.Val.(map[string]interface{})
		require.True(t, ok)
		_, ok = m["eth_tx_id"]
		assert.True(t, ok)
		assert.Len(t, runs[0].PipelineTaskRuns, 0)
		// Reset DB state so the next subtest starts clean.
		require.NoError(t, store.DB.Exec(`TRUNCATE eth_txes,pipeline_runs CASCADE`).Error)
	})
	t.Run("invalid log", func(t *testing.T) {
		vuni.lb.On("WasAlreadyConsumed", mock.Anything, mock.Anything).Return(false, nil)
		a := cltest.NewAwaiter()
		vuni.lb.On("MarkConsumed", mock.Anything, mock.Anything).Run(func(args mock.Arguments) {
			a.ItHappened()
		}).Return(nil).Once()
		// Expect a call to check if the req is already fulfilled.
		vuni.ec.On("CallContract", mock.Anything, mock.Anything, mock.Anything).Return(generateCallbackReturnValues(t), nil)
		// Send an invalid log (keyhash doesnt match)
		logListener.HandleLog(log.NewLogBroadcast(types.Log{
			// Data has all the NON-indexed parameters
			Data: append(append(append(append(
				cltest.NewHash().Bytes(), // key hash
				common.BigToHash(big.NewInt(42)).Bytes()...), // seed
				cltest.NewHash().Bytes()...), // sender
				cltest.NewHash().Bytes()...), // fee
				cltest.NewHash().Bytes()...), // requestID
			// JobID is indexed, thats why it lives in the Topics.
			Topics:      []common.Hash{{}, jb.ExternalIDToTopicHash()}, // jobID
			Address:     common.Address{},
			BlockNumber: 0,
			TxHash:      common.Hash{},
			TxIndex:     0,
			BlockHash:   common.Hash{},
			Index:       0,
			Removed:     false,
		}))
		a.AwaitOrFail(t)
		// Ensure we have not created a run.
		runs, err := vuni.jpv2.Prm.GetAllRuns()
		require.NoError(t, err)
		require.Equal(t, len(runs), 0)
		// Ensure we have NOT queued up an eth transaction
		var ethTxes []models.EthTx
		err = store.DB.Find(&ethTxes).Error
		require.NoError(t, err)
		require.Len(t, ethTxes, 0)
	})
	require.NoError(t, listener.Close())
	unsubscribeAwaiter.AwaitOrFail(t, 1*time.Second)
	vuni.Assert(t)
}
explode_data.jsonl/43009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2034 }
[ 2830, 3393, 9381, 1155, 353, 8840, 836, 8, 341, 50286, 11, 13286, 55235, 1669, 1185, 1944, 7121, 2648, 1155, 340, 3244, 727, 60639, 28272, 55235, 340, 57279, 11, 21290, 1669, 1185, 1944, 7121, 6093, 2354, 2648, 1155, 11, 13286, 340, 324...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertEnum(t *testing.T) { lines, err := convertTypes( "Foo", "Bar", `enum ItemState { REQUIRED, OPTIONAL } struct Foo { 1: optional ItemState one 2: required ItemState two } struct Bar { 1: optional ItemState one 2: required ItemState two }`, nil, nil, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` out.One = (*structs.ItemState)(in.One) out.Two = structs.ItemState(in.Two) `), lines) }
explode_data.jsonl/62041
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 200 }
[ 2830, 3393, 12012, 10766, 1155, 353, 8840, 836, 8, 341, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, 63, 9018, 5739, 1397, 341, 298, 34269, 30071, 345, 298, 197, 34057, 969, 198, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintScale(t *testing.T) { tests := []struct { scale autoscaling.Scale options printers.GenerateOptions expected []metav1.TableRow }{ { scale: autoscaling.Scale{ ObjectMeta: metav1.ObjectMeta{ Name: "test-autoscaling", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Spec: autoscaling.ScaleSpec{Replicas: 2}, Status: autoscaling.ScaleStatus{Replicas: 1}, }, expected: []metav1.TableRow{ { Cells: []interface{}{"test-autoscaling", int64(2), int64(1), string("0s")}, }, }, }, } for i, test := range tests { rows, err := printScale(&test.scale, test.options) if err != nil { t.Fatal(err) } for i := range rows { rows[i].Object.Object = nil } if !reflect.DeepEqual(test.expected, rows) { t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows)) } } }
explode_data.jsonl/72312
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 8994, 6947, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 70780, 262, 46872, 81552, 51832, 198, 197, 35500, 220, 55953, 57582, 3798, 198, 197, 42400, 3056, 4059, 402, 16, 18257, 3102, 198, 197, 59403, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestRequesting_ExecuteInbound(t *testing.T) { followup, _, err := (&requesting{}).ExecuteInbound(nil, &metaData{}) require.Error(t, err) require.Nil(t, followup) }
explode_data.jsonl/66258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 1900, 287, 83453, 641, 10891, 1155, 353, 8840, 836, 8, 341, 1166, 1544, 454, 11, 8358, 1848, 1669, 15899, 2035, 287, 6257, 568, 17174, 641, 10891, 27907, 11, 609, 5490, 1043, 37790, 17957, 6141, 1155, 11, 1848, 340, 17957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestPluginQueriesAndPaths(t *testing.T) { ctx := context.Background() manager, _ := plugins.New(nil, "test-instance-id", inmem.New()) backend := &testPlugin{} manager.Register("test_plugin", backend) config, err := ParseConfig([]byte(`{"plugin": "test_plugin"}`), nil, []string{"test_plugin"}) if err != nil { t.Fatal(err) } plugin := New(config, manager) plugin.Log(ctx, &server.Info{Path: "data.foo"}) plugin.Log(ctx, &server.Info{Path: "data.foo.bar"}) plugin.Log(ctx, &server.Info{Query: "a = data.foo"}) exp := []struct { query string path string }{ // TODO(tsandall): we need to fix how POST /v1/data (and // friends) are represented here. Currently we can't tell the // difference between /v1/data and /v1/data/data. The decision // log event paths should be slash prefixed to avoid ambiguity. // {path: "data"}, {path: "foo"}, {path: "foo/bar"}, {query: "a = data.foo"}, } if len(exp) != len(backend.events) { t.Fatalf("Expected %d events but got %v", len(exp), len(backend.events)) } for i, e := range exp { if e.query != backend.events[i].Query || e.path != backend.events[i].Path { t.Fatalf("Unexpected event %d, want %v but got %v", i, e, backend.events[i]) } } }
explode_data.jsonl/2170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 11546, 55261, 3036, 26901, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 92272, 11, 716, 1669, 17215, 7121, 27907, 11, 330, 1944, 73655, 12897, 497, 304, 10536, 7121, 12367, 197, 20942, 1669, 609, 1944, 11546, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestPgReadHandshakeStartupMessage(t *testing.T) { for _, tt := range pgValueTests { t.Run(tt.description, func(t *testing.T) { out := new(bytes.Buffer) dumper := &Dumper{ logger: newTestLogger(out), } in := tt.in direction := tt.direction connMetadata := &tt.connMetadata actual, err := dumper.readHandshake(in, direction, connMetadata) if err != nil { t.Errorf("%v", err) } expected := tt.expected if len(actual) != len(expected) { t.Errorf("actual %v\nwant %v", actual, expected) } if len(actual) == 2 { if actual[0] != expected[0] { t.Errorf("actual %v\nwant %v", actual, expected) } if actual[1] != expected[1] { t.Errorf("actual %v\nwant %v", actual, expected) } } }) } }
explode_data.jsonl/10721
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 351 }
[ 2830, 3393, 82540, 4418, 2314, 29661, 39076, 2052, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 17495, 1130, 18200, 341, 197, 3244, 16708, 47152, 13178, 11, 2915, 1155, 353, 8840, 836, 8, 341, 298, 13967, 1669, 501, 2315...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestFieldComments(test *testing.T) { var err error t1, err := parseRDLString(` //type comment type TestStruct Struct { String one; //comment for field 1 String two; //comment for field 2 } `) if err != nil { test.Errorf("cannot parse valid RDL: %v", err) } t2, err := parseRDLString(` //type comment type TestStruct Struct { //comment for field 1 String one; //comment for field 2 String two; } `) type1 := t1.Types[0] type2 := t2.Types[0] if !EquivalentTypes(type1, type2) { test.Errorf("Types don't match: %v, %v", type1, type2) } }
explode_data.jsonl/74356
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 1877, 17373, 8623, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 3244, 16, 11, 1848, 1669, 4715, 49, 16524, 703, 61528, 322, 1313, 3980, 198, 1313, 3393, 9422, 16139, 341, 262, 923, 825, 26, 442, 6182, 369, 2070, 220, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAdapter_GetConfigMap(t *testing.T) { a, _ := NewAdapter(testConfig, IngressAPIVersionNetworking, testIngressFilter, testIngressDefaultSecurityGroup, testSSLPolicy, aws.LoadBalancerTypeApplication, DefaultClusterLocalDomain, false) client := &mockClient{} a.kubeClient = client cm, err := a.GetConfigMap("foo-ns", "foo-name") if err != nil { t.Error(err) } expectedData := map[string]string{"some-key": "key1: val1\nkey2: val2\n"} if !reflect.DeepEqual(cm.Data, expectedData) { t.Fatalf("unexpected ConfigMap data, got %+v, want %+v", cm.Data, expectedData) } client.broken = true _, err = a.GetConfigMap("foo-ns", "foo-name") if err == nil { t.Error("expected an error") } }
explode_data.jsonl/6735
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 262 }
[ 2830, 3393, 5940, 13614, 2648, 2227, 1155, 353, 8840, 836, 8, 341, 11323, 11, 716, 1669, 1532, 5940, 8623, 2648, 11, 758, 2483, 7082, 5637, 78007, 11, 1273, 641, 2483, 5632, 11, 1273, 641, 2483, 3675, 15352, 2808, 11, 1273, 1220, 1256...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStartCmdWithBlankEnvVar(t *testing.T) { t.Run("test blank host env var", func(t *testing.T) { startCmd := GetStartCmd() err := os.Setenv(hostURLEnvKey, "") require.NoError(t, err) err = startCmd.Execute() require.Error(t, err) require.Equal(t, "ORB_HOST_URL value is empty", err.Error()) }) t.Run("test blank cas url env var", func(t *testing.T) { startCmd := GetStartCmd() err := os.Setenv(hostURLEnvKey, "localhost:8080") require.NoError(t, err) err = os.Setenv(hostMetricsURLEnvKey, "localhost:8081") require.NoError(t, err) err = os.Setenv(vctURLEnvKey, "localhost:8080") require.NoError(t, err) err = os.Setenv(casTypeEnvKey, "") require.NoError(t, err) defer func() { require.NoError(t, os.Unsetenv(hostURLEnvKey)) require.NoError(t, os.Unsetenv(vctURLEnvKey)) require.NoError(t, os.Unsetenv(casTypeEnvKey)) }() err = startCmd.Execute() require.Error(t, err) require.Equal(t, "CAS_TYPE value is empty", err.Error()) }) }
explode_data.jsonl/31124
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 430 }
[ 2830, 3393, 3479, 15613, 2354, 22770, 14359, 3962, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 1944, 10113, 3468, 6105, 762, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 21375, 15613, 1669, 2126, 3479, 15613, 2822, 197, 9859, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSchemaReadUpdateAndFailWrite(t *testing.T) { conn, cleanup, _ := testserver.NewTestServer(require.New(t), 0, memdb.DisableGC, 0, false, testfixtures.EmptyDatastore) t.Cleanup(cleanup) client := v1alpha1.NewSchemaServiceClient(conn) requestedObjectDefNames := []string{"example/user"} // Issue a write to create the schema's namespaces. writeResp, err := client.WriteSchema(context.Background(), &v1alpha1.WriteSchemaRequest{ Schema: `definition example/user {}`, }) require.NoError(t, err) require.Equal(t, requestedObjectDefNames, writeResp.GetObjectDefinitionsNames()) // Read the schema. resp, err := client.ReadSchema(context.Background(), &v1alpha1.ReadSchemaRequest{ ObjectDefinitionsNames: requestedObjectDefNames, }) require.NoError(t, err) // Issue a write with the precondition and ensure it succeeds. updateResp, err := client.WriteSchema(context.Background(), &v1alpha1.WriteSchemaRequest{ Schema: `definition example/user { relation foo1: example/user }`, OptionalDefinitionsRevisionPrecondition: resp.ComputedDefinitionsRevision, }) require.NoError(t, err) // Issue another write out of band to update the namespace. _, err = client.WriteSchema(context.Background(), &v1alpha1.WriteSchemaRequest{ Schema: `definition example/user { relation foo2: example/user }`, }) require.NoError(t, err) // Try to write using the previous revision and ensure it fails. _, err = client.WriteSchema(context.Background(), &v1alpha1.WriteSchemaRequest{ Schema: `definition example/user { relation foo3: example/user }`, OptionalDefinitionsRevisionPrecondition: updateResp.ComputedDefinitionsRevision, }) grpcutil.RequireStatus(t, codes.FailedPrecondition, err) // Read the schema and ensure it did not change. readResp, err := client.ReadSchema(context.Background(), &v1alpha1.ReadSchemaRequest{ ObjectDefinitionsNames: requestedObjectDefNames, }) require.NoError(t, err) require.Contains(t, readResp.ObjectDefinitions[0], "foo2") }
explode_data.jsonl/54549
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 651 }
[ 2830, 3393, 8632, 4418, 4289, 3036, 19524, 7985, 1155, 353, 8840, 836, 8, 341, 32917, 11, 21290, 11, 716, 1669, 1273, 4030, 7121, 2271, 5475, 23482, 7121, 1155, 701, 220, 15, 11, 1833, 1999, 10166, 480, 22863, 11, 220, 15, 11, 895, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCyclicReferences(t *testing.T) { type ObjectDiff struct { FieldCycle *ObjectDiff SliceCycle []*ObjectDiff MapCycle map[*ObjectDiff]*ObjectDiff } instance := &ObjectDiff{ FieldCycle: nil, SliceCycle: nil, MapCycle: nil, } generator := NewGenerator(UseAllExportedFields()) schemaRef, err := generator.GenerateSchemaRef(reflect.TypeOf(instance)) require.NoError(t, err) require.NotNil(t, schemaRef.Value.Properties["FieldCycle"]) require.Equal(t, "#/components/schemas/ObjectDiff", schemaRef.Value.Properties["FieldCycle"].Ref) require.NotNil(t, schemaRef.Value.Properties["SliceCycle"]) require.Equal(t, "array", schemaRef.Value.Properties["SliceCycle"].Value.Type) require.Equal(t, "#/components/schemas/ObjectDiff", schemaRef.Value.Properties["SliceCycle"].Value.Items.Ref) require.NotNil(t, schemaRef.Value.Properties["MapCycle"]) require.Equal(t, "object", schemaRef.Value.Properties["MapCycle"].Value.Type) require.Equal(t, "#/components/schemas/ObjectDiff", schemaRef.Value.Properties["MapCycle"].Value.AdditionalProperties.Ref) }
explode_data.jsonl/55879
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 402 }
[ 2830, 3393, 34, 65304, 31712, 1155, 353, 8840, 836, 8, 341, 13158, 3002, 21751, 2036, 341, 197, 94478, 44820, 353, 1190, 21751, 198, 197, 7568, 4754, 44820, 29838, 1190, 21751, 198, 197, 26873, 44820, 256, 2415, 33836, 1190, 21751, 8465, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSnapshotSaveCommand(t *testing.T) { t.Parallel() a := agent.NewTestAgent(t.Name(), ``) defer a.Shutdown() client := a.Client() ui := cli.NewMockUi() c := New(ui) dir := testutil.TempDir(t, "snapshot") defer os.RemoveAll(dir) file := path.Join(dir, "backup.tgz") args := []string{ "-http-addr=" + a.HTTPAddr(), file, } code := c.Run(args) if code != 0 { t.Fatalf("bad: %d. %#v", code, ui.ErrorWriter.String()) } f, err := os.Open(file) if err != nil { t.Fatalf("err: %v", err) } defer f.Close() if err := client.Snapshot().Restore(nil, f); err != nil { t.Fatalf("err: %v", err) } }
explode_data.jsonl/24878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 284 }
[ 2830, 3393, 15009, 8784, 4062, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 11323, 1669, 8315, 7121, 2271, 16810, 1155, 2967, 1507, 9902, 340, 16867, 264, 10849, 18452, 741, 25291, 1669, 264, 11716, 2822, 37278, 1669, 21348, 7121...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCloneLeaf(t *testing.T) { leaf1 := &Leaf{1} clone := CloneRefOfLeaf(leaf1) assert.Equal(t, leaf1, clone) leaf1.v = 5 assert.NotEqual(t, leaf1, clone) }
explode_data.jsonl/24686
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 37677, 31461, 1155, 353, 8840, 836, 8, 341, 197, 24153, 16, 1669, 609, 31461, 90, 16, 532, 197, 19982, 1669, 27913, 3945, 2124, 31461, 7, 24153, 16, 340, 6948, 12808, 1155, 11, 15933, 16, 11, 14715, 340, 197, 24153, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestValidateRoute53RecordType(t *testing.T) { validTypes := []string{ "AAAA", "SOA", "A", "TXT", "CNAME", "MX", "NAPTR", "PTR", "SPF", "SRV", "NS", } invalidTypes := []string{ "a", "alias", "SpF", "Txt", "AaAA", } for _, v := range validTypes { _, errors := validateRoute53RecordType(v, "route53_record") if len(errors) != 0 { t.Fatalf("%q should be a valid Route53 record type: %v", v, errors) } } for _, v := range invalidTypes { _, errors := validateRoute53RecordType(v, "route53_record") if len(errors) == 0 { t.Fatalf("%q should not be a valid Route53 record type", v) } } }
explode_data.jsonl/78594
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 299 }
[ 2830, 3393, 17926, 4899, 20, 18, 6471, 929, 1155, 353, 8840, 836, 8, 341, 56322, 4173, 1669, 3056, 917, 515, 197, 197, 1, 25699, 756, 197, 197, 1, 13880, 32, 756, 197, 197, 29133, 756, 197, 197, 1, 62865, 756, 197, 197, 46316, 753...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestClient_ResetUser_validation(t *testing.T) { err := testClient.ResetUserPassword(&ResetUserPasswordInput{ Login: "", }) if err != ErrMissingLogin { t.Errorf("bad error: %s", err) } }
explode_data.jsonl/26707
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 76 }
[ 2830, 3393, 2959, 67771, 1474, 19416, 1155, 353, 8840, 836, 8, 341, 9859, 1669, 1273, 2959, 36660, 1474, 4876, 2099, 14828, 1474, 4876, 2505, 515, 197, 197, 6231, 25, 8324, 197, 3518, 743, 1848, 961, 15495, 25080, 6231, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestTestRunner(t *testing.T) { expected := "testing the test runner" runner := TestRunner{Output: []byte(expected)} output, err := runner.Run("command") assert.NoError(t, err) assert.Equal(t, expected, string(output)) runner = TestRunner{ExitCode: 42} _, err = runner.Run("command") assert.Error(t, err) assert.Equal(t, err.Error(), "exit status 42") runner = TestRunner{ExpectedCommand: []string{"foo", "bar"}} output, err = runner.Run("foo", "bar", "gazonk") assert.Error(t, err) assert.Equal(t, "exit status 127", err.Error()) assert.Equal(t, "expected argv [foo bar], got [foo bar gazonk]", string(output)) }
explode_data.jsonl/28843
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 226 }
[ 2830, 3393, 2271, 19486, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 330, 8840, 279, 1273, 22259, 698, 197, 41736, 1669, 3393, 19486, 90, 5097, 25, 3056, 3782, 15253, 10569, 21170, 11, 1848, 1669, 22259, 16708, 445, 5631, 1138, 6948, 356...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMinimalSurface(t *testing.T) { t.Parallel() for _, size := range [][2]int{ {20, 30}, {30, 30}, {50, 40}, } { f := NewMinimalSurface(size[0], size[1]) x0 := f.InitX() grad := make([]float64, len(x0)) f.Grad(grad, x0) fdGrad := fd.Gradient(nil, f.Func, x0, &fd.Settings{Formula: fd.Central}) // Test that the numerical and analytical gradients agree. dist := floats.Distance(grad, fdGrad, math.Inf(1)) if dist > 1e-9 { t.Errorf("grid %v x %v: numerical and analytical gradient do not match. |fdGrad - grad|_∞ = %v", size[0], size[1], dist) } // Test that the gradient at the minimum is small enough. // In some sense this test is not completely correct because ExactX // returns the exact solution to the continuous problem projected on the // grid, not the exact solution to the discrete problem which we are // solving. This is the reason why a relatively loose tolerance 1e-4 // must be used. xSol := f.ExactX() f.Grad(grad, xSol) norm := floats.Norm(grad, math.Inf(1)) if norm > 1e-4 { t.Errorf("grid %v x %v: gradient at the minimum not small enough. |grad|_∞ = %v", size[0], size[1], norm) } } }
explode_data.jsonl/12134
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 459 }
[ 2830, 3393, 88328, 23697, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 8358, 1379, 1669, 2088, 508, 1457, 17, 63025, 515, 197, 197, 90, 17, 15, 11, 220, 18, 15, 1583, 197, 197, 90, 18, 15, 11, 220, 18, 15, 1583, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCleanupNSTimeout(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() ecscniClient := NewClient(&Config{}) libcniClient := mock_libcni.NewMockCNI(ctrl) ecscniClient.(*cniClient).libcni = libcniClient // This will be called for both bridge and eni plugin libcniClient.EXPECT().DelNetwork(gomock.Any(), gomock.Any(), gomock.Any()).Do( func(x interface{}, y interface{}, z interface{}) { }).Return(errors.New("timeout")).MaxTimes(3) additionalRoutesJson := `["169.254.172.1/32", "10.11.12.13/32"]` var additionalRoutes []cnitypes.IPNet err := json.Unmarshal([]byte(additionalRoutesJson), &additionalRoutes) assert.NoError(t, err) ctx, cancel := context.WithTimeout(context.TODO(), 1*time.Millisecond) defer cancel() err = ecscniClient.CleanupNS(ctx, &Config{AdditionalLocalRoutes: additionalRoutes}, time.Millisecond) assert.Error(t, err) }
explode_data.jsonl/30400
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 342 }
[ 2830, 3393, 67335, 50479, 545, 411, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 197, 757, 2388, 7751, 2959, 1669, 1532, 2959, 2099, 2648, 37790, 197, 55576, 7751, 295...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBenchmark(t *testing.T) { tests := []struct { msg string n int d time.Duration rps int want int wantDuration time.Duration }{ { msg: "Capped by max requests", n: 100, d: 100 * time.Second, want: 100, }, { msg: "Capped by RPS * duration", d: 500 * time.Millisecond, rps: 120, want: 60, wantDuration: 500 * time.Millisecond, }, { msg: "Capped by duration", d: 500 * time.Millisecond, wantDuration: 500 * time.Millisecond, }, } var requests atomic.Int32 s := newServer(t) defer s.shutdown() s.register(fooMethod, methods.errorIf(func() bool { requests.Inc() return false })) m := benchmarkMethodForTest(t, fooMethod, transport.TChannel) for _, tt := range tests { requests.Store(0) start := time.Now() buf, _, out := getOutput(t) runBenchmark(out, _testLogger, Options{ BOpts: BenchmarkOptions{ MaxRequests: tt.n, MaxDuration: tt.d, RPS: tt.rps, Connections: 50, Concurrency: 2, }, TOpts: s.transportOpts(), }, _resolvedTChannelThrift, fooMethod, m) bufStr := buf.String() assert.Contains(t, bufStr, "Max RPS") assert.NotContains(t, bufStr, "Errors") if tt.want != 0 { assert.EqualValues(t, tt.want, requests.Load(), "%v: Invalid number of requests", tt.msg) } if tt.wantDuration != 0 { // Make sure the total duration is within a delta. slack := testutils.Timeout(500 * time.Millisecond) duration := time.Since(start) assert.True(t, duration <= tt.wantDuration+slack && duration >= tt.wantDuration-slack, "%v: Took %v, wanted duration %v", tt.msg, duration, tt.wantDuration) } } }
explode_data.jsonl/56431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 831 }
[ 2830, 3393, 84971, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 21169, 688, 914, 198, 197, 9038, 310, 526, 198, 197, 2698, 310, 882, 33795, 198, 197, 7000, 1690, 688, 526, 198, 197, 50780, 260, 526, 198, 197, 507...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCACommon_GenerateECCCertificateRequestFP_Fail(t *testing.T) { _, errCA = CAGenerateECCCertificateRequestFP(&CertRequestFP{ PrivateKeyFilePath: "", CertificateRequestFilePath: filepath.Join(pathcarsapksc1512, caCertificateRequestFileName), SignatureAlgorithm: x509.SHA256WithRSAPSS, Subject: CAMockSubject, }, "PRIVATE KEY") t.Log(errCA) }
explode_data.jsonl/24084
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 5049, 10839, 2646, 13220, 36, 3706, 33202, 1900, 11698, 1400, 604, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 5049, 284, 356, 1890, 13220, 36, 3706, 33202, 1900, 11698, 2099, 36934, 1900, 11698, 515, 197, 197, 75981, 190...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateDeploymentWithReplicas(t *testing.T) { depName := "jonny-dep" replicas := "3" tf := cmdtesting.NewTestFactory().WithNamespace("test") defer tf.Cleanup() ns := scheme.Codecs.WithoutConversion() fakeDiscovery := "{\"kind\":\"APIResourceList\",\"apiVersion\":\"v1\",\"groupVersion\":\"apps/v1\",\"resources\":[{\"name\":\"deployments\",\"singularName\":\"\",\"namespaced\":true,\"kind\":\"Deployment\",\"verbs\":[\"create\",\"delete\",\"deletecollection\",\"get\",\"list\",\"patch\",\"update\",\"watch\"],\"shortNames\":[\"deploy\"],\"categories\":[\"all\"]}]}" tf.Client = &fake.RESTClient{ NegotiatedSerializer: ns, Client: fake.CreateHTTPClient(func(req *http.Request) (*http.Response, error) { return &http.Response{ StatusCode: http.StatusOK, Body: ioutil.NopCloser(bytes.NewBuffer([]byte(fakeDiscovery))), }, nil }), } tf.ClientConfigVal = &restclient.Config{} ioStreams, _, buf, _ := genericclioptions.NewTestIOStreams() cmd := NewCmdCreateDeployment(tf, ioStreams) cmd.Flags().Set("dry-run", "client") cmd.Flags().Set("output", "jsonpath={.spec.replicas}") cmd.Flags().Set("replicas", replicas) cmd.Flags().Set("image", "hollywood/jonny.depp:v2") cmd.Run(cmd, []string{depName}) if buf.String() != replicas { t.Errorf("expected output: %s, but got: %s", replicas, buf.String()) } }
explode_data.jsonl/72191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 500 }
[ 2830, 3393, 4021, 75286, 2354, 18327, 52210, 1155, 353, 8840, 836, 8, 341, 197, 14891, 675, 1669, 330, 34165, 3834, 6810, 79, 698, 73731, 52210, 1669, 330, 18, 698, 3244, 69, 1669, 5439, 8840, 7121, 2271, 4153, 1005, 2354, 22699, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPilotPlugin(t *testing.T) { s := env.NewTestSetup(env.PilotPluginTLSTest, t) s.EnvoyTemplate = envoyConf grpcServer := grpc.NewServer() lis, err := net.Listen("tcp", fmt.Sprintf(":%d", s.Ports().DiscoveryPort)) if err != nil { t.Fatal(err) } snapshots := cache.NewSnapshotCache(true, mock{}, nil) _ = snapshots.SetSnapshot(id, makeSnapshot(s, t)) server := xds.NewServer(snapshots, nil) discovery.RegisterAggregatedDiscoveryServiceServer(grpcServer, server) go func() { _ = grpcServer.Serve(lis) }() defer grpcServer.GracefulStop() if err := s.SetUp(); err != nil { t.Fatalf("Failed to setup test: %v", err) } defer s.TearDown() s.WaitEnvoyReady() // Issues a GET echo request with 0 size body if _, _, err := env.HTTPGet(fmt.Sprintf("http://localhost:%d/echo", s.Ports().ClientProxyPort)); err != nil { t.Errorf("Failed in request: %v", err) } s.VerifyCheck("http-outbound", checkAttributesOkOutbound) s.VerifyCheck("http-inbound", checkAttributesOkInbound) s.VerifyTwoReports("http", reportAttributesOkOutbound, reportAttributesOkInbound) }
explode_data.jsonl/77841
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 411 }
[ 2830, 3393, 47, 23958, 11546, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 6105, 7121, 2271, 21821, 16978, 1069, 23958, 11546, 13470, 784, 477, 11, 259, 340, 1903, 81214, 2253, 7275, 284, 59530, 15578, 198, 197, 56585, 5475, 1669, 47900, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEnvVarsBasic(t *testing.T) { s := newScaffold(t) defer s.reset() err := s.executeCommand("env", "vars", "dev", "--k8s:kubeconfig=kubeconfig.yaml") require.NoError(t, err) s.assertOutputLineMatch(regexp.MustCompile(`KUBECONFIG='kubeconfig.yaml';`)) s.assertOutputLineMatch(regexp.MustCompile(`KUBE_CLUSTER='dev';`)) s.assertOutputLineMatch(regexp.MustCompile(`KUBE_CONTEXT='dev'`)) s.assertOutputLineMatch(regexp.MustCompile(`KUBE_NAMESPACE='default';`)) s.assertOutputLineMatch(regexp.MustCompile(`export KUBECONFIG KUBE_CLUSTER KUBE_CONTEXT KUBE_NAMESPACE KUBECTL_ARGS`)) }
explode_data.jsonl/244
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 14359, 28305, 15944, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 501, 50, 27864, 1155, 340, 16867, 274, 13857, 741, 9859, 1669, 274, 7769, 4062, 445, 3160, 497, 330, 15380, 497, 330, 3583, 497, 14482, 74, 23, 82, 57071, 3760...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInsertRow(t *testing.T) { xlsx := NewFile() sheet1 := xlsx.GetSheetName(1) r, err := xlsx.workSheetReader(sheet1) assert.NoError(t, err) const ( colCount = 10 rowCount = 10 ) fillCells(xlsx, sheet1, colCount, rowCount) xlsx.SetCellHyperLink(sheet1, "A5", "https://github.com/360EntSecGroup-Skylar/excelize", "External") assert.EqualError(t, xlsx.InsertRow(sheet1, -1), "invalid row number -1") assert.EqualError(t, xlsx.InsertRow(sheet1, 0), "invalid row number 0") assert.NoError(t, xlsx.InsertRow(sheet1, 1)) if !assert.Len(t, r.SheetData.Row, rowCount+1) { t.FailNow() } assert.NoError(t, xlsx.InsertRow(sheet1, 4)) if !assert.Len(t, r.SheetData.Row, rowCount+2) { t.FailNow() } assert.NoError(t, xlsx.SaveAs(filepath.Join("test", "TestInsertRow.xlsx"))) }
explode_data.jsonl/30491
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 13780, 3102, 1155, 353, 8840, 836, 8, 341, 10225, 29017, 1669, 1532, 1703, 741, 1903, 3674, 16, 1669, 856, 29017, 2234, 10541, 675, 7, 16, 340, 7000, 11, 1848, 1669, 856, 29017, 18282, 10541, 5062, 61680, 16, 340, 6948, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestToString checks that formatting a SignedGossipMessage does not print
// raw payload/signature bytes, while bare envelopes and DataMessages do
// print them — demonstrated by the presence/absence of "2" in the output.
func TestToString(t *testing.T) {
	// Ensure we don't print the byte content when we
	// log messages.
	// Each payload or signature contains '2' so we would've logged
	// them if not for the overloading of the String() method in SignedGossipMessage

	// The following line proves that the envelopes constructed in this test
	// have "2" in them when they are printed
	assert.Contains(t, fmt.Sprintf("%v", envelopes()[0]), "2")
	// and the following does the same for payloads:
	dMsg := &DataMessage{
		Payload: &Payload{
			SeqNum: 3,
			Data:   []byte{2, 2, 2, 2, 2},
		},
	}
	assert.Contains(t, fmt.Sprintf("%v", dMsg), "2")

	// Now we construct all types of messages that have envelopes or payloads in them
	// and see that "2" is not outputted into their formatting even though it is found
	// as a sub-message of the outer message.

	// Case 1: DataMsg content plus an explicit Envelope with a SecretEnvelope.
	sMsg := &SignedGossipMessage{
		GossipMessage: &GossipMessage{
			Tag:     GossipMessage_EMPTY,
			Nonce:   5,
			Channel: []byte("A"),
			Content: &GossipMessage_DataMsg{
				DataMsg: &DataMessage{
					Payload: &Payload{
						SeqNum: 3,
						Data:   []byte{2, 2, 2, 2, 2},
					},
				},
			},
		},
		Envelope: &Envelope{
			Payload:   []byte{0, 1, 2, 3, 4, 5, 6},
			Signature: []byte{0, 1, 2},
			SecretEnvelope: &SecretEnvelope{
				Payload:   []byte{0, 1, 2, 3, 4, 5},
				Signature: []byte{0, 1, 2},
			},
		},
	}
	assert.NotContains(t, fmt.Sprintf("%v", sMsg), "2")

	// Case 2: DataUpdate content carrying nested envelopes.
	sMsg = &SignedGossipMessage{
		GossipMessage: &GossipMessage{
			Channel: []byte("A"),
			Tag:     GossipMessage_EMPTY,
			Nonce:   5,
			Content: &GossipMessage_DataUpdate{
				DataUpdate: &DataUpdate{
					Nonce:   11,
					MsgType: PullMsgType_BLOCK_MSG,
					Data:    envelopes(),
				},
			},
		},
		Envelope: envelopes()[0],
	}
	assert.NotContains(t, fmt.Sprintf("%v", sMsg), "2")

	// Case 3: MembershipResponse with envelopes in both Alive and Dead lists.
	sMsg = &SignedGossipMessage{
		GossipMessage: &GossipMessage{
			Channel: []byte("A"),
			Tag:     GossipMessage_EMPTY,
			Nonce:   5,
			Content: &GossipMessage_MemRes{
				MemRes: &MembershipResponse{
					Alive: envelopes(),
					Dead:  envelopes(),
				},
			},
		},
		Envelope: envelopes()[0],
	}
	assert.NotContains(t, fmt.Sprintf("%v", sMsg), "2")

	// Case 4: StateSnapshot content with envelope elements.
	sMsg = &SignedGossipMessage{
		GossipMessage: &GossipMessage{
			Channel: []byte("A"),
			Tag:     GossipMessage_EMPTY,
			Nonce:   5,
			Content: &GossipMessage_StateSnapshot{
				StateSnapshot: &StateInfoSnapshot{
					Elements: envelopes(),
				},
			},
		},
		Envelope: envelopes()[0],
	}
	assert.NotContains(t, fmt.Sprintf("%v", sMsg), "2")

	// Case 5: RemoteStateResponse with a raw payload containing 2s.
	sMsg = &SignedGossipMessage{
		GossipMessage: &GossipMessage{
			Channel: []byte("A"),
			Tag:     GossipMessage_EMPTY,
			Nonce:   5,
			Content: &GossipMessage_StateResponse{
				StateResponse: &RemoteStateResponse{
					Payloads: []*Payload{
						{Data: []byte{2, 2, 2}},
					},
				},
			},
		},
		Envelope: envelopes()[0],
	}
	assert.NotContains(t, fmt.Sprintf("%v", sMsg), "2")
}
explode_data.jsonl/48937
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1282 }
[ 2830, 3393, 5870, 1155, 353, 8840, 836, 8, 341, 197, 322, 29279, 582, 1513, 944, 1173, 279, 4922, 2213, 979, 582, 198, 197, 322, 1487, 6605, 624, 197, 322, 8886, 7729, 476, 11957, 5610, 364, 17, 6, 773, 582, 1035, 3003, 13726, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSanitizeTeam verifies that SanitizeTeam strips the sensitive team
// fields (Email, InviteId) depending on the caller's roles and whether the
// caller is a member/admin of the team in question.
func TestSanitizeTeam(t *testing.T) {
	th := Setup(t)
	defer th.TearDown()

	// Shared fixture team; copyTeam gives each subtest its own copy so
	// sanitization in one case cannot affect another.
	team := &model.Team{
		Id:             model.NewId(),
		Email:          th.MakeEmail(),
		InviteId:       model.NewId(),
		AllowedDomains: "example.com",
	}
	copyTeam := func() *model.Team {
		copy := &model.Team{}
		*copy = *team
		return copy
	}

	t.Run("not a user of the team", func(t *testing.T) {
		userId := model.NewId()
		session := model.Session{
			Roles: model.SYSTEM_USER_ROLE_ID,
			TeamMembers: []*model.TeamMember{
				{
					UserId: userId,
					TeamId: model.NewId(), // member of a different team
					Roles:  model.TEAM_USER_ROLE_ID,
				},
			},
		}

		sanitized := th.App.SanitizeTeam(session, copyTeam())
		require.Empty(t, sanitized.Email, "should've sanitized team")
		require.Empty(t, sanitized.InviteId, "should've sanitized inviteid")
	})

	t.Run("user of the team", func(t *testing.T) {
		userId := model.NewId()
		session := model.Session{
			Roles: model.SYSTEM_USER_ROLE_ID,
			TeamMembers: []*model.TeamMember{
				{
					UserId: userId,
					TeamId: team.Id,
					Roles:  model.TEAM_USER_ROLE_ID,
				},
			},
		}

		sanitized := th.App.SanitizeTeam(session, copyTeam())
		require.Empty(t, sanitized.Email, "should've sanitized team")
		require.NotEmpty(t, sanitized.InviteId, "should have not sanitized inviteid")
	})

	t.Run("team admin", func(t *testing.T) {
		userId := model.NewId()
		session := model.Session{
			Roles: model.SYSTEM_USER_ROLE_ID,
			TeamMembers: []*model.TeamMember{
				{
					UserId: userId,
					TeamId: team.Id,
					Roles:  model.TEAM_USER_ROLE_ID + " " + model.TEAM_ADMIN_ROLE_ID,
				},
			},
		}

		sanitized := th.App.SanitizeTeam(session, copyTeam())
		require.NotEmpty(t, sanitized.Email, "shouldn't have sanitized team")
		require.NotEmpty(t, sanitized.InviteId, "shouldn't have sanitized inviteid")
	})

	t.Run("team admin of another team", func(t *testing.T) {
		userId := model.NewId()
		session := model.Session{
			Roles: model.SYSTEM_USER_ROLE_ID,
			TeamMembers: []*model.TeamMember{
				{
					UserId: userId,
					TeamId: model.NewId(), // admin, but of a different team
					Roles:  model.TEAM_USER_ROLE_ID + " " + model.TEAM_ADMIN_ROLE_ID,
				},
			},
		}

		sanitized := th.App.SanitizeTeam(session, copyTeam())
		require.Empty(t, sanitized.Email, "should've sanitized team")
		require.Empty(t, sanitized.InviteId, "should've sanitized inviteid")
	})

	t.Run("system admin, not a user of team", func(t *testing.T) {
		userId := model.NewId()
		session := model.Session{
			Roles: model.SYSTEM_USER_ROLE_ID + " " + model.SYSTEM_ADMIN_ROLE_ID,
			TeamMembers: []*model.TeamMember{
				{
					UserId: userId,
					TeamId: model.NewId(),
					Roles:  model.TEAM_USER_ROLE_ID,
				},
			},
		}

		sanitized := th.App.SanitizeTeam(session, copyTeam())
		require.NotEmpty(t, sanitized.Email, "shouldn't have sanitized team")
		require.NotEmpty(t, sanitized.InviteId, "shouldn't have sanitized inviteid")
	})

	t.Run("system admin, user of team", func(t *testing.T) {
		userId := model.NewId()
		session := model.Session{
			Roles: model.SYSTEM_USER_ROLE_ID + " " + model.SYSTEM_ADMIN_ROLE_ID,
			TeamMembers: []*model.TeamMember{
				{
					UserId: userId,
					TeamId: team.Id,
					Roles:  model.TEAM_USER_ROLE_ID,
				},
			},
		}

		sanitized := th.App.SanitizeTeam(session, copyTeam())
		require.NotEmpty(t, sanitized.Email, "shouldn't have sanitized team")
		require.NotEmpty(t, sanitized.InviteId, "shouldn't have sanitized inviteid")
	})
}
explode_data.jsonl/30274
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1505 }
[ 2830, 3393, 23729, 26310, 14597, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 340, 16867, 270, 836, 682, 4454, 2822, 197, 9196, 1669, 609, 2528, 65842, 515, 197, 67211, 25, 1797, 1614, 7121, 764, 3148, 197, 197, 4781, 25, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMaxValue exercises the MaxValue aggregate: selector accessors, type
// inference before and after the first accumulated value, comparison
// semantics, and the ValueExp methods that are unsupported on it.
func TestMaxValue(t *testing.T) {
	cval := &MaxValue{sel: "db1.table1.amount"}
	require.Equal(t, "db1.table1.amount", cval.Selector())
	require.True(t, cval.ColBounded())

	// Before any value is accumulated the type cannot be inferred.
	_, err := cval.inferType(nil, nil, "db1", "table1")
	require.ErrorIs(t, err, ErrUnexpected)

	err = cval.requiresType(IntegerType, nil, nil, "db1", "table1")
	require.ErrorIs(t, err, ErrUnexpected)

	// The first update fixes the aggregate's type to IntegerType.
	err = cval.updateWith(&Number{val: 10})
	require.NoError(t, err)
	require.Equal(t, IntegerType, cval.Type())

	cmp, err := cval.Compare(&Number{val: 10})
	require.NoError(t, err)
	require.Equal(t, 0, cmp)

	// Cross-type comparisons and updates are rejected.
	_, err = cval.Compare(&Bool{val: true})
	require.Equal(t, ErrNotComparableValues, err)

	err = cval.updateWith(&Bool{val: true})
	require.Equal(t, ErrNotComparableValues, err)

	// Updating with 2 keeps the max at 10, so 2 compares below and 11 above.
	err = cval.updateWith(&Number{val: 2})
	require.NoError(t, err)

	cmp, err = cval.Compare(&Number{val: 2})
	require.NoError(t, err)
	require.Equal(t, 1, cmp)

	cmp, err = cval.Compare(&Number{val: 11})
	require.NoError(t, err)
	require.Equal(t, -1, cmp)

	// ValueExp
	sqlt, err := cval.inferType(nil, nil, "db1", "table1")
	require.NoError(t, err)
	require.Equal(t, IntegerType, sqlt)

	err = cval.requiresType(IntegerType, nil, nil, "db1", "table1")
	require.NoError(t, err)

	err = cval.requiresType(BooleanType, nil, nil, "db1", "table1")
	require.ErrorIs(t, err, ErrNotComparableValues)

	// These ValueExp operations are not meaningful for an aggregate value.
	_, err = cval.jointColumnTo(nil, "table1")
	require.ErrorIs(t, err, ErrUnexpected)

	_, err = cval.substitute(nil)
	require.ErrorIs(t, err, ErrUnexpected)

	_, err = cval.reduce(nil, nil, "db1", "table1")
	require.ErrorIs(t, err, ErrUnexpected)

	require.Nil(t, cval.reduceSelectors(nil, "db1", "table1"))
	require.False(t, cval.isConstant())
	require.Nil(t, cval.selectorRanges(nil, nil, nil))
}
explode_data.jsonl/31710
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 725 }
[ 2830, 3393, 5974, 1130, 1155, 353, 8840, 836, 8, 341, 1444, 831, 1669, 609, 5974, 1130, 90, 9507, 25, 330, 1999, 16, 10336, 16, 25276, 16707, 17957, 12808, 1155, 11, 330, 1999, 16, 10336, 16, 25276, 497, 272, 831, 14752, 269, 2398, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMarkResourceNotOwned(t *testing.T) { pa := pa(map[string]string{}) pa.Status.MarkResourceNotOwned("doesn't", "matter") active := pa.Status.GetCondition("Active") if active.Status != corev1.ConditionFalse { t.Errorf("TestMarkResourceNotOwned expected active.Status: False got: %v", active.Status) } if active.Reason != "NotOwned" { t.Errorf("TestMarkResourceNotOwned expected active.Reason: NotOwned got: %v", active.Reason) } }
explode_data.jsonl/27229
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 8949, 4783, 2623, 57641, 1155, 353, 8840, 836, 8, 341, 3223, 64, 1669, 7106, 9147, 14032, 30953, 37790, 3223, 64, 10538, 75888, 4783, 2623, 57641, 445, 71914, 944, 497, 330, 58965, 1138, 74770, 1669, 7106, 10538, 2234, 10547, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_MarshalText(t *testing.T) { r := require.New(t) n := New("mark") b, err := n.MarshalText() r.NoError(err) r.Equal("mark", string(b)) r.NoError((&n).UnmarshalText([]byte("bates"))) r.Equal("bates", n.String()) }
explode_data.jsonl/82650
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 105 }
[ 2830, 3393, 1245, 28423, 1178, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 1373, 7121, 1155, 692, 9038, 1669, 1532, 445, 3987, 1138, 2233, 11, 1848, 1669, 308, 37271, 1178, 741, 7000, 35699, 3964, 340, 7000, 12808, 445, 3987, 497, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestServerHTTPPost drives the mock server with a POST whose header, query
// and body match only the third configured scenario, and asserts that that
// scenario (status 200, body "Hello World") is the one that answers.
func TestServerHTTPPost(t *testing.T) {
	app := model.App{
		API: map[string][]model.API{
			"POST": []model.API{model.API{
				Endpoint:    "/post",
				Description: "Here is the description",
				Scenarios: []model.Scenario{
					// Accept header ("applc") does not match the request below.
					model.Scenario{
						Request: model.Request{
							Header: map[string][]string{"Accept": []string{"applc"}},
							Query:  map[string][]string{"id": []string{"one"}},
							Payload: model.Payload{
								Type: "json",
								Data: `{"df":"\\w+", "fd": "{{ignore_string}}"}`,
							},
						},
						Response: model.Response{
							Payload: model.Payload{
								Type: "text",
								Data: "Hello World",
							},
							StatusCode: 201,
						},
					},
					// Query value ("oe") does not match the request below.
					model.Scenario{
						Request: model.Request{
							Header: map[string][]string{"Accept": []string{"application/json"}},
							Query:  map[string][]string{"id": []string{"oe"}},
							Payload: model.Payload{
								Type: "json",
								Data: `{"df":"\\w+", "fd": "{{ignore_string}}"}`,
							},
						},
						Response: model.Response{
							Payload: model.Payload{
								Type: "text",
								Data: "Hello World",
							},
							StatusCode: 202,
						},
					},
					// Header, query and payload all match: this one should answer.
					model.Scenario{
						Request: model.Request{
							Header: map[string][]string{"Accept": []string{"application/json"}},
							Query:  map[string][]string{"id": []string{"one"}},
							Payload: model.Payload{
								Type: "json",
								Data: `{"df":"\\w+", "fd": "{{ignore_string}}"}`,
							},
						},
						Response: model.Response{
							Payload: model.Payload{
								Type: "text",
								Data: "Hello World",
							},
							StatusCode: 200,
							Delay:      1,
						},
					},
				},
			}},
		},
	}

	server := Server{}
	server.SetWatcher(types.TestWatcher{})
	server.SetComparer(comparer.NewRegexComparer())
	server.SetApp(app)

	// Build a request matching the third scenario.
	req := httptest.NewRequest("POST", "/post", bytes.NewReader([]byte(`{"df":"abcd", "fd": "sdfsfs"}`)))
	req.Header.Set("Accept", "application/json")
	q := req.URL.Query()
	q.Add("id", "one")
	req.URL.RawQuery = q.Encode()

	w := httptest.NewRecorder()
	server.ServeHTTP(w, req)
	resp := w.Result()
	body, _ := ioutil.ReadAll(resp.Body)

	if (resp.StatusCode) != 200 {
		t.Errorf("Status code should be 200 got %v", resp.StatusCode)
	}
	if !strings.Contains(string(body), "Hello World") {
		t.Error("Body not returned as expected")
		t.Error(string(body))
	}
}
explode_data.jsonl/17760
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1153 }
[ 2830, 3393, 5475, 9230, 4133, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1614, 5105, 515, 197, 197, 7082, 25, 2415, 14032, 45725, 2528, 24922, 515, 298, 197, 1, 2946, 788, 3056, 2528, 24922, 90, 2528, 24922, 515, 571, 197, 27380, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLogAndDebug(t *testing.T) { buf := &bytes.Buffer{} Logger = *log.New(buf, "", 0) Logger.Printf("foo") if buf.String() != "foo\n" { t.Fatalf("want %q, got %q", "foo\n", buf.String()) } buf2 := &bytes.Buffer{} Logger = *log.New(buf2, "", 0) Debug("foo") if buf2.String() != "" { t.Fatalf("expected no log output with debugging disabled, got %q", buf2.String()) } EnableDebug() Debug("foo") if buf2.String() != "foo\n" { t.Fatalf("want %q with debugging enabled, got %q", "foo\n", buf2.String()) } }
explode_data.jsonl/37612
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 2201, 3036, 7939, 1155, 353, 8840, 836, 8, 341, 26398, 1669, 609, 9651, 22622, 16094, 55861, 284, 353, 839, 7121, 10731, 11, 7342, 220, 15, 340, 55861, 19367, 445, 7975, 1138, 743, 6607, 6431, 368, 961, 330, 7975, 1699, 1,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTxnCoordSenderAddLockOnError(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) s := createTestDB(t) defer s.Stop() ctx := context.Background() // Create a transaction with intent at "x". key := roachpb.Key("x") txn := kv.NewTxn(ctx, s.DB, 0 /* gatewayNodeID */) tc := txn.Sender().(*TxnCoordSender) // Write so that the coordinator begins tracking this txn. if err := txn.Put(ctx, "x", "y"); err != nil { t.Fatal(err) } { err := txn.CPut(ctx, key, []byte("x"), kvclientutils.StrToCPutExistingValue("born to fail")) if !errors.HasType(err, (*roachpb.ConditionFailedError)(nil)) { t.Fatal(err) } } tc.interceptorAlloc.txnPipeliner.lockFootprint.mergeAndSort() lockSpans := tc.interceptorAlloc.txnPipeliner.lockFootprint.asSlice() expSpans := []roachpb.Span{{Key: key, EndKey: []byte("")}} equal := !reflect.DeepEqual(lockSpans, expSpans) if err := txn.Rollback(ctx); err != nil { t.Fatal(err) } if !equal { t.Fatalf("expected stored locks %v, got %v", expSpans, lockSpans) } }
explode_data.jsonl/76886
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 31584, 77, 19437, 20381, 2212, 11989, 74945, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 1903, 1669, 1855, 2271, 3506, 1155, 340, 16867, 274, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestOpenStaticFileGzip_1(t *testing.T) { file, _ := os.Open(licenseFile) var zipBuf bytes.Buffer fileWriter, _ := gzip.NewWriterLevel(&zipBuf, gzip.BestCompression) io.Copy(fileWriter, file) fileWriter.Close() content, _ := ioutil.ReadAll(&zipBuf) testOpenFile("gzip", content, t) }
explode_data.jsonl/61579
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 5002, 11690, 1703, 38, 9964, 62, 16, 1155, 353, 8840, 836, 8, 341, 17661, 11, 716, 1669, 2643, 12953, 7, 13266, 1703, 340, 2405, 10308, 15064, 5820, 22622, 198, 17661, 6492, 11, 716, 1669, 57795, 7121, 6492, 4449, 2099, 99...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetBundle is a table test for the GetBundle RPC: it covers the
// bundle-found and bundle-not-found cases plus output-mask filtering, and
// asserts the audit log entries emitted for each call.
func TestGetBundle(t *testing.T) {
	for _, tt := range []struct {
		name       string
		err        string
		logMsg     string
		outputMask *types.BundleMask
		expectLogs []spiretest.LogEntry
		setBundle  bool
	}{
		{
			name:      "Get bundle returns bundle",
			setBundle: true,
			expectLogs: []spiretest.LogEntry{
				{
					Level:   logrus.InfoLevel,
					Message: "API accessed",
					Data: logrus.Fields{
						telemetry.Status:        "success",
						telemetry.TrustDomainID: "example.org",
						telemetry.Type:          "audit",
					},
				},
			},
		},
		{
			// No bundle is stored for this case, so the call must fail.
			name:   "Bundle not found",
			err:    `bundle not found`,
			logMsg: `Bundle not found`,
			expectLogs: []spiretest.LogEntry{
				{
					Level:   logrus.ErrorLevel,
					Message: "Bundle not found",
				},
				{
					Level:   logrus.InfoLevel,
					Message: "API accessed",
					Data: logrus.Fields{
						telemetry.Status:        "error",
						telemetry.StatusCode:    "NotFound",
						telemetry.StatusMessage: "bundle not found",
						telemetry.TrustDomainID: "example.org",
						telemetry.Type:          "audit",
					},
				},
			},
		},
		{
			name:      "Get bundle does not return fields filtered by mask",
			setBundle: true,
			outputMask: &types.BundleMask{
				RefreshHint:     false,
				SequenceNumber:  false,
				X509Authorities: false,
				JwtAuthorities:  false,
			},
			expectLogs: []spiretest.LogEntry{
				{
					Level:   logrus.InfoLevel,
					Message: "API accessed",
					Data: logrus.Fields{
						telemetry.Status:        "success",
						telemetry.TrustDomainID: "example.org",
						telemetry.Type:          "audit",
					},
				},
			},
		},
	} {
		tt := tt // capture range variable for the subtest closure
		t.Run(tt.name, func(t *testing.T) {
			test := setupServiceTest(t)
			defer test.Cleanup()

			bundle := makeValidCommonBundle(t, serverTrustDomain)
			if tt.setBundle {
				test.setBundle(t, bundle)
			}

			b, err := test.client.GetBundle(context.Background(), &bundlev1.GetBundleRequest{
				OutputMask: tt.outputMask,
			})
			spiretest.AssertLogs(t, test.logHook.AllEntries(), tt.expectLogs)
			if tt.err != "" {
				require.Nil(t, b)
				require.Error(t, err)
				require.Contains(t, err.Error(), tt.err)
				return
			}

			require.NoError(t, err)
			require.NotNil(t, b)
			assertCommonBundleWithMask(t, bundle, b, tt.outputMask)
		})
	}
}
explode_data.jsonl/45886
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1135 }
[ 2830, 3393, 1949, 8409, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 9859, 286, 914, 198, 197, 6725, 6611, 257, 914, 198, 197, 21170, 12686, 353, 9242, 14757, 12686, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReportsTypeErrors(t *testing.T) { for _, file := range []string{ "err1.go", "err2.go", "err3.go", "issue7757.go", "issue8442.go", "issue11097a.go", "issue11097b.go", "issue13129.go", "issue13423.go", "issue13467.go", "issue13635.go", "issue13830.go", "issue16116.go", "issue16591.go", "issue18452.go", "issue18889.go", "issue26745.go", "issue28721.go", } { check(t, file) } if sizeofLongDouble(t) > 8 { for _, file := range []string{ "err4.go", "issue28069.go", } { check(t, file) } } }
explode_data.jsonl/5828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 23748, 929, 13877, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1034, 1669, 2088, 3056, 917, 515, 197, 197, 1, 615, 16, 18002, 756, 197, 197, 1, 615, 17, 18002, 756, 197, 197, 1, 615, 18, 18002, 756, 197, 197, 1, 11159, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestChaincodeInfoProvider checks GetDeployedChaincodeInfo: an unknown
// ledger errors, a name/version/hash mismatch returns nil info without
// error, and only an exact match returns the deployed info.
func TestChaincodeInfoProvider(t *testing.T) {
	InitializeTestEnv()
	defer CleanupTestEnv()
	gb, _ := test.MakeGenesisBlock("ledger1")
	CreateLedger(gb)

	// Stub the deployed-chaincode lookup so any name resolves to a CCInfo
	// whose name, version and hash all equal ccName.
	mockDeployedCCInfoProvider := &mock.DeployedChaincodeInfoProvider{}
	mockDeployedCCInfoProvider.ChaincodeInfoStub = func(ccName string, qe ledger.SimpleQueryExecutor) (*ledger.DeployedChaincodeInfo, error) {
		return constructTestCCInfo(ccName, ccName, ccName), nil
	}
	ccInfoProvider := &chaincodeInfoProviderImpl{
		platforms.NewRegistry(&golang.Platform{}),
		mockDeployedCCInfoProvider,
	}

	// Unknown ledger id -> error.
	_, err := ccInfoProvider.GetDeployedChaincodeInfo("ledger2", constructTestCCDef("cc2", "1.0", "cc2Hash"))
	t.Logf("Expected error received = %s", err)
	assert.Error(t, err)

	// Version mismatch -> nil info, no error.
	ccInfo, err := ccInfoProvider.GetDeployedChaincodeInfo("ledger1", constructTestCCDef("cc1", "non-matching-version", "cc1"))
	assert.NoError(t, err)
	assert.Nil(t, ccInfo)

	// Hash mismatch -> nil info, no error.
	ccInfo, err = ccInfoProvider.GetDeployedChaincodeInfo("ledger1", constructTestCCDef("cc1", "cc1", "non-matching-hash"))
	assert.NoError(t, err)
	assert.Nil(t, ccInfo)

	// Exact match -> the deployed info is returned.
	ccInfo, err = ccInfoProvider.GetDeployedChaincodeInfo("ledger1", constructTestCCDef("cc1", "cc1", "cc1"))
	assert.NoError(t, err)
	assert.Equal(t, constructTestCCInfo("cc1", "cc1", "cc1"), ccInfo)
}
explode_data.jsonl/77991
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 465 }
[ 2830, 3393, 18837, 1851, 1731, 5179, 1155, 353, 8840, 836, 8, 341, 93904, 2271, 14359, 741, 16867, 53512, 2271, 14359, 741, 3174, 65, 11, 716, 1669, 1273, 50133, 84652, 4713, 445, 50704, 16, 1138, 75569, 60850, 1389, 3268, 65, 692, 7733...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestColDecimal128Array round-trips an array column of Decimal128 values:
// encode, compare against the golden file, decode back and verify equality,
// and check the decoder fails cleanly on empty input.
func TestColDecimal128Array(t *testing.T) {
	const rows = 50
	data := NewArrDecimal128()
	// Each row is a 3-element array {i, i+1, i+2}.
	for i := 0; i < rows; i++ {
		data.AppendDecimal128([]Decimal128{
			Decimal128FromInt(i),
			Decimal128FromInt(i + 1),
			Decimal128FromInt(i + 2),
		})
	}

	var buf Buffer
	data.EncodeColumn(&buf)
	t.Run("Golden", func(t *testing.T) {
		gold.Bytes(t, buf.Buf, "col_arr_decimal128")
	})
	t.Run("Ok", func(t *testing.T) {
		br := bytes.NewReader(buf.Buf)
		r := NewReader(br)

		dec := NewArrDecimal128()
		require.NoError(t, dec.DecodeColumn(r, rows))
		require.Equal(t, data, dec)
		require.Equal(t, rows, dec.Rows())
		// Reset clears the rows but keeps the column type.
		dec.Reset()
		require.Equal(t, 0, dec.Rows())
		require.Equal(t, ColumnTypeDecimal128.Array(), dec.Type())
	})
	t.Run("ErrUnexpectedEOF", func(t *testing.T) {
		// Decoding rows from an empty stream must surface io.ErrUnexpectedEOF.
		r := NewReader(bytes.NewReader(nil))
		dec := NewArrDecimal128()
		require.ErrorIs(t, dec.DecodeColumn(r, rows), io.ErrUnexpectedEOF)
	})
}
explode_data.jsonl/68544
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 405 }
[ 2830, 3393, 6127, 11269, 16, 17, 23, 1857, 1155, 353, 8840, 836, 8, 341, 4777, 6978, 284, 220, 20, 15, 198, 8924, 1669, 1532, 8838, 11269, 16, 17, 23, 741, 2023, 600, 1669, 220, 15, 26, 600, 366, 6978, 26, 600, 1027, 341, 197, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShouldCheckPasswordArgon2idHashedWithAuthelia(t *testing.T) { password := testPassword hash, err := HashPassword(password, "", HashingAlgorithmArgon2id, schema.DefaultCIPasswordConfiguration.Iterations, schema.DefaultCIPasswordConfiguration.Memory*1024, schema.DefaultCIPasswordConfiguration.Parallelism, schema.DefaultCIPasswordConfiguration.KeyLength, schema.DefaultCIPasswordConfiguration.SaltLength) assert.NoError(t, err) equal, err := CheckPassword(password, hash) require.NoError(t, err) assert.True(t, equal) }
explode_data.jsonl/40195
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 168 }
[ 2830, 3393, 14996, 3973, 4876, 2735, 263, 17, 307, 6370, 291, 2354, 5087, 35929, 1155, 353, 8840, 836, 8, 341, 58199, 1669, 1273, 4876, 198, 50333, 11, 1848, 1669, 6531, 4876, 22768, 11, 7342, 6531, 287, 27847, 2735, 263, 17, 307, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAlias(t *testing.T) { fmt.Println("===== Alias =====") var a = Alias(NewStatement("SELECT tt.id, tt.name FROM test_table AS tt WHERE tt.id=?", 100), "a") fmt.Println(a.ToSQL()) }
explode_data.jsonl/1258
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 22720, 1155, 353, 8840, 836, 8, 341, 11009, 12419, 445, 46725, 58040, 30742, 1138, 2405, 264, 284, 58040, 35063, 8636, 445, 4858, 17853, 1764, 11, 17853, 2644, 4295, 1273, 5237, 5752, 17853, 5288, 17853, 1764, 87873, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestOpen_ErrVersionMismatch ensures that opening a database file whose
// meta pages carry a bumped version number fails with ErrVersionMismatch.
func TestOpen_ErrVersionMismatch(t *testing.T) {
	// The meta-page offsets below assume the build-time pageSize equals the
	// OS page size; skip otherwise.
	if pageSize != os.Getpagesize() {
		t.Skip("page size mismatch")
	}

	// Create empty database.
	db := MustOpenDB()
	path := db.Path()
	defer db.MustClose()

	// Close database.
	if err := db.DB.Close(); err != nil {
		t.Fatal(err)
	}

	// Read data file.
	buf, err := ioutil.ReadFile(path)
	if err != nil {
		t.Fatal(err)
	}

	// Rewrite meta pages: bump the version field in both meta page copies
	// (one at page 0, one at page 1).
	meta0 := (*meta)(unsafe.Pointer(&buf[pageHeaderSize]))
	meta0.version++
	meta1 := (*meta)(unsafe.Pointer(&buf[pageSize+pageHeaderSize]))
	meta1.version++
	if err := ioutil.WriteFile(path, buf, 0666); err != nil {
		t.Fatal(err)
	}

	// Reopen data file; the doctored version must be rejected.
	if _, err := bolt.Open(path, 0666, nil); err != bolt.ErrVersionMismatch {
		t.Fatalf("unexpected error: %s", err)
	}
}
explode_data.jsonl/27461
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 315 }
[ 2830, 3393, 5002, 93623, 5637, 82572, 1155, 353, 8840, 836, 8, 341, 743, 22635, 961, 2643, 2234, 84917, 368, 341, 197, 3244, 57776, 445, 2893, 1379, 35301, 1138, 197, 630, 197, 322, 4230, 4287, 4625, 624, 20939, 1669, 15465, 5002, 3506,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestFromMap checks FromMap conversions from name->address maps to a List:
// empty and nil maps yield the zero List; a single entry yields a
// one-element List.
func TestFromMap(t *testing.T) {
	cases := []struct {
		in       map[string]string
		expected List
	}{
		{map[string]string{}, *new(List)},
		{nil, *new(List)},
		{
			map[string]string{"Martin": "martin@example.com"},
			List{Address{Name: "Martin", Address: "martin@example.com"}},
		},
		// The multi-entry case below is intentionally disabled: map
		// iteration order is random, so the resulting List order would be
		// nondeterministic and the DeepEqual check would be flaky.
		//{
		//	map[string]string{"Martin": "martin@example.com", "foo": "bar@example.com"},
		//	List{
		//		Address{Name: "Martin", Address: "martin@example.com"},
		//		Address{Name: "foo", Address: "bar@example.com"},
		//	},
		//},
	}
	for _, tc := range cases {
		t.Run(fmt.Sprintf("%#v", tc.in), func(t *testing.T) {
			got := FromMap(tc.in)
			if !reflect.DeepEqual(tc.expected, got) {
				t.Errorf(diff.Cmp(tc.expected, got))
			}
		})
	}
}
explode_data.jsonl/70344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 332 }
[ 2830, 3393, 3830, 2227, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 17430, 981, 2415, 14032, 30953, 198, 197, 42400, 1759, 198, 197, 59403, 197, 197, 90, 2186, 14032, 30953, 22655, 353, 931, 10278, 39781, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCreateAssetTxSyntacticVerifyInitialStates covers SyntacticVerify
// failures caused by the InitialState list: an empty list, an FxIndex out
// of range, unsorted states, and duplicate states.
func TestCreateAssetTxSyntacticVerifyInitialStates(t *testing.T) {
	tx, c, ctx := validCreateAssetTx(t)

	// Empty state list must be rejected.
	tx.States = []*InitialState{}
	if err := tx.SyntacticVerify(ctx, c, assetID, 0, 0, 1); err == nil {
		t.Fatal("CreateAssetTx should have failed syntactic verification due to no Initial States")
	}

	tx.States = []*InitialState{
		{
			FxIndex: 5, // Invalid FxIndex
			Outs: []verify.State{
				&secp256k1fx.TransferOutput{
					Amt: 12345,
					OutputOwners: secp256k1fx.OutputOwners{
						Threshold: 1,
						Addrs:     []ids.ShortID{keys[0].PublicKey().Address()},
					},
				},
			},
		},
	}
	// NumFxs is 1, so FxIndex 5 should cause an error
	if err := tx.SyntacticVerify(ctx, c, assetID, 0, 0, 1); err == nil {
		t.Fatal("CreateAssetTx should have failed syntactic verification due to invalid Fx")
	}

	// Three otherwise-identical states that differ only in FxIndex.
	uniqueStates := []*InitialState{
		{
			FxIndex: 0,
			Outs: []verify.State{
				&secp256k1fx.TransferOutput{
					Amt: 12345,
					OutputOwners: secp256k1fx.OutputOwners{
						Threshold: 1,
						Addrs:     []ids.ShortID{keys[0].PublicKey().Address()},
					},
				},
			},
		},
		{
			FxIndex: 1,
			Outs: []verify.State{
				&secp256k1fx.TransferOutput{
					Amt: 12345,
					OutputOwners: secp256k1fx.OutputOwners{
						Threshold: 1,
						Addrs:     []ids.ShortID{keys[0].PublicKey().Address()},
					},
				},
			},
		},
		{
			FxIndex: 2,
			Outs: []verify.State{
				&secp256k1fx.TransferOutput{
					Amt: 12345,
					OutputOwners: secp256k1fx.OutputOwners{
						Threshold: 1,
						Addrs:     []ids.ShortID{keys[0].PublicKey().Address()},
					},
				},
			},
		},
	}
	sortInitialStates(uniqueStates)

	// Put states in unsorted order
	tx.States = []*InitialState{
		uniqueStates[2],
		uniqueStates[0],
	}
	if err := tx.SyntacticVerify(ctx, c, assetID, 0, 0, 3); err == nil {
		t.Fatal("CreateAssetTx should have failed syntactic verification due to non-sorted initial states")
	}

	// Duplicate states must be rejected as well.
	tx.States = []*InitialState{
		uniqueStates[0],
		uniqueStates[0],
	}
	if err := tx.SyntacticVerify(ctx, c, assetID, 0, 0, 3); err == nil {
		t.Fatal("CreateAssetTx should have failed syntactic verification due to non-unique initial states")
	}
}
explode_data.jsonl/73233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 967 }
[ 2830, 3393, 4021, 16604, 31584, 34667, 406, 23170, 32627, 6341, 23256, 1155, 353, 8840, 836, 8, 341, 46237, 11, 272, 11, 5635, 1669, 2697, 4021, 16604, 31584, 1155, 692, 46237, 7758, 973, 284, 29838, 80425, 16094, 743, 1848, 1669, 9854, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPatch(t *testing.T) { tcs := []struct { resource string name string namespace string patch []byte want *unstructured.Unstructured path string }{ { resource: "rtest", name: "normal_patch", path: "/apis/gtest/vtest/rtest/normal_patch", patch: getJSON("gtest/vTest", "rTest", "normal_patch"), want: getObject("gtest/vTest", "rTest", "normal_patch"), }, { resource: "rtest", name: "namespaced_patch", namespace: "nstest", path: "/apis/gtest/vtest/namespaces/nstest/rtest/namespaced_patch", patch: getJSON("gtest/vTest", "rTest", "namespaced_patch"), want: getObject("gtest/vTest", "rTest", "namespaced_patch"), }, { resource: "rtest/srtest", name: "normal_subresource_patch", path: "/apis/gtest/vtest/rtest/normal_subresource_patch/srtest", patch: getJSON("gtest/vTest", "srTest", "normal_subresource_patch"), want: getObject("gtest/vTest", "srTest", "normal_subresource_patch"), }, { resource: "rtest/srtest", name: "namespaced_subresource_patch", namespace: "nstest", path: "/apis/gtest/vtest/namespaces/nstest/rtest/namespaced_subresource_patch/srtest", patch: getJSON("gtest/vTest", "srTest", "namespaced_subresource_patch"), want: getObject("gtest/vTest", "srTest", "namespaced_subresource_patch"), }, } for _, tc := range tcs { gv := &schema.GroupVersion{Group: "gtest", Version: "vtest"} resource := &metav1.APIResource{Name: tc.resource, Namespaced: len(tc.namespace) != 0} cl, srv, err := getClientServer(gv, func(w http.ResponseWriter, r *http.Request) { if r.Method != "PATCH" { t.Errorf("Patch(%q) got HTTP method %s. wanted PATCH", tc.name, r.Method) } if r.URL.Path != tc.path { t.Errorf("Patch(%q) got path %s. wanted %s", tc.name, r.URL.Path, tc.path) } content := r.Header.Get("Content-Type") if content != string(types.StrategicMergePatchType) { t.Errorf("Patch(%q) got Content-Type %s. 
wanted %s", tc.name, content, types.StrategicMergePatchType) } data, err := ioutil.ReadAll(r.Body) if err != nil { t.Errorf("Patch(%q) unexpected error reading body: %v", tc.name, err) w.WriteHeader(http.StatusInternalServerError) return } w.Header().Set("Content-Type", "application/json") w.Write(data) }) if err != nil { t.Errorf("unexpected error when creating client: %v", err) continue } defer srv.Close() got, err := cl.Resource(resource, tc.namespace).Patch(tc.name, types.StrategicMergePatchType, tc.patch) if err != nil { t.Errorf("unexpected error when patching %q: %v", tc.name, err) continue } if !reflect.DeepEqual(got, tc.want) { t.Errorf("Patch(%q) want: %v\ngot: %v", tc.name, tc.want, got) } } }
explode_data.jsonl/37547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1238 }
[ 2830, 3393, 43622, 1155, 353, 8840, 836, 8, 341, 3244, 4837, 1669, 3056, 1235, 341, 197, 50346, 220, 914, 198, 197, 11609, 414, 914, 198, 197, 56623, 914, 198, 197, 3223, 754, 257, 3056, 3782, 198, 197, 50780, 414, 353, 359, 51143, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLoginOK(t *testing.T) { testServer(func(s *core.Server) { headers := make(map[string]string) userpass := base64.StdEncoding.EncodeToString([]byte("badr.adnaan@gmail.com:mypassword")) headers["Authorization"] = "Basic " + userpass //headers["X-Access-Token"] = accessToken //make request client := &http.Client{Transport: &http.Transport{DisableKeepAlives: true}} r, _ := http.NewRequest("POST", fmt.Sprintf("%s%s", host, "/api/v1/developers/login/"), nil) for key, value := range headers { r.Header.Add(key, value) } res, err := client.Do(r) //res, err := testHTTP("GET", "/api/v1/developers/login/", headers) if err != nil { t.Fatalf("login failed! %v", err) } else { body, _ := ioutil.ReadAll(res.Body) if res.StatusCode != 200 { t.Fatalf("unable to login: %v", string(body)) } } }) }
explode_data.jsonl/42209
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 345 }
[ 2830, 3393, 6231, 3925, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 18552, 1141, 353, 2153, 22997, 8, 1476, 197, 67378, 1669, 1281, 9147, 14032, 30953, 340, 197, 19060, 6385, 1669, 2331, 21, 19, 36086, 14690, 50217, 5870, 10556, 3782, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_waitForPodStatus(t *testing.T) { t.Run("successfully errors on cancelled context", func(t *testing.T) { k := &KubeClient{ cli: kfake.NewSimpleClientset(), instanceID: "test", } podSpec := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "test", Namespace: defaultNamespace}, Spec: v1.PodSpec{ Containers: []v1.Container{ {Name: "test-pod", Image: "containous/whoami"}, }, }, } ctx, cancel := context.WithCancel(context.TODO()) cancel() err := k.waitForPodStatus(ctx, v1.PodRunning, podSpec) if err != context.Canceled { t.Errorf("waitForPodStatus should throw context cancellation error; err=%s", err) } }) t.Run("successfully errors on timeout", func(t *testing.T) { k := &KubeClient{ cli: kfake.NewSimpleClientset(), instanceID: "test", } podSpec := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{Name: "test", Namespace: defaultNamespace}, Spec: v1.PodSpec{ Containers: []v1.Container{ {Name: "test-pod", Image: "containous/whoami"}, }, }, } pod, err := k.cli.CoreV1().Pods(defaultNamespace).Create(context.Background(), podSpec, metav1.CreateOptions{}) if err != nil { t.Errorf("failed to create pod; err=%s", err) } defer k.cli.CoreV1().Pods(defaultNamespace).Delete(context.Background(), pod.Name, metav1.DeleteOptions{}) ctx, cancelFunc := context.WithTimeout(context.TODO(), 0*time.Second) defer cancelFunc() err = k.waitForPodStatus(ctx, v1.PodRunning, podSpec) if err != context.DeadlineExceeded { t.Errorf("waitForPodStatus should throw deadline exceeded error; err=%s", err) } }) }
explode_data.jsonl/20521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 665 }
[ 2830, 3393, 18760, 2461, 23527, 2522, 1155, 353, 8840, 836, 8, 1476, 3244, 16708, 445, 60505, 5975, 389, 25681, 2266, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 16463, 1669, 609, 42, 3760, 2959, 515, 298, 86448, 25, 286, 595, 30570...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdateTicker(t *testing.T) { t.Parallel() cp := currency.NewPairWithDelimiter(currency.BTC.String(), currency.USDT.String(), "/") _, err := f.UpdateTicker(context.Background(), cp, asset.Spot) if err != nil { t.Error(err) } }
explode_data.jsonl/15218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 4289, 87278, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 52018, 1669, 11413, 7121, 12443, 2354, 91098, 90475, 1785, 7749, 6431, 1507, 11413, 67672, 10599, 6431, 1507, 3521, 1138, 197, 6878, 1848, 1669, 282, 16689, 87...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestAbsCollection_Contains(t *testing.T) { intColl := NewIntCollection([]int{1, 2, 2, 3}) if intColl.Contains(1) != true { t.Fatal("contain 错误1") } if intColl.Contains(5) != false { t.Fatal("contain 错误2") } }
explode_data.jsonl/66455
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 27778, 6482, 62, 23805, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 11, 220, 17, 11, 220, 18, 3518, 743, 526, 15265, 11545, 7, 16, 8, 961, 830, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConvertToInt(t *testing.T) { tif := NewToIntFilter() resp, err := tif.Process([]string{"1", "2", "3"}) if err != nil { t.Fatal(err) } expected := []int{1, 2, 3} if !reflect.DeepEqual(expected, resp) { t.Fatalf("The expected is %v, the actual is %v", expected, resp) } }
explode_data.jsonl/74918
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 123 }
[ 2830, 3393, 12012, 38544, 1155, 353, 8840, 836, 8, 341, 3244, 333, 1669, 1532, 38544, 5632, 741, 34653, 11, 1848, 1669, 259, 333, 29012, 10556, 917, 4913, 16, 497, 330, 17, 497, 330, 18, 23625, 743, 1848, 961, 2092, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBasicUse(t *testing.T) { db, err := sql.Open("oci8", "C##STUDENT/123456@127.0.0.1:1521/ORCL") if err != nil { t.Errorf("Open sql Error") } col1Val := "a" col2Val := 123 col3Val := 12.33 col4Val := []byte{1, 2, 3} // INSERT for i := 0; i < 3; i++ { if i != 0 { _, err = db.Exec("insert into test ( col1, col2, col3, col4 ) values ( :1, :2, :3, :4)", col1Val+fmt.Sprint(i), col2Val+i, col3Val+float64(i), col4Val) } else { _, err = db.Exec("insert into test ( col1, col2, col3, col4 ) values ( :1, :2, :3, :4)", col1Val, col2Val+i, col3Val+float64(i), col4Val) } if err != nil { t.Errorf("INSERT error") } } // DELETE _, err = db.Exec("DELETE FROM TEST WHERE \"COL1\" = :1 AND \"COL2\" = :2", col1Val+fmt.Sprint(1), col2Val+1) if err != nil { t.Errorf("DELETE error") } // UPDATE _, err = db.Exec("UPDATE TEST SET COL1 = :1 ,col2 = :2 WHERE col1 = :3", col1Val, col2Val, col1Val+fmt.Sprint(2)) if err != nil { t.Errorf("UPDATE error") } db.Close() }
explode_data.jsonl/75856
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 478 }
[ 2830, 3393, 15944, 10253, 1155, 353, 8840, 836, 8, 341, 20939, 11, 1848, 1669, 5704, 12953, 445, 2119, 23, 497, 330, 34, 565, 784, 80950, 14, 16, 17, 18, 19, 20, 21, 31, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 16, 20, 17, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestActor(t *testing.T) { req := NewInvokeMethodRequest("test_method") req.WithActor("testActor", "1") assert.Equal(t, "testActor", req.Actor().GetActorType()) assert.Equal(t, "1", req.Actor().GetActorId()) }
explode_data.jsonl/46243
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 18870, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 1532, 17604, 3523, 1900, 445, 1944, 9032, 1138, 24395, 26124, 18870, 445, 1944, 18870, 497, 330, 16, 1138, 6948, 12808, 1155, 11, 330, 1944, 18870, 497, 4232, 76094, 1005, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1