text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestNewJsonValidator(t *testing.T) { testCases := []struct { draft string valid bool }{ { "draft-07", true, }, { "draft-06", false, }, { "", false, }, } t.Log("Given the need to test creation of new JsonValidator") { for index, testCase := range testCases { t.Logf("\tTest %d: When trying to create a JsonValidator with %s", index, testCase.draft) { if testCase.valid { if _, err := jsonvalidator.NewJsonValidator(testCase.draft); err != nil { t.Errorf("\t%s\tShould be able to get a reference to a JsonValidator: %v", failed, err) } else { t.Logf("\t%s\tShould be able to get a reference to a JsonValidator", succeed) } } else { if _, err := jsonvalidator.NewJsonValidator(testCase.draft); err == nil { t.Errorf("\t%s\tShould not be able to get a reference to a JsonValidator", failed) } else { t.Logf("\t%s\tShould not be able to get a reference to a JsonValidator: %v", succeed, err) } } } } } }
explode_data.jsonl/6712
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 502 }
[ 2830, 3393, 3564, 5014, 14256, 1155, 353, 8840, 836, 8, 972, 18185, 37302, 1669, 3056, 1235, 972, 197, 2698, 2944, 914, 319, 197, 56322, 1807, 319, 197, 92, 1666, 197, 197, 1666, 298, 197, 44917, 2944, 12, 15, 22, 4723, 298, 42808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestReadGeneric(t *testing.T) { // first create archive, that we will be able to read updateTestDir, _ := ioutil.TempDir("", "update") defer os.RemoveAll(updateTestDir) archive, err := WriteRootfsImageArchive(updateTestDir, RootfsImageStructOK) assert.NoError(t, err) assert.NotEqual(t, "", archive) // open archive file f, err := os.Open(archive) defer f.Close() assert.NoError(t, err) assert.NotNil(t, f) aReader := NewReader(f) _, err = aReader.Read() assert.NoError(t, err) // WriteRootfsImageArchive() uses `vexpress` as artifact devices_type_compatible f.Seek(0, 0) _, err = aReader.ReadCompatibleWithDevice("non-existing") assert.Error(t, err) f.Seek(0, 0) _, err = aReader.ReadCompatibleWithDevice("vexpress") assert.NoError(t, err) }
explode_data.jsonl/35298
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 4418, 19964, 1155, 353, 8840, 836, 8, 341, 197, 322, 1156, 1855, 18132, 11, 429, 582, 686, 387, 2952, 311, 1349, 198, 27175, 2271, 6184, 11, 716, 1669, 43144, 65009, 6184, 19814, 330, 2386, 1138, 16867, 2643, 84427, 31540, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGet(t *testing.T) { var want = "Iceland" var got = Get("IS") if got != want { t.Errorf("Expected to get %v, got %v instead", want, got) } }
explode_data.jsonl/52091
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 65 }
[ 2830, 3393, 1949, 1155, 353, 8840, 836, 8, 341, 2405, 1366, 284, 330, 40, 35216, 698, 2405, 2684, 284, 2126, 445, 1637, 5130, 743, 2684, 961, 1366, 341, 197, 3244, 13080, 445, 18896, 311, 633, 1018, 85, 11, 2684, 1018, 85, 4518, 497...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSizeReflectPutReflect_invalid(t *testing.T) { b := make([]byte, 8) var m map[string]int assert.Equal(t, -1, lex.Size(m)) lex.Reflect(b, m) lex.PutReflect(b, m) }
explode_data.jsonl/50174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 1695, 72789, 19103, 72789, 31433, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 1281, 10556, 3782, 11, 220, 23, 340, 2405, 296, 2415, 14032, 63025, 198, 6948, 12808, 1155, 11, 481, 16, 11, 22429, 2465, 1255, 1171, 197, 2571, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestClusterScope_GetVCN(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() vcnClient := mock_vcn.NewMockClient(mockCtrl) tags := make(map[string]string) tags["CreatedBy"] = "OCIClusterAPIProvider" tags["ClusterUUID"] = "a" vcnClient.EXPECT().ListVcns(gomock.Any(), gomock.Eq(core.ListVcnsRequest{ CompartmentId: common.String("bar"), DisplayName: common.String("foo"), })).Return( core.ListVcnsResponse{ Items: []core.Vcn{ { FreeformTags: tags, Id: common.String("vcn_id"), }, }}, nil) vcnClient.EXPECT().ListVcns(gomock.Any(), gomock.Eq(core.ListVcnsRequest{ CompartmentId: common.String("bar"), DisplayName: common.String("not_found"), })).Return( core.ListVcnsResponse{ Items: []core.Vcn{ { Id: common.String("vcn_id"), }, }}, nil) vcnClient.EXPECT().ListVcns(gomock.Any(), gomock.Eq(core.ListVcnsRequest{ CompartmentId: common.String("bar"), DisplayName: common.String("error"), })).Return( core.ListVcnsResponse{}, errors.New("some error")) vcnClient.EXPECT().GetVcn(gomock.Any(), gomock.Eq(core.GetVcnRequest{ VcnId: common.String("not_managed"), })). 
Return(core.GetVcnResponse{ Vcn: core.Vcn{ Id: common.String("not_managed"), }, }, nil) tests := []struct { name string spec infrastructurev1beta1.OCIClusterSpec want *core.Vcn expectedError string wantErr bool }{ { name: "vcn id not present in spec find by name successful", spec: infrastructurev1beta1.OCIClusterSpec{ CompartmentId: "bar", NetworkSpec: infrastructurev1beta1.NetworkSpec{ Vcn: infrastructurev1beta1.VCN{ Name: "foo", }, }, }, want: &core.Vcn{ Id: common.String("vcn_id"), FreeformTags: tags, }, wantErr: false, }, { name: "vcn id not present in spec find by name error", spec: infrastructurev1beta1.OCIClusterSpec{ CompartmentId: "bar", NetworkSpec: infrastructurev1beta1.NetworkSpec{ Vcn: infrastructurev1beta1.VCN{ Name: "error", }, }, }, wantErr: true, expectedError: "failed to list vcn by name: some error", }, { name: "vcn id not present in spec not found by name", spec: infrastructurev1beta1.OCIClusterSpec{ CompartmentId: "bar", NetworkSpec: infrastructurev1beta1.NetworkSpec{ Vcn: infrastructurev1beta1.VCN{ Name: "not_found", }, }, }, wantErr: false, }, { name: "vcn id not present in spec but not managed by clusterapi", spec: infrastructurev1beta1.OCIClusterSpec{ NetworkSpec: infrastructurev1beta1.NetworkSpec{ Vcn: infrastructurev1beta1.VCN{ ID: common.String("not_managed"), }, }, }, wantErr: true, expectedError: "cluster api tags have been modified out of context", }, } l := log.FromContext(context.Background()) for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ociCluster := infrastructurev1beta1.OCICluster{ Spec: tt.spec, ObjectMeta: metav1.ObjectMeta{ UID: "a", }, } s := &ClusterScope{ VCNClient: vcnClient, OCICluster: &ociCluster, Cluster: &clusterv1.Cluster{ ObjectMeta: metav1.ObjectMeta{ UID: "a", }, }, Logger: &l, } got, err := s.GetVCN(context.Background()) if (err != nil) != tt.wantErr { t.Errorf("GetVCN() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("GetVCN() got = %v, want 
%v", got, tt.want) } if err != nil { if err.Error() != tt.expectedError { t.Errorf("GetVCN() expected error = %s, actual error %s", tt.expectedError, err.Error()) } } }) } }
explode_data.jsonl/52632
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1720 }
[ 2830, 3393, 28678, 10803, 13614, 11287, 45, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 741, 5195, 14271, 2959, 1669, 7860, 2273, 14271, 7121, 11571, 2959, 30389...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestProcessProvisions(t *testing.T) { ctx, _, keeper := createTestInput(t, false, 0) params := defaultParams() keeper.setParams(ctx, params) pool := keeper.GetPool(ctx) // create some candidates some bonded, some unbonded candidates := make([]Candidate, 10) for i := 0; i < 10; i++ { c := Candidate{ Status: Unbonded, PubKey: pks[i], Address: addrs[i], Assets: sdk.NewRat(0), Liabilities: sdk.NewRat(0), } if i < 5 { c.Status = Bonded } mintedTokens := int64((i + 1) * 10000000) pool.TotalSupply += mintedTokens pool, c, _ = pool.candidateAddTokens(c, mintedTokens) keeper.setCandidate(ctx, c) candidates[i] = c } keeper.setPool(ctx, pool) var totalSupply int64 = 550000000 var bondedShares int64 = 150000000 var unbondedShares int64 = 400000000 assert.Equal(t, totalSupply, pool.TotalSupply) assert.Equal(t, bondedShares, pool.BondedPool) assert.Equal(t, unbondedShares, pool.UnbondedPool) // initial bonded ratio ~ 27% assert.True(t, pool.bondedRatio().Equal(sdk.NewRat(bondedShares, totalSupply)), "%v", pool.bondedRatio()) // test the value of candidate shares assert.True(t, pool.bondedShareExRate().Equal(sdk.OneRat()), "%v", pool.bondedShareExRate()) initialSupply := pool.TotalSupply initialUnbonded := pool.TotalSupply - pool.BondedPool // process the provisions a year for hr := 0; hr < 8766; hr++ { pool := keeper.GetPool(ctx) expInflation := keeper.nextInflation(ctx).Round(1000000000) expProvisions := (expInflation.Mul(sdk.NewRat(pool.TotalSupply)).Quo(hrsPerYrRat)).Evaluate() startBondedPool := pool.BondedPool startTotalSupply := pool.TotalSupply pool = keeper.processProvisions(ctx) keeper.setPool(ctx, pool) //fmt.Printf("hr %v, startBondedPool %v, expProvisions %v, pool.BondedPool %v\n", hr, startBondedPool, expProvisions, pool.BondedPool) require.Equal(t, startBondedPool+expProvisions, pool.BondedPool, "hr %v", hr) require.Equal(t, startTotalSupply+expProvisions, pool.TotalSupply) } pool = keeper.GetPool(ctx) assert.NotEqual(t, initialSupply, pool.TotalSupply) 
assert.Equal(t, initialUnbonded, pool.UnbondedPool) //panic(fmt.Sprintf("debug total %v, bonded %v, diff %v\n", p.TotalSupply, p.BondedPool, pool.TotalSupply-pool.BondedPool)) // initial bonded ratio ~ from 27% to 40% increase for bonded holders ownership of total supply assert.True(t, pool.bondedRatio().Equal(sdk.NewRat(211813022, 611813022)), "%v", pool.bondedRatio()) // global supply assert.Equal(t, int64(611813022), pool.TotalSupply) assert.Equal(t, int64(211813022), pool.BondedPool) assert.Equal(t, unbondedShares, pool.UnbondedPool) // test the value of candidate shares assert.True(t, pool.bondedShareExRate().Mul(sdk.NewRat(bondedShares)).Equal(sdk.NewRat(211813022)), "%v", pool.bondedShareExRate()) }
explode_data.jsonl/12957
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1121 }
[ 2830, 3393, 7423, 1336, 40015, 1155, 353, 8840, 836, 8, 341, 20985, 11, 8358, 53416, 1669, 1855, 2271, 2505, 1155, 11, 895, 11, 220, 15, 340, 25856, 1669, 1638, 4870, 741, 197, 18861, 980, 4870, 7502, 11, 3628, 340, 85273, 1669, 53416...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRemoveProcessedUploadsWithoutBundleFile(t *testing.T) { bundleDir := testRoot(t) ids := []int{1, 2, 3, 4, 5} for _, id := range []int{1, 3, 5} { path := filepath.Join(bundleDir, "dbs", fmt.Sprintf("%d.lsif.db", id)) if err := makeFile(path, time.Now().Local()); err != nil { t.Fatalf("unexpected error creating file %s: %s", path, err) } } mockDB := dbmocks.NewMockDB() mockDB.GetDumpIDsFunc.SetDefaultReturn(ids, nil) j := &Janitor{ db: mockDB, bundleDir: bundleDir, metrics: NewJanitorMetrics(metrics.TestRegisterer), } if err := j.removeProcessedUploadsWithoutBundleFile(); err != nil { t.Fatalf("unexpected error removing processed uploads without bundle files: %s", err) } if len(mockDB.DeleteUploadByIDFunc.History()) != 2 { t.Errorf("unexpected number of DeleteUploadByID calls. want=%d have=%d", 2, len(mockDB.DeleteUploadByIDFunc.History())) } else { ids := []int{ mockDB.DeleteUploadByIDFunc.History()[0].Arg1, mockDB.DeleteUploadByIDFunc.History()[1].Arg1, } sort.Ints(ids) if diff := cmp.Diff([]int{2, 4}, ids); diff != "" { t.Errorf("unexpected dump ids (-want +got):\n%s", diff) } } }
explode_data.jsonl/19684
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 491 }
[ 2830, 3393, 13021, 82535, 13844, 16056, 411, 8409, 1703, 1155, 353, 8840, 836, 8, 341, 2233, 4206, 6184, 1669, 1273, 8439, 1155, 340, 197, 3365, 1669, 3056, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 630, 202...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestClientExtractHostname(t *testing.T) { tests := []struct { Scheme string Address string Output string }{ { Scheme: "http", Address: "", Output: "", }, { Scheme: "https", Address: "abc", Output: "", }, { Scheme: "http", Address: "127.0.0.1:19200", Output: "http://127.0.0.1:19200", }, { Scheme: "https", Address: "127.0.0.1:9200", Output: "https://127.0.0.1:9200", }, { Scheme: "http", Address: "myelk.local/10.1.0.24:9200", Output: "http://10.1.0.24:9200", }, } client, err := NewClient(SetSniff(false), SetHealthcheck(false)) if err != nil { t.Fatal(err) } for _, test := range tests { got := client.extractHostname(test.Scheme, test.Address) if want := test.Output; want != got { t.Errorf("expected %q; got: %q", want, got) } } }
explode_data.jsonl/38009
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 2959, 28959, 88839, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 7568, 8058, 220, 914, 198, 197, 98090, 914, 198, 197, 80487, 220, 914, 198, 197, 59403, 197, 197, 515, 298, 7568, 8058, 25, 220, 330, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestStub(t *testing.T) { type test struct { name string mock func() *http.Request handler http.HandlerFunc expect string } cases := []test{ { name: "add stub simple", mock: func() *http.Request { payload := `{ "service": "Testing", "method":"TestMethod", "input":{ "equals":{ "Hola":"Mundo" } }, "output":{ "delay": "1s", "data":{ "Hello":"World" } } }` read := bytes.NewReader([]byte(payload)) return httptest.NewRequest("POST", "/add", read) }, handler: addStub, expect: `Success add stub`, }, { name: "list stub", mock: func() *http.Request { return httptest.NewRequest("GET", "/", nil) }, handler: listStub, expect: "{\"Testing\":{\"TestMethod\":[{\"Input\":{\"equals\":{\"Hola\":\"Mundo\"},\"contains\":null,\"matches\":null},\"Output\":{\"delay\":\"1s\",\"data\":{\"Hello\":\"World\"},\"error\":\"\"}}]}}\n", }, { name: "find stub equals", mock: func() *http.Request { payload := `{"service":"Testing","method":"TestMethod","data":{"Hola":"Mundo"}}` return httptest.NewRequest("POST", "/find", bytes.NewReader([]byte(payload))) }, handler: handleFindStub, expect: "{\"delay\":\"1s\",\"data\":{\"Hello\":\"World\"},\"error\":\"\"}\n", }, { name: "add stub contains", mock: func() *http.Request { payload := `{ "service": "Testing", "method":"TestMethod", "input":{ "contains":{ "field1":"hello field1", "field3":"hello field3" } }, "output":{ "data":{ "hello":"world" } } }` return httptest.NewRequest("POST", "/add", bytes.NewReader([]byte(payload))) }, handler: addStub, expect: `Success add stub`, }, { name: "find stub contains", mock: func() *http.Request { payload := `{ "service":"Testing", "method":"TestMethod", "data":{ "field1":"hello field1", "field2":"hello field2", "field3":"hello field3" } }` return httptest.NewRequest("GET", "/find", bytes.NewReader([]byte(payload))) }, handler: handleFindStub, expect: "{\"delay\":\"0s\",\"data\":{\"hello\":\"world\"},\"error\":\"\"}\n", }, { name: "add stub matches regex", mock: func() *http.Request { payload := `{ 
"service":"Testing2", "method":"TestMethod", "input":{ "matches":{ "field1":".*ello$" } }, "output":{ "data":{ "reply":"OK" } } }` return httptest.NewRequest("POST", "/add", bytes.NewReader([]byte(payload))) }, handler: addStub, expect: "Success add stub", }, { name: "find stub matches regex", mock: func() *http.Request { payload := `{ "service":"Testing2", "method":"TestMethod", "data":{ "field1":"hello" } }` return httptest.NewRequest("GET", "/find", bytes.NewReader([]byte(payload))) }, handler: handleFindStub, expect: "{\"delay\":\"0s\",\"data\":{\"reply\":\"OK\"},\"error\":\"\"}\n", }, { name: "error find stub contains", mock: func() *http.Request { payload := `{ "service":"Testing", "method":"TestMethod", "data":{ "field1":"hello field1", "field2":"hello field2", "field3":"hello field4" } }` return httptest.NewRequest("GET", "/find", bytes.NewReader([]byte(payload))) }, handler: handleFindStub, expect: "Can't find stub \n\nService: Testing \n\nMethod: TestMethod \n\nInput\n\n{\n\tfield1: hello field1\n\tfield2: hello field2\n\tfield3: hello field4\n}\n\nClosest Match \n\ncontains:{\n\tfield1: hello field1\n\tfield3: hello field3\n}", }, { name: "error find stub equals", mock: func() *http.Request { payload := `{"service":"Testing","method":"TestMethod","data":{"Hola":"Dunia"}}` return httptest.NewRequest("POST", "/find", bytes.NewReader([]byte(payload))) }, handler: handleFindStub, expect: "Can't find stub \n\nService: Testing \n\nMethod: TestMethod \n\nInput\n\n{\n\tHola: Dunia\n}\n\nClosest Match \n\nequals:{\n\tHola: Mundo\n}", }, } for _, v := range cases { t.Run(v.name, func(t *testing.T) { wrt := httptest.NewRecorder() req := v.mock() v.handler(wrt, req) res, err := ioutil.ReadAll(wrt.Result().Body) assert.NoError(t, err) assert.Equal(t, v.expect, string(res)) }) } }
explode_data.jsonl/22751
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2203 }
[ 2830, 3393, 33838, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 2036, 341, 197, 11609, 262, 914, 198, 197, 77333, 262, 2915, 368, 353, 1254, 9659, 198, 197, 53326, 1758, 89164, 198, 197, 24952, 220, 914, 198, 197, 630, 1444, 2264, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRenderWithDHCP(t *testing.T) { g := NewGomegaWithT(t) crd := DHCPConfig.DeepCopy() config := &crd.Spec FillDefaults(config, nil) objs, err := RenderMultus(config, manifestDir) g.Expect(err).NotTo(HaveOccurred()) g.Expect(objs).To(ContainElement(HaveKubernetesID("DaemonSet", "openshift-multus", "dhcp-daemon"))) }
explode_data.jsonl/16734
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 6750, 2354, 51326, 7123, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 692, 1444, 6498, 1669, 57587, 2648, 55602, 12106, 741, 25873, 1669, 609, 5082, 67, 36473, 198, 12727, 483, 16273, 8754, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBroadcastMessageBadInput(t *testing.T) { bm, cancel := newTestBroadcast(t) defer cancel() mdi := bm.database.(*databasemocks.Plugin) mdm := bm.data.(*datamocks.Manager) mbi := bm.blockchain.(*blockchainmocks.Plugin) ctx := context.Background() mbi.On("VerifyIdentitySyntax", ctx, mock.Anything).Return("0x12345", nil) rag := mdi.On("RunAsGroup", ctx, mock.Anything) rag.RunFn = func(a mock.Arguments) { var fn = a[1].(func(context.Context) error) rag.ReturnArguments = mock.Arguments{fn(a[0].(context.Context))} } mdm.On("ResolveInlineDataBroadcast", ctx, "ns1", mock.Anything).Return(nil, nil, fmt.Errorf("pop")) _, err := bm.BroadcastMessage(ctx, "ns1", &fftypes.MessageInOut{ InlineData: fftypes.InlineData{ {Value: fftypes.Byteable(`{"hello": "world"}`)}, }, }, false) assert.EqualError(t, err, "pop") mdi.AssertExpectations(t) mdm.AssertExpectations(t) }
explode_data.jsonl/12106
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 369 }
[ 2830, 3393, 43362, 2052, 17082, 2505, 1155, 353, 8840, 836, 8, 341, 2233, 76, 11, 9121, 1669, 501, 2271, 43362, 1155, 340, 16867, 9121, 741, 2109, 8579, 1669, 34868, 15062, 41399, 67, 2096, 300, 336, 25183, 64378, 340, 2109, 13849, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetQueueNeverExisted(t *testing.T) { serv, client := setUp(t) defer tearDown(t, serv) getQueueRequest := taskspb.GetQueueRequest{ Name: "hello_q", } gettedQueue, err := client.GetQueue(context.Background(), &getQueueRequest) assert.Nil(t, gettedQueue) st, _ := status.FromError(err) assert.Equal(t, codes.NotFound, st.Code()) }
explode_data.jsonl/72419
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 131 }
[ 2830, 3393, 1949, 7554, 26155, 840, 13236, 1155, 353, 8840, 836, 8, 341, 1903, 648, 11, 2943, 1669, 18620, 1155, 340, 16867, 32825, 1155, 11, 4853, 692, 10366, 7554, 1900, 1669, 3383, 43467, 2234, 7554, 1900, 515, 197, 21297, 25, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenericStatementPlainType(t *testing.T) { a, errs := ParseString(` x = new List<string>() `) bvmUtils.AssertNow(t, len(errs) == 0, errs.Format()) bvmUtils.AssertNow(t, a != nil, "nil scope") bvmUtils.AssertLength(t, len(a.Sequence), 1) }
explode_data.jsonl/49769
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 113 }
[ 2830, 3393, 19964, 8636, 26982, 929, 1155, 353, 8840, 836, 8, 341, 11323, 11, 70817, 1669, 14775, 703, 61528, 197, 10225, 284, 501, 1759, 4947, 18949, 197, 24183, 2233, 7338, 4209, 11711, 7039, 1155, 11, 2422, 3964, 82, 8, 621, 220, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCommandHelp(t *testing.T) { var result string f := func(c *Command, args *Args) { result = args.FirstParam() } c := &Command{Usage: "foo", Run: f} args := NewArgs([]string{"foo", "-h"}) c.Call(args) assert.Equal(t, "", result) }
explode_data.jsonl/60588
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 4062, 12689, 1155, 353, 8840, 836, 8, 341, 2405, 1102, 914, 198, 1166, 1669, 2915, 1337, 353, 4062, 11, 2827, 353, 4117, 8, 314, 1102, 284, 2827, 15926, 2001, 368, 456, 1444, 1669, 609, 4062, 90, 14783, 25, 330, 7975, 49...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApplyAndReturnValidatorSetUpdatesInserted(t *testing.T) { app, ctx, addrs, _ := bootstrapValidatorTest(t, 1000, 20) powers := []int64{10, 20, 5, 15, 25} var validators [5]types.Validator for i, power := range powers { validators[i] = types.NewValidator(sdk.ValAddress(addrs[i]), PKs[i], types.Description{}) tokens := sdk.TokensFromConsensusPower(power) validators[i], _ = validators[i].AddTokensFromDel(tokens) } validators[0] = keeper.TestingUpdateValidator(app.StakingKeeper, ctx, validators[0], false) validators[1] = keeper.TestingUpdateValidator(app.StakingKeeper, ctx, validators[1], false) require.Equal(t, 2, len(app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx))) // test validtor added at the beginning // tendermintUpdate set: {} -> {c0} app.StakingKeeper.SetValidator(ctx, validators[2]) app.StakingKeeper.SetValidatorByPowerIndex(ctx, validators[2]) updates := app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx) validators[2], _ = app.StakingKeeper.GetValidator(ctx, validators[2].OperatorAddress) require.Equal(t, 1, len(updates)) require.Equal(t, validators[2].ABCIValidatorUpdate(), updates[0]) // test validtor added at the beginning // tendermintUpdate set: {} -> {c0} app.StakingKeeper.SetValidator(ctx, validators[3]) app.StakingKeeper.SetValidatorByPowerIndex(ctx, validators[3]) updates = app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx) validators[3], _ = app.StakingKeeper.GetValidator(ctx, validators[3].OperatorAddress) require.Equal(t, 1, len(updates)) require.Equal(t, validators[3].ABCIValidatorUpdate(), updates[0]) // test validtor added at the end // tendermintUpdate set: {} -> {c0} app.StakingKeeper.SetValidator(ctx, validators[4]) app.StakingKeeper.SetValidatorByPowerIndex(ctx, validators[4]) updates = app.StakingKeeper.ApplyAndReturnValidatorSetUpdates(ctx) validators[4], _ = app.StakingKeeper.GetValidator(ctx, validators[4].OperatorAddress) require.Equal(t, 1, len(updates)) require.Equal(t, 
validators[4].ABCIValidatorUpdate(), updates[0]) }
explode_data.jsonl/6106
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 748 }
[ 2830, 3393, 28497, 3036, 5598, 14256, 1649, 37091, 91269, 1155, 353, 8840, 836, 8, 341, 28236, 11, 5635, 11, 912, 5428, 11, 716, 1669, 26925, 14256, 2271, 1155, 11, 220, 16, 15, 15, 15, 11, 220, 17, 15, 692, 3223, 15965, 1669, 3056,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWhileLoopWithAssignment(t *testing.T) { testStr := `<? while ($var = mysql_assoc()) { echo $var; }` p := NewParser() p.disableScoping = true p.Debug = true p.MaxErrors = 0 a, _ := p.Parse("test.php", testStr) if len(a.Nodes) == 0 { t.Fatalf("While loop did not correctly parse") } tree := &ast.WhileStmt{ Termination: ast.AssignmentExpr{ Assignee: ast.NewVariable("var"), Value: &ast.FunctionCallExpr{ FunctionName: &ast.Identifier{Value: "mysql_assoc"}, Arguments: make([]ast.Expr, 0), }, Operator: "=", }, LoopBlock: &ast.Block{ Statements: []ast.Statement{ ast.Echo(ast.NewVariable("var")), }, }, } if !assertEquals(a.Nodes[0], tree) { t.Fatalf("While loop with assignment did not correctly parse") } }
explode_data.jsonl/28445
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 7983, 14620, 2354, 41613, 1155, 353, 8840, 836, 8, 341, 18185, 2580, 1669, 1565, 53075, 220, 1393, 1711, 947, 284, 10564, 18891, 2140, 341, 262, 1687, 400, 947, 280, 220, 335, 3989, 3223, 1669, 1532, 6570, 741, 3223, 42628, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDirectModeHandler_nonDIRECT_MODE(t *testing.T) { invalidModes := []signingtypes.SignMode{ signingtypes.SignMode_SIGN_MODE_TEXTUAL, signingtypes.SignMode_SIGN_MODE_LEGACY_AMINO_JSON, signingtypes.SignMode_SIGN_MODE_UNSPECIFIED, } for _, invalidMode := range invalidModes { t.Run(invalidMode.String(), func(t *testing.T) { var dh signModeDirectHandler var signingData signing.SignerData _, err := dh.GetSignBytes(invalidMode, signingData, nil) require.Error(t, err) wantErr := fmt.Errorf("expected %s, got %s", signingtypes.SignMode_SIGN_MODE_DIRECT, invalidMode) require.Equal(t, err, wantErr) }) } }
explode_data.jsonl/38086
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 16027, 3636, 3050, 21637, 57747, 8414, 1155, 353, 8840, 836, 8, 341, 197, 11808, 70035, 1669, 3056, 7752, 287, 9242, 41152, 3636, 515, 197, 69054, 287, 9242, 41152, 3636, 36727, 8414, 10243, 16383, 345, 197, 69054, 287, 9242, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHttpClient_GetConn(t *testing.T) { defer func() { if err := recover(); err != nil { log.Error(err) } }() c := CreateHttpClient(2, 30, 15).GetConn() res, err := c.Get("https://sssxx.com/") if err != nil { log.Error(err.Error()) } defer res.Body.Close() log.Info(res.StatusCode) }
explode_data.jsonl/70025
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 26316, 13614, 9701, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 743, 1848, 1669, 11731, 2129, 1848, 961, 2092, 341, 298, 6725, 6141, 3964, 340, 197, 197, 532, 197, 66816, 1444, 1669, 4230, 26316, 7, 17, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMessagesWithStdin(t *testing.T) { stdin := &bytes.Buffer{} writer := gio.NewDelimitedWriter(stdin) gherkin := `Feature: Minimal Scenario: a Given a Scenario: b Given b ` wrapper := &messages.Envelope{ Message: &messages.Envelope_Source{ Source: &messages.Source{ Uri: "features/test.feature", Data: gherkin, Media: &messages.Media{ Encoding: messages.Media_UTF8, ContentType: "text/x.cucumber.gherkin+plain", }, }, }, } writer.WriteMsg(wrapper) writer.WriteMsg(wrapper) wrappers, err := Messages( nil, stdin, "en", true, true, true, nil, false, ) if err != nil { t.Error(err) } if len(wrappers) != 8 { t.Fatalf("%d != %d", len(wrappers), 8) } }
explode_data.jsonl/54896
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 15820, 2354, 22748, 258, 1155, 353, 8840, 836, 8, 341, 6736, 258, 1669, 609, 9651, 22622, 16094, 38959, 1669, 53043, 7121, 16532, 31511, 6492, 69567, 692, 197, 866, 261, 7989, 1669, 1565, 13859, 25, 75112, 271, 220, 58663, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMapProxy_ContainsValueWithNilValue(t *testing.T) { _, err := mp.ContainsValue(nil) AssertErrorNotNil(t, err, "containsValue did not return an error for nil value") mp.Clear() }
explode_data.jsonl/56979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 2227, 16219, 62, 23805, 1130, 2354, 19064, 1130, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 10490, 11545, 1130, 27907, 340, 18017, 1454, 96144, 1155, 11, 1848, 11, 330, 13372, 1130, 1521, 537, 470, 458, 1465, 369, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNewScalarBaseMultOne(t *testing.T) { // Should be base Point curve := btcec.S256() num := big.NewInt(1) p, err := NewScalarBaseMult(curve, num) if err != nil { t.Errorf("NewScalarBaseMult failed: %v", err) } if p == nil { t.Errorf("NewScalarBaseMult failed when it should've succeeded.") t.FailNow() } if !bytes.Equal(p.Bytes(), append(curve.Gx.Bytes(), curve.Gy.Bytes()...)) { t.Errorf("NewScalarBaseMult should've returned the base Point.") } }
explode_data.jsonl/75659
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 194 }
[ 2830, 3393, 3564, 20639, 3978, 40404, 3966, 1155, 353, 8840, 836, 8, 341, 197, 322, 12260, 387, 2331, 5126, 198, 33209, 586, 1669, 19592, 68955, 808, 17, 20, 21, 741, 22431, 1669, 2409, 7121, 1072, 7, 16, 340, 3223, 11, 1848, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestTransportRetryAfterGOAWAY verifies that a RoundTrip is retried on a new
// connection after the first server connection replies with GOAWAY, and that
// the transport does not dial a third time. A custom DialTLS hands the client
// two in-process connections (delivered via ct1/ct2) and rejects dial #3.
func TestTransportRetryAfterGOAWAY(t *testing.T) {
	var dialer struct {
		sync.Mutex
		count int
	}
	ct1 := make(chan *clientTester)
	ct2 := make(chan *clientTester)

	ln := newLocalListener(t)
	defer ln.Close()

	tr := &Transport{
		TLSClientConfig: tlsConfigInsecure,
	}
	tr.DialTLS = func(network, addr string, cfg *tls.Config) (net.Conn, error) {
		dialer.Lock()
		defer dialer.Unlock()
		dialer.count++
		// A third dial means the retry logic looped — fail it.
		if dialer.count == 3 {
			return nil, errors.New("unexpected number of dials")
		}
		cc, err := net.Dial("tcp", ln.Addr().String())
		if err != nil {
			return nil, fmt.Errorf("dial error: %v", err)
		}
		sc, err := ln.Accept()
		if err != nil {
			return nil, fmt.Errorf("accept error: %v", err)
		}
		ct := &clientTester{
			t:  t,
			tr: tr,
			cc: cc,
			sc: sc,
			fr: NewFramer(sc, sc),
		}
		// Hand the server side of dial N to server goroutine N.
		switch dialer.count {
		case 1:
			ct1 <- ct
		case 2:
			ct2 <- ct
		}
		return cc, nil
	}

	errs := make(chan error, 3)
	done := make(chan struct{})
	defer close(done)

	// Client.
	go func() {
		req, _ := http.NewRequest("GET", "https://dummy.tld/", nil)
		res, err := tr.RoundTrip(req)
		if res != nil {
			res.Body.Close()
			if got := res.Header.Get("Foo"); got != "bar" {
				err = fmt.Errorf("foo header = %q; want bar", got)
			}
		}
		if err != nil {
			err = fmt.Errorf("RoundTrip: %v", err)
		}
		errs <- err
	}()

	connToClose := make(chan io.Closer, 2)

	// Server for the first request: greet, read HEADERS, answer with GOAWAY
	// so the client retries on a fresh connection.
	go func() {
		var ct *clientTester
		select {
		case ct = <-ct1:
		case <-done:
			return
		}
		connToClose <- ct.cc
		ct.greet()
		hf, err := ct.firstHeaders()
		if err != nil {
			errs <- fmt.Errorf("server1 failed reading HEADERS: %v", err)
			return
		}
		t.Logf("server1 got %v", hf)
		if err := ct.fr.WriteGoAway(0 /*max id*/, ErrCodeNo, nil); err != nil {
			errs <- fmt.Errorf("server1 failed writing GOAWAY: %v", err)
			return
		}
		errs <- nil
	}()

	// Server for the second request: serve a real 200 response with the
	// "foo: bar" header the client goroutine checks for.
	go func() {
		var ct *clientTester
		select {
		case ct = <-ct2:
		case <-done:
			return
		}
		connToClose <- ct.cc
		ct.greet()
		hf, err := ct.firstHeaders()
		if err != nil {
			errs <- fmt.Errorf("server2 failed reading HEADERS: %v", err)
			return
		}
		t.Logf("server2 got %v", hf)
		var buf bytes.Buffer
		enc := hpack.NewEncoder(&buf)
		enc.WriteField(hpack.HeaderField{Name: ":status", Value: "200"})
		enc.WriteField(hpack.HeaderField{Name: "foo", Value: "bar"})
		err = ct.fr.WriteHeaders(HeadersFrameParam{
			StreamID:      hf.StreamID,
			EndHeaders:    true,
			EndStream:     false,
			BlockFragment: buf.Bytes(),
		})
		if err != nil {
			errs <- fmt.Errorf("server2 failed writing response HEADERS: %v", err)
		} else {
			errs <- nil
		}
	}()

	// Wait for the client and both servers (3 results total).
	for k := 0; k < 3; k++ {
		select {
		case err := <-errs:
			if err != nil {
				t.Error(err)
			}
		case <-time.After(1 * time.Second):
			t.Errorf("timed out")
		}
	}
	// Drain and close whatever connections were actually opened.
	for {
		select {
		case c := <-connToClose:
			c.Close()
		default:
			return
		}
	}
}
explode_data.jsonl/16137
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1344 }
[ 2830, 3393, 27560, 51560, 6025, 15513, 14419, 3022, 1155, 353, 8840, 836, 8, 341, 2405, 27860, 261, 2036, 341, 197, 1903, 1721, 99014, 198, 197, 18032, 526, 198, 197, 532, 89216, 16, 1669, 1281, 35190, 353, 2972, 58699, 340, 89216, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestFactConvertV2 round-trips a datalog.Fact covering every term kind
// (string, integer, variable, bytes, interned symbol, date, bool, set)
// through the protobuf v2 representation and back, expecting exact equality
// in both directions.
func TestFactConvertV2(t *testing.T) {
	now := time.Now()
	syms := &datalog.SymbolTable{}
	in := &datalog.Fact{Predicate: datalog.Predicate{
		Name: datalog.String(42),
		Terms: []datalog.Term{
			datalog.String(1),
			datalog.Integer(2),
			datalog.Variable(3),
			datalog.Bytes([]byte("bytes")),
			syms.Insert("abcd"),
			datalog.Date(now.Unix()),
			datalog.Bool(true),
			datalog.Set{
				syms.Insert("abc"),
				syms.Insert("def"),
			},
		},
	}}
	name1 := uint64(42)
	// Hand-built protobuf value the conversion must produce; symbol terms map
	// to their index in the symbol table.
	expectedPbFact := &pb.FactV2{Predicate: &pb.PredicateV2{
		Name: &name1,
		Terms: []*pb.TermV2{
			{Content: &pb.TermV2_String_{String_: 1}},
			{Content: &pb.TermV2_Integer{Integer: 2}},
			{Content: &pb.TermV2_Variable{Variable: 3}},
			{Content: &pb.TermV2_Bytes{Bytes: []byte("bytes")}},
			{Content: &pb.TermV2_String_{String_: syms.Index("abcd")}},
			{Content: &pb.TermV2_Date{Date: uint64(now.Unix())}},
			{Content: &pb.TermV2_Bool{Bool: true}},
			{Content: &pb.TermV2_Set{Set: &pb.TermSet{Set: []*pb.TermV2{
				{Content: &pb.TermV2_String_{String_: syms.Index("abc")}},
				{Content: &pb.TermV2_String_{String_: syms.Index("def")}},
			}}}},
		},
	}}
	pbFact, err := tokenFactToProtoFactV2(*in)
	require.NoError(t, err)
	require.Equal(t, expectedPbFact, pbFact)

	out, err := protoFactToTokenFactV2(pbFact)
	require.NoError(t, err)
	require.Equal(t, in, out)
}
explode_data.jsonl/51995
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 642 }
[ 2830, 3393, 17417, 12012, 53, 17, 1155, 353, 8840, 836, 8, 341, 80922, 1669, 882, 13244, 741, 1903, 75025, 1669, 609, 67, 7750, 65995, 2556, 31483, 17430, 1669, 609, 67, 7750, 991, 531, 90, 36329, 25, 3258, 30951, 96719, 515, 197, 212...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_Wireguard_addRule table-tests Wireguard.addRule: the exact netlink
// rule built from the priority and firewall mark, error wrapping when RuleAdd
// fails, and error wrapping from the returned cleanup function when RuleDel
// fails. Netlink calls are mocked with gomock.
func Test_Wireguard_addRule(t *testing.T) {
	t.Parallel()

	const rulePriority = 987
	const firewallMark = 456

	errDummy := errors.New("dummy")

	testCases := map[string]struct {
		expectedRule *netlink.Rule
		ruleAddErr   error
		err          error
		ruleDelErr   error
		cleanupErr   error
	}{
		"success": {
			expectedRule: &netlink.Rule{
				Invert:            true,
				Priority:          rulePriority,
				Mark:              firewallMark,
				Table:             firewallMark,
				Mask:              -1,
				Goto:              -1,
				Flow:              -1,
				SuppressIfgroup:   -1,
				SuppressPrefixlen: -1,
			},
		},
		"rule add error": {
			expectedRule: &netlink.Rule{
				Invert:            true,
				Priority:          rulePriority,
				Mark:              firewallMark,
				Table:             firewallMark,
				Mask:              -1,
				Goto:              -1,
				Flow:              -1,
				SuppressIfgroup:   -1,
				SuppressPrefixlen: -1,
			},
			ruleAddErr: errDummy,
			err:        errors.New("dummy: when adding rule: ip rule 987: from <nil> table 456"),
		},
		"rule delete error": {
			expectedRule: &netlink.Rule{
				Invert:            true,
				Priority:          rulePriority,
				Mark:              firewallMark,
				Table:             firewallMark,
				Mask:              -1,
				Goto:              -1,
				Flow:              -1,
				SuppressIfgroup:   -1,
				SuppressPrefixlen: -1,
			},
			ruleDelErr: errDummy,
			cleanupErr: errors.New("dummy: when deleting rule: ip rule 987: from <nil> table 456"),
		},
	}

	for name, testCase := range testCases {
		testCase := testCase // capture for the parallel subtest (pre-1.22 loop-var semantics)
		t.Run(name, func(t *testing.T) {
			t.Parallel()
			ctrl := gomock.NewController(t)

			netLinker := NewMockNetLinker(ctrl)
			wg := Wireguard{
				netlink: netLinker,
			}

			netLinker.EXPECT().RuleAdd(testCase.expectedRule).
				Return(testCase.ruleAddErr)
			cleanup, err := wg.addRule(rulePriority, firewallMark)

			if testCase.err != nil {
				require.Error(t, err)
				assert.Equal(t, testCase.err.Error(), err.Error())
				return
			}
			require.NoError(t, err)

			// The cleanup closure is expected to delete the same rule.
			netLinker.EXPECT().RuleDel(testCase.expectedRule).
				Return(testCase.ruleDelErr)
			err = cleanup()
			if testCase.cleanupErr != nil {
				require.Error(t, err)
				assert.Equal(t, testCase.cleanupErr.Error(), err.Error())
			} else {
				require.NoError(t, err)
			}
		})
	}
}
explode_data.jsonl/52480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1238 }
[ 2830, 3393, 2763, 554, 26098, 2891, 11337, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 4777, 5912, 20555, 284, 220, 24, 23, 22, 198, 4777, 49877, 8949, 284, 220, 19, 20, 21, 271, 9859, 43344, 1669, 5975, 7121, 445, 31390, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRandomBuildTag(t *testing.T) { tests := []struct { namespace, name string want string }{ {"test", "build-1", "test/build-1:f1f85ff5"}, // For long build namespace + build name, the returned random build tag // would be longer than the limit of reference.NameTotalLengthMax (255 // chars). We do not truncate the repository name because it could create an // invalid repository name (e.g., namespace=abc, name=d, repo=abc/d, // trucated=abc/ -> invalid), so we simply take a SHA1 hash of the // repository name (which is guaranteed to be a valid repository name) and // preserve the random tag. { "namespace" + strings.Repeat(".namespace", 20), "name" + strings.Repeat(".name", 20), "47c1d5c686ce4563521c625457e79ca23c07bc27:f1f85ff5", }, } for _, tt := range tests { rand.Seed(0) got := randomBuildTag(tt.namespace, tt.name) if !reflect.DeepEqual(got, tt.want) { t.Errorf("randomBuildTag(%q, %q) = %q, want %q", tt.namespace, tt.name, got, tt.want) } } }
explode_data.jsonl/26152
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 398 }
[ 2830, 3393, 13999, 11066, 5668, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 56623, 11, 829, 914, 198, 197, 50780, 310, 914, 198, 197, 59403, 197, 197, 4913, 1944, 497, 330, 5834, 12, 16, 497, 330, 1944, 30593, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestExcelizeHello exercises basic excelize usage: create a workbook, add a
// worksheet, set two cell values, select the active sheet, and save the file.
func TestExcelizeHello(t *testing.T) {
	f := excelize.NewFile()
	// Create a new worksheet.
	index := f.NewSheet("Sheet2")
	// Set the value of a cell.
	f.SetCellValue("Sheet2", "A2", "Hello work!")
	f.SetCellValue("Sheet1", "B2", 100)
	// Set the workbook's default (active) worksheet.
	f.SetActiveSheet(index)
	// Save the workbook to the given path.
	if err := f.SaveAs(pathPrefix + "BookHello_out.xlsx"); err != nil {
		fmt.Println(err)
	}
}
explode_data.jsonl/472
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 20055, 551, 9707, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 3438, 19412, 7121, 1703, 741, 197, 322, 47758, 46944, 99257, 20742, 198, 26327, 1669, 282, 7121, 10541, 445, 10541, 17, 1138, 197, 322, 53054, 106251, 33983, 9370, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestRequireBadArgumentCount bundles an entry point whose require() calls
// have the wrong number of arguments (zero or two), both bare and inside
// try/catch, relying on the suite's expected-output snapshot.
func TestRequireBadArgumentCount(t *testing.T) {
	default_suite.expectBundled(t, bundled{
		files: map[string]string{
			// NOTE(review): the newlines inside this JS snippet were lost when
			// the file was flattened to one line; layout reconstructed here —
			// confirm against the original source.
			"/entry.js": `
				require()
				require("a", "b")

				try {
					require()
					require("a", "b")
				} catch {
				}
			`,
		},
		entryPaths: []string{"/entry.js"},
		options: config.Options{
			Mode:          config.ModeBundle,
			AbsOutputFile: "/out.js",
		},
	})
}
explode_data.jsonl/38476
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 17959, 17082, 9171, 2507, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 17957, 741, 571, 17957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_fsimpl_ReadJson(t *testing.T) { tests := map[string]struct { o interface{} wantErr string beforeFn func() FS assertFn func(*testing.T, interface{}) }{ "Should read a simple json file": { o: &SampleJson{}, beforeFn: func() FS { j, _ := json.Marshal(&SampleJson{ Name: "name", }) memfs := memfs.New() _ = billyUtils.WriteFile(memfs, "filename", j, 0666) return Create(memfs) }, assertFn: func(t *testing.T, o interface{}) { j := o.(*SampleJson) assert.Equal(t, "name", j.Name) }, }, "Should fail if file does not exist": { o: &SampleJson{}, wantErr: os.ErrNotExist.Error(), beforeFn: func() FS { memfs := memfs.New() return Create(memfs) }, }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { fs := tt.beforeFn() if err := fs.ReadJson("filename", tt.o); err != nil { if tt.wantErr != "" { assert.EqualError(t, err, tt.wantErr) } else { t.Errorf("ReadYamls() error = %v", err) } return } if tt.assertFn != nil { tt.assertFn(t, tt.o) } }) } }
explode_data.jsonl/58985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 560 }
[ 2830, 3393, 34470, 6383, 38381, 5014, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 22229, 286, 3749, 16094, 197, 50780, 7747, 220, 914, 198, 197, 63234, 24911, 2915, 368, 24289, 198, 197, 6948, 24911, 2915...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_ResetTeamPassword exercises the manager resetPassword endpoint:
// 400 for a malformed payload (wrong key), 404 for an unknown team ID,
// and 200 on success. Uses the package-level router and managerToken.
func Test_ResetTeamPassword(t *testing.T) {
	// error payload
	w := httptest.NewRecorder()
	jsonData, _ := json.Marshal(map[string]interface{}{
		"IDd": 3, // deliberately misspelled key to trigger validation failure
	})
	req, _ := http.NewRequest("POST", "/api/manager/team/resetPassword", bytes.NewBuffer(jsonData))
	req.Header.Set("Authorization", managerToken)
	router.ServeHTTP(w, req)
	assert.Equal(t, 400, w.Code)

	// team not found
	w = httptest.NewRecorder()
	jsonData, _ = json.Marshal(map[string]interface{}{
		"ID": 233,
	})
	req, _ = http.NewRequest("POST", "/api/manager/team/resetPassword", bytes.NewBuffer(jsonData))
	req.Header.Set("Authorization", managerToken)
	router.ServeHTTP(w, req)
	assert.Equal(t, 404, w.Code)

	// success
	w = httptest.NewRecorder()
	jsonData, _ = json.Marshal(map[string]interface{}{
		"ID": 3,
	})
	req, _ = http.NewRequest("POST", "/api/manager/team/resetPassword", bytes.NewBuffer(jsonData))
	req.Header.Set("Authorization", managerToken)
	router.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
}
explode_data.jsonl/77160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 378 }
[ 2830, 3393, 67771, 14597, 4876, 1155, 353, 8840, 836, 8, 341, 197, 322, 1465, 7729, 198, 6692, 1669, 54320, 70334, 7121, 47023, 741, 30847, 1043, 11, 716, 1669, 2951, 37271, 9147, 14032, 31344, 67066, 197, 197, 1, 915, 67, 788, 220, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCStore_ToMap(t *testing.T) { type args struct { key string value string } tests := []struct { name string args args want args }{ {"key_value_ok", args{"testKey", "testValue"}, args{"testKey", "testValue"}}, {"value_empty", args{"testKey", ""}, args{"testKey", ""}}, {"key_empty", args{"", "testValue"}, args{"", "testValue"}}, {"both_empty", args{"", ""}, args{"", ""}}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { cs := New() cs.Store(tt.args.key, tt.args.value) cc := cs.ToMap() val, ok := cc[tt.args.key] if !ok { t.Errorf("key-value pair %q:%q not present in copied map", tt.args.key, tt.args.value) } if val != tt.args.value { t.Errorf("copied map contains %q instead of %q for key %q", val, tt.args.value, tt.args.key) } }) } }
explode_data.jsonl/59221
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 366 }
[ 2830, 3393, 34, 6093, 38346, 2227, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 23634, 256, 914, 198, 197, 16309, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestErrors_ErrorConext (sic: "Conext" is a typo for "Context"; renaming
// would change the exported test name, so it is left as-is) checks Error
// formatting with and without a lexer.SourceContext attached to the token.
func TestErrors_ErrorConext(t *testing.T) {
	// "Before": error without a way to know the source file.
	err := &errors.Error{ErrorToken: mockToken(111, "moyles", 16, 5), ExpectedTokens: []string{"ant", "dec"}}
	assertEqual(t, `16:5: error: expected either ant or dec; got: "moyles"`, err.Error())

	// Now attach a Context that implements `Source() string` and verify we
	// get a proper File-Line-Column error using the same err.
	err.ErrorToken.Context = &lexer.SourceContext{Filepath: "/addicted/to/plaice.lyrics"}
	assertEqual(t, `/addicted/to/plaice.lyrics:16:5: error: expected either ant or dec; got: "moyles"`, err.Error())
}
explode_data.jsonl/29099
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 13877, 28651, 1109, 427, 1155, 353, 8840, 836, 8, 341, 197, 322, 330, 10227, 788, 1465, 2041, 264, 1616, 311, 1414, 279, 2530, 1034, 624, 9859, 1669, 609, 7650, 6141, 90, 1454, 3323, 25, 7860, 3323, 7, 16, 16, 16, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRemoveProjectSource creates an AppProject with a single source repo,
// removes that source twice via the CLI (the second removal must be a no-op
// success), then verifies the project has zero source repos and recorded an
// update event.
func TestRemoveProjectSource(t *testing.T) {
	fixture.EnsureCleanState(t)
	// Unix-timestamp suffix keeps the project name unique across runs.
	projectName := "proj-" + strconv.FormatInt(time.Now().Unix(), 10)
	_, err := fixture.AppClientset.ArgoprojV1alpha1().AppProjects(fixture.ArgoCDNamespace).Create(context.Background(), &v1alpha1.AppProject{
		ObjectMeta: metav1.ObjectMeta{Name: projectName},
		Spec: v1alpha1.AppProjectSpec{
			SourceRepos: []string{"https://github.com/argoproj/argo-cd.git"},
		},
	}, metav1.CreateOptions{})
	assert.NoError(t, err)

	_, err = fixture.RunCli("proj", "remove-source", projectName, "https://github.com/argoproj/argo-cd.git")
	assert.NoError(t, err)

	// Removing an already-removed source must still succeed (idempotent).
	_, err = fixture.RunCli("proj", "remove-source", projectName, "https://github.com/argoproj/argo-cd.git")
	assert.NoError(t, err)

	proj, err := fixture.AppClientset.ArgoprojV1alpha1().AppProjects(fixture.ArgoCDNamespace).Get(context.Background(), projectName, metav1.GetOptions{})
	assert.NoError(t, err)
	assert.Equal(t, projectName, proj.Name)
	assert.Equal(t, 0, len(proj.Spec.SourceRepos))
	assertProjHasEvent(t, proj, "update", argo.EventReasonResourceUpdated)
}
explode_data.jsonl/58444
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 13021, 7849, 3608, 1155, 353, 8840, 836, 8, 341, 1166, 12735, 22834, 19098, 27529, 1397, 1155, 692, 72470, 675, 1669, 330, 30386, 27651, 488, 33317, 9978, 1072, 9730, 13244, 1005, 55832, 1507, 220, 16, 15, 340, 197, 6878, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetZoneByNodeName(t *testing.T) { c := &Cloud{} zone, err := c.GetZoneByNodeName(context.Background(), types.NodeName("192.168.10.5")) if nil != err { t.Fatalf("GetZoneByNodeName failed: %s", err) } if "" != zone.Region { t.Fatalf("Zone Region is not empty") } if "" != zone.FailureDomain { t.Fatalf("Zone FailureDomain is not empty") } }
explode_data.jsonl/77151
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 140 }
[ 2830, 3393, 1949, 15363, 1359, 1955, 675, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 609, 16055, 16094, 197, 8684, 11, 1848, 1669, 272, 2234, 15363, 1359, 1955, 675, 5378, 19047, 1507, 4494, 21714, 675, 445, 16, 24, 17, 13, 16, 21, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestSplitProto checks that splitProto separates an optional "proto://"
// prefix from the address for bare, tls:// and starttls:// inputs.
func TestSplitProto(t *testing.T) {
	var tests = []struct {
		input string
		proto string
		addr  string
	}{
		{
			input: "localhost",
			proto: "",
			addr:  "localhost",
		},
		{
			input: "tls://my.local.domain",
			proto: "tls",
			addr:  "my.local.domain",
		},
		{
			input: "starttls://my.local.domain",
			proto: "starttls",
			addr:  "my.local.domain",
		},
	}
	for i, test := range tests {
		testName := test.input
		t.Run(testName, func(t *testing.T) {
			pa := splitProto(test.input)
			if pa.protocol != test.proto {
				t.Errorf("Testcase %d: Incorrect proto: expected %v, got %v", i, test.proto, pa.protocol)
			}
			if pa.address != test.addr {
				t.Errorf("Testcase %d: Incorrect addr: expected %v, got %v", i, test.addr, pa.address)
			}
		})
	}
}
explode_data.jsonl/944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 367 }
[ 2830, 3393, 20193, 31549, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 22427, 914, 198, 197, 197, 15110, 914, 198, 197, 53183, 220, 914, 198, 197, 59403, 197, 197, 515, 298, 22427, 25, 330, 8301, 756, 298, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestGetPartitionKey checks outputPlugin.getPartitionKey against a record
// with flat, one-level-nested and two-level-nested keys. "->" separates
// nesting levels; a lookup must land on a []byte leaf to count as found —
// missing keys or stopping at a non-leaf map report no value.
func TestGetPartitionKey(t *testing.T) {
	record := map[interface{}]interface{}{
		"testKey": []byte("test value with no nested keys"),
		"testKeyWithOneNestedKey": map[interface{}]interface{}{
			"nestedKey": []byte("test value with one nested key"),
		},
		"testKeyWithNestedKeys": map[interface{}]interface{}{
			"outerKey": map[interface{}]interface{}{
				"innerKey": []byte("test value with inner key"),
			},
		},
	}

	//test getPartitionKey() with single partition key
	outputPlugin, _ := newMockOutputPlugin(nil, false)
	outputPlugin.partitionKey = "testKey"
	value, hasValue := outputPlugin.getPartitionKey(record)
	assert.Equal(t, true, hasValue, "Should find value")
	assert.Equal(t, value, "test value with no nested keys")

	//test getPartitionKey() with nested partition key
	outputPlugin.partitionKey = "testKeyWithOneNestedKey->nestedKey"
	value, hasValue = outputPlugin.getPartitionKey(record)
	assert.Equal(t, true, hasValue, "Should find value")
	assert.Equal(t, value, "test value with one nested key")

	outputPlugin.partitionKey = "testKeyWithNestedKeys->outerKey->innerKey"
	value, hasValue = outputPlugin.getPartitionKey(record)
	assert.Equal(t, true, hasValue, "Should find value")
	assert.Equal(t, value, "test value with inner key")

	//test getPartitionKey() with partition key not found
	outputPlugin.partitionKey = "some key"
	value, hasValue = outputPlugin.getPartitionKey(record)
	assert.Equal(t, false, hasValue, "Should not find value")
	assert.Len(t, value, 0, "This should be an empty string")

	// A key that resolves to a nested map (not a leaf) is also "not found".
	outputPlugin.partitionKey = "testKeyWithOneNestedKey"
	value, hasValue = outputPlugin.getPartitionKey(record)
	assert.Equal(t, false, hasValue, "Should not find value")
	assert.Len(t, value, 0, "This should be an empty string")

	outputPlugin.partitionKey = "testKeyWithOneNestedKey->someKey"
	value, hasValue = outputPlugin.getPartitionKey(record)
	assert.Equal(t, false, hasValue, "Should not find value")
	assert.Len(t, value, 0, "This should be an empty string")
}
explode_data.jsonl/74109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 659 }
[ 2830, 3393, 1949, 49978, 1592, 1155, 353, 8840, 836, 8, 341, 71952, 1669, 2415, 58, 4970, 78134, 4970, 67066, 197, 197, 1, 1944, 1592, 788, 3056, 3782, 445, 1944, 897, 448, 902, 24034, 6894, 4461, 197, 197, 1, 1944, 1592, 2354, 3966, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCheckNoResourceMatchError verifies the human-readable message produced
// for meta.NoResourceMatchError across the resource / version / group
// combinations of the partial GroupVersionResource.
func TestCheckNoResourceMatchError(t *testing.T) {
	testCheckError(t, []checkErrTestCase{
		{
			&meta.NoResourceMatchError{PartialResource: schema.GroupVersionResource{Resource: "foo"}},
			`the server doesn't have a resource type "foo"`,
			DefaultErrorExitCode,
		},
		{
			&meta.NoResourceMatchError{PartialResource: schema.GroupVersionResource{Version: "theversion", Resource: "foo"}},
			`the server doesn't have a resource type "foo" in version "theversion"`,
			DefaultErrorExitCode,
		},
		{
			&meta.NoResourceMatchError{PartialResource: schema.GroupVersionResource{Group: "thegroup", Version: "theversion", Resource: "foo"}},
			`the server doesn't have a resource type "foo" in group "thegroup" and version "theversion"`,
			DefaultErrorExitCode,
		},
		{
			&meta.NoResourceMatchError{PartialResource: schema.GroupVersionResource{Group: "thegroup", Resource: "foo"}},
			`the server doesn't have a resource type "foo" in group "thegroup"`,
			DefaultErrorExitCode,
		},
	})
}
explode_data.jsonl/38774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 3973, 2753, 4783, 8331, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 3973, 1454, 1155, 11, 3056, 2028, 7747, 16458, 515, 197, 197, 515, 298, 197, 5, 5490, 16766, 4783, 8331, 1454, 90, 37314, 4783, 25, 10802, 5407, 5637, 4783...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestInspectBlock generates a genesis block from the sample insecure solo
// profile and then checks that the generated block file can be inspected.
func TestInspectBlock(t *testing.T) {
	blockDest := filepath.Join(tmpDir, "block")
	config := configtxgentest.Load(genesisconfig.SampleInsecureSoloProfile)
	assert.NoError(t, doOutputBlock(config, "foo", blockDest), "Good block generation request")
	assert.NoError(t, doInspectBlock(blockDest), "Good block inspection request")
}
explode_data.jsonl/14584
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 58533, 4713, 1155, 353, 8840, 836, 8, 341, 47996, 34830, 1669, 26054, 22363, 10368, 6184, 11, 330, 4574, 5130, 25873, 1669, 2193, 3998, 15772, 477, 13969, 36884, 13774, 1676, 76266, 641, 25132, 89299, 8526, 692, 6948, 35699, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestBool checks JSON round-tripping of the tri-state Bool type: an unset
// value marshals as null (or is dropped entirely with ",omitempty"), and
// unmarshaling the produced JSON restores the original struct exactly.
func TestBool(t *testing.T) {
	tests := []struct {
		name string
		in   interface{}
		want string // JSON
	}{
		{
			name: "null_for_unset",
			in: struct {
				True  Bool
				False Bool
				Unset Bool
			}{
				True:  "true",
				False: "false",
			},
			want: `{"True":true,"False":false,"Unset":null}`,
		},
		{
			name: "omitempty_unset",
			in: struct {
				True  Bool
				False Bool
				Unset Bool `json:",omitempty"`
			}{
				True:  "true",
				False: "false",
			},
			want: `{"True":true,"False":false}`,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			j, err := json.Marshal(tt.in)
			if err != nil {
				t.Fatal(err)
			}
			if string(j) != tt.want {
				t.Errorf("wrong JSON:\n got: %s\nwant: %s\n", j, tt.want)
			}
			// And back again: rebuild a value of the same anonymous struct
			// type via reflection and unmarshal into it.
			newVal := reflect.New(reflect.TypeOf(tt.in))
			out := newVal.Interface()
			if err := json.Unmarshal(j, out); err != nil {
				t.Fatalf("Unmarshal %#q: %v", j, err)
			}
			got := newVal.Elem().Interface()
			if !reflect.DeepEqual(tt.in, got) {
				t.Errorf("value mismatch\n got: %+v\nwant: %+v\n", got, tt.in)
			}
		})
	}
}
explode_data.jsonl/27563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 571 }
[ 2830, 3393, 11233, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 17430, 256, 3749, 16094, 197, 50780, 914, 442, 4718, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 2921, 5478, 98109, 756...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestHttpParser_PhraseContainsSpaces parses HTTP responses whose status
// phrase contains spaces ("Not Found", "Internal Server Error") and one with
// an empty phrase, which must fail to parse.
func TestHttpParser_PhraseContainsSpaces(t *testing.T) {
	logp.TestingSetup(logp.WithSelectors("http"))

	response_404 := "HTTP/1.1 404 Not Found\r\n" +
		"Server: Apache-Coyote/1.1\r\n" +
		"Content-Type: text/html;charset=utf-8\r\n" +
		"Content-Length: 18\r\n" +
		"Date: Mon, 31 Jul 2017 11:31:53 GMT\r\n" +
		"\r\n" +
		"Http Response Body"
	r, ok, complete := testParse(nil, response_404)
	assert.True(t, ok)
	assert.True(t, complete)
	assert.Equal(t, 18, r.contentLength)
	assert.Equal(t, "Not Found", string(r.statusPhrase))
	assert.Equal(t, 404, int(r.statusCode))

	response_500 := "HTTP/1.1 500 Internal Server Error\r\n" +
		"Server: Apache-Coyote/1.1\r\n" +
		"Content-Type: text/html;charset=utf-8\r\n" +
		"Content-Length: 2\r\n" +
		"Date: Mon, 30 Jul 2017 00:00:00 GMT\r\n" +
		"\r\n" +
		"xx"
	r, ok, complete = testParse(nil, response_500)
	assert.True(t, ok)
	assert.True(t, complete)
	assert.Equal(t, 2, r.contentLength)
	assert.Equal(t, "Internal Server Error", string(r.statusPhrase))
	assert.Equal(t, 500, int(r.statusCode))

	// Status line with a code but an empty phrase must be rejected.
	broken := "HTTP/1.1 500 \r\n" +
		"Server: Apache-Coyote/1.1\r\n" +
		"Content-Type: text/html;charset=utf-8\r\n" +
		"Content-Length: 2\r\n" +
		"Date: Mon, 30 Jul 2017 00:00:00 GMT\r\n" +
		"\r\n" +
		"xx"
	r, ok, complete = testParse(nil, broken)
	assert.False(t, ok)
	assert.False(t, complete)
}
explode_data.jsonl/16503
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 623 }
[ 2830, 3393, 2905, 6570, 1088, 4079, 519, 23805, 71324, 1155, 353, 8840, 836, 8, 341, 6725, 79, 8787, 287, 21821, 12531, 79, 26124, 96995, 445, 1254, 5455, 21735, 62, 19, 15, 19, 1669, 330, 9230, 14, 16, 13, 16, 220, 19, 15, 19, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateActionProxyRewritePathForRegexp(t *testing.T) { tests := []string{"/rewrite$1", "test", `/$2`, `\"test\"`} for _, test := range tests { allErrs := validateActionProxyRewritePathForRegexp(test, field.NewPath("rewritePath")) if len(allErrs) != 0 { t.Errorf("validateActionProxyRewritePathForRegexp(%v) returned errors for valid input: %v", test, allErrs) } } }
explode_data.jsonl/65898
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 17926, 2512, 16219, 58465, 1247, 1820, 2461, 3477, 4580, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 917, 90, 3115, 52473, 3, 16, 497, 330, 1944, 497, 1565, 10749, 17, 7808, 1565, 2105, 1944, 2105, 53136, 2023, 8358, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestFreelist_free checks that freeing a page records its id in the
// freelist's pending map under the given transaction id (100).
func TestFreelist_free(t *testing.T) {
	f := newFreelist()
	f.free(100, &page{id: 12})
	if !reflect.DeepEqual([]pgid{12}, f.pending[100]) {
		t.Fatalf("exp=%v; got=%v", []pgid{12}, f.pending[100])
	}
}
explode_data.jsonl/43709
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 97 }
[ 2830, 3393, 36731, 19230, 8905, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 36731, 19230, 741, 1166, 45595, 7, 16, 15, 15, 11, 609, 2893, 61761, 25, 220, 16, 17, 3518, 743, 753, 34913, 94750, 10556, 3517, 307, 90, 16, 17, 2137, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMultiClusterTickRegionUpdate(t *testing.T) { testMultiClusterTickRegionUpdate(t, false, false) testMultiClusterTickRegionUpdate(t, false, true) testMultiClusterTickRegionUpdate(t, true, false) }
explode_data.jsonl/24610
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 20358, 28678, 22213, 14091, 4289, 1155, 353, 8840, 836, 8, 341, 18185, 20358, 28678, 22213, 14091, 4289, 1155, 11, 895, 11, 895, 340, 18185, 20358, 28678, 22213, 14091, 4289, 1155, 11, 895, 11, 830, 340, 18185, 20358, 28678, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestInvalidStruct(t *testing.T) { validate := New() s := &SubTest{ Test: "1", } err := validate.Struct(s.Test) NotEqual(t, err, nil) Equal(t, err.Error(), "validator: (nil string)") err = validate.Struct(nil) NotEqual(t, err, nil) Equal(t, err.Error(), "validator: (nil)") err = validate.StructPartial(nil, "SubTest.Test") NotEqual(t, err, nil) Equal(t, err.Error(), "validator: (nil)") err = validate.StructExcept(nil, "SubTest.Test") NotEqual(t, err, nil) Equal(t, err.Error(), "validator: (nil)") }
explode_data.jsonl/77334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 7928, 9422, 1155, 353, 8840, 836, 8, 1476, 197, 7067, 1669, 1532, 2822, 1903, 1669, 609, 3136, 2271, 515, 197, 73866, 25, 330, 16, 756, 197, 630, 9859, 1669, 9593, 51445, 1141, 8787, 340, 197, 2623, 2993, 1155, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTypicalFlow(t *testing.T) { r := &PodAutoscaler{} r.Status.InitializeConditions() checkConditionOngoingPodAutoscaler(r.Status, PodAutoscalerConditionActive, t) checkConditionOngoingPodAutoscaler(r.Status, PodAutoscalerConditionReady, t) // When we see traffic, mark ourselves active. r.Status.MarkActive() checkConditionSucceededPodAutoscaler(r.Status, PodAutoscalerConditionActive, t) checkConditionSucceededPodAutoscaler(r.Status, PodAutoscalerConditionReady, t) // Check idempotency. r.Status.MarkActive() checkConditionSucceededPodAutoscaler(r.Status, PodAutoscalerConditionActive, t) checkConditionSucceededPodAutoscaler(r.Status, PodAutoscalerConditionReady, t) // When we stop seeing traffic, mark outselves inactive. r.Status.MarkInactive("TheReason", "the message") checkConditionFailedPodAutoscaler(r.Status, PodAutoscalerConditionActive, t) checkConditionFailedPodAutoscaler(r.Status, PodAutoscalerConditionReady, t) // When traffic hits the activator and we scale up the deployment we mark // ourselves as activating. r.Status.MarkActivating("Activating", "Red team, GO!") checkConditionOngoingPodAutoscaler(r.Status, PodAutoscalerConditionActive, t) checkConditionOngoingPodAutoscaler(r.Status, PodAutoscalerConditionReady, t) // When the activator successfully forwards traffic to the deployment, // we mark ourselves as active once more. r.Status.MarkActive() checkConditionSucceededPodAutoscaler(r.Status, PodAutoscalerConditionActive, t) checkConditionSucceededPodAutoscaler(r.Status, PodAutoscalerConditionReady, t) }
explode_data.jsonl/32172
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 514 }
[ 2830, 3393, 12834, 938, 18878, 1155, 353, 8840, 836, 8, 341, 7000, 1669, 609, 23527, 19602, 436, 63084, 16094, 7000, 10538, 45829, 35435, 741, 25157, 10547, 46, 85071, 23527, 19602, 436, 63084, 2601, 10538, 11, 16821, 19602, 436, 63084, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_calculateCompressedBlobSizeFromToolOutput(t *testing.T) { tests := []struct { name string output string is_err bool size int64 }{ { "Success", "Wrote 5 bytes", false, 5, }, { "Error", "Wrote bytes", true, 0, }, { "EmptyOutput", "", true, 0, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { var bytes bytes.Buffer bytes.WriteString(test.output) size, err := calculateCompressedBlobSizeFromToolOutput(bytes) if !test.is_err && (err != nil) { t.Fatalf("Calculation returned an error when expected success: %s", err) } else if test.is_err && (err == nil) { t.Fatalf("Calculation returned success when expecting error") } if test.size != size { t.Fatalf("Calculation returns %d; expect %d", size, test.size) } }) } }
explode_data.jsonl/12619
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 376 }
[ 2830, 3393, 24005, 11207, 1092, 14318, 37985, 1695, 3830, 7740, 5097, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 21170, 914, 198, 197, 19907, 9266, 1807, 198, 197, 13832, 256, 526, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestStatus(t *testing.T) { status := Status{NewId(), STATUS_ONLINE, true, 0, "123"} json := status.ToJson() status2 := StatusFromJson(strings.NewReader(json)) assert.Equal(t, status.UserId, status2.UserId, "UserId should have matched") assert.Equal(t, status.Status, status2.Status, "Status should have matched") assert.Equal(t, status.LastActivityAt, status2.LastActivityAt, "LastActivityAt should have matched") assert.Equal(t, status.Manual, status2.Manual, "Manual should have matched") assert.Equal(t, "", status2.ActiveChannel) json = status.ToClusterJson() status2 = StatusFromJson(strings.NewReader(json)) assert.Equal(t, status.ActiveChannel, status2.ActiveChannel) }
explode_data.jsonl/67568
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 2522, 1155, 353, 8840, 836, 8, 341, 23847, 1669, 8104, 90, 3564, 764, 1507, 24014, 11077, 8265, 11, 830, 11, 220, 15, 11, 330, 16, 17, 18, 16707, 30847, 1669, 2639, 3274, 5014, 741, 23847, 17, 1669, 8104, 3830, 5014, 514...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBlockWrapsAround(t *testing.T) { s, c := makeTestServer() defer s.Shutdown() n := 65535 * 512 for i := n - 2; i < n+2; i++ { testSendReceive(t, c, int64(i)) } }
explode_data.jsonl/17551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 4713, 35214, 2625, 43580, 1155, 353, 8840, 836, 8, 341, 1903, 11, 272, 1669, 1281, 2271, 5475, 741, 16867, 274, 10849, 18452, 741, 9038, 1669, 220, 21, 20, 20, 18, 20, 353, 220, 20, 16, 17, 198, 2023, 600, 1669, 308, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCallLessArgsDynamic(t *testing.T) { const SCRIPT = ` function A(a, b, c) { // Make it stashful function B() { return a; } return String(a) + " " + String(b) + " " + String(c); } var rv = A(1, 2); ` testScript(SCRIPT, asciiString("1 2 undefined"), t) }
explode_data.jsonl/75214
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 7220, 27451, 4117, 21752, 1155, 353, 8840, 836, 8, 341, 4777, 53679, 284, 22074, 1688, 362, 2877, 11, 293, 11, 272, 8, 341, 197, 322, 7405, 432, 64037, 1262, 198, 7527, 425, 368, 341, 197, 853, 264, 280, 197, 532, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_DeployHandler_Execution_Errors_LockExistsError(t *testing.T) { release := MockRelease() awsc := MockAwsClients(release) state_machine := createTestStateMachine(t, awsc) awsc.S3.AddGetObject(*release.RootLockPath(), `{"uuid":"notuuid"}`, nil) exec, err := state_machine.Execute(release) assert.Error(t, err) assert.Regexp(t, "LockExistsError", exec.LastOutputJSON) assert.Regexp(t, "Lock Already Exists", exec.LastOutputJSON) assert.Equal(t, []string{ "Validate", "Lock", "FailureClean", }, exec.Path()) t.Run("no locks acquired in dynamodb", func(t *testing.T) { assert.Equal(t, 0, len(awsc.DynamoDB.PutItemInputs)) }) }
explode_data.jsonl/62296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 253 }
[ 2830, 3393, 90680, 1989, 3050, 62, 20294, 93623, 1087, 2351, 1176, 15575, 1454, 1155, 353, 8840, 836, 8, 341, 17200, 1623, 1669, 14563, 16077, 741, 197, 672, 2388, 1669, 14563, 47359, 47174, 5801, 1623, 692, 24291, 38695, 1669, 1855, 2271...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetenvBool(t *testing.T) { job := mkJob(t, "dummy") job.SetenvBool("foo", true) if val := job.GetenvBool("foo"); !val { t.Fatalf("GetenvBool returns incorrect value: %t", val) } job.SetenvBool("bar", false) if val := job.GetenvBool("bar"); val { t.Fatalf("GetenvBool returns incorrect value: %t", val) } if val := job.GetenvBool("nonexistent"); val { t.Fatalf("GetenvBool returns incorrect value: %t", val) } }
explode_data.jsonl/15387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 1649, 3160, 11233, 1155, 353, 8840, 836, 8, 341, 68577, 1669, 23789, 12245, 1155, 11, 330, 31390, 1138, 68577, 4202, 3160, 11233, 445, 7975, 497, 830, 340, 743, 1044, 1669, 2618, 64883, 11233, 445, 7975, 5038, 753, 831, 341,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInvalidTrailerClosesConnection(t *testing.T) { defer afterTest(t) for _, handler := range testHandlerBodyConsumers { conn := new(testConn) conn.readBuf.WriteString("POST /public HTTP/1.1\r\n" + "Host: test\r\n" + "Trailer: hack\r\n" + "Transfer-Encoding: chunked\r\n" + "\r\n" + "3\r\n" + "hax\r\n" + "0\r\n" + "I'm not a valid trailer\r\n" + "GET /secret HTTP/1.1\r\n" + "Host: test\r\n" + "\r\n") conn.closec = make(chan bool, 1) ln := &oneConnListener{conn} var numReqs int go Serve(ln, HandlerFunc(func(_ ResponseWriter, req *Request) { numReqs++ if strings.Contains(req.URL.Path, "secret") { t.Errorf("Handler %s, Request for /secret encountered, should not have happened.", handler.name) } handler.f(req.Body) })) <-conn.closec if numReqs != 1 { t.Errorf("Handler %s: got %d reqs; want 1", handler.name, numReqs) } } }
explode_data.jsonl/22419
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 424 }
[ 2830, 3393, 7928, 1282, 38782, 34, 49341, 4526, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 2023, 8358, 7013, 1669, 2088, 1273, 3050, 5444, 41966, 388, 341, 197, 32917, 1669, 501, 8623, 9701, 340, 197, 32917, 4125, 15064...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCheckMountDestFalsePositive(t *testing.T) { dest := "/rootfs/sysfiles/fs/cgroup" err := checkMountDestination("/rootfs", dest) if err != nil { t.Fatal(err) } }
explode_data.jsonl/1880
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 3973, 16284, 34830, 4049, 35490, 1155, 353, 8840, 836, 8, 341, 49616, 1669, 3521, 2888, 3848, 41836, 7198, 73036, 2899, 4074, 698, 9859, 1669, 1779, 16284, 33605, 4283, 2888, 3848, 497, 3201, 340, 743, 1848, 961, 2092, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestInvalidId(t *testing.T) { var pet Pet err := json.Unmarshal([]byte(testJSONBadID), &pet) if err.Error() != `invalid ObjectId in JSON: "bad"` { t.Errorf("Expected error %s, but was %s", `invalid ObjectId in JSON: "bad"`, err) } }
explode_data.jsonl/13929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 7928, 764, 1155, 353, 8840, 836, 8, 341, 2405, 6753, 11333, 198, 9859, 1669, 2951, 38097, 10556, 3782, 8623, 5370, 17082, 915, 701, 609, 6862, 692, 743, 1848, 6141, 368, 961, 1565, 11808, 50090, 304, 4718, 25, 330, 13855, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSerializeBatch(t *testing.T) { m := MustMetric( metric.New( "cpu", map[string]string{}, map[string]interface{}{ "value": 42.0, }, time.Unix(0, 0), ), ) metrics := []telegraf.Metric{m, m} s, _ := NewSerializer(0) buf, err := s.SerializeBatch(metrics) require.NoError(t, err) require.Equal(t, []byte(`{"metrics":[{"fields":{"value":42},"name":"cpu","tags":{},"timestamp":0},{"fields":{"value":42},"name":"cpu","tags":{},"timestamp":0}]}`), buf) }
explode_data.jsonl/47354
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 213 }
[ 2830, 3393, 15680, 21074, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 15465, 54310, 1006, 197, 2109, 16340, 7121, 1006, 298, 197, 1, 16475, 756, 298, 19567, 14032, 30953, 38837, 298, 19567, 14032, 31344, 67066, 571, 197, 63307, 788, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValueString(t *testing.T) { t.Parallel() ctx := NewIsolate().NewContext() testcases := []struct{ jsCode, toString string }{ // primitives: {`"some string"`, `some string`}, {`5`, `5`}, {`5.123`, `5.123`}, {`true`, `true`}, {`false`, `false`}, {`null`, `null`}, {`undefined`, `undefined`}, // more complicated objects: {`(function x() { return 1 + 2; })`, `function x() { return 1 + 2; }`}, {`([1,2,3])`, `1,2,3`}, {`({x: 5})`, `[object Object]`}, // basically a primitive, but an interesting case still: {`JSON.stringify({x: 5})`, `{"x":5}`}, } for i, test := range testcases { res, err := ctx.Eval(test.jsCode, "test.js") if err != nil { t.Fatalf("Case %d: Error evaluating javascript %#q, err: %v", i, test.jsCode, err) } if res.String() != test.toString { t.Errorf("Case %d: Got %#q, expected %#q from running js %#q", i, res.String(), test.toString, test.jsCode) } } }
explode_data.jsonl/81543
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 415 }
[ 2830, 3393, 1130, 703, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 1532, 3872, 33066, 1005, 3564, 1972, 2822, 18185, 23910, 1669, 3056, 1235, 90, 6994, 2078, 11, 14330, 914, 335, 515, 197, 197, 322, 71194, 510, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGenerateMetricTasksNoIntervalOrCount(t *testing.T) { run := &v1alpha1.AnalysisRun{ Spec: v1alpha1.AnalysisRunSpec{ Metrics: []v1alpha1.Metric{{ Name: "success-rate", }}, }, Status: v1alpha1.AnalysisRunStatus{ Phase: v1alpha1.AnalysisPhaseRunning, MetricResults: []v1alpha1.MetricResult{{ Name: "success-rate", Count: 1, Measurements: []v1alpha1.Measurement{{ Value: "99", Phase: v1alpha1.AnalysisPhaseSuccessful, StartedAt: timePtr(metav1.NewTime(time.Now().Add(-50 * time.Second))), FinishedAt: timePtr(metav1.NewTime(time.Now().Add(-50 * time.Second))), }}, }}, }, } { // ensure we don't take measurement when result count indicates we completed tasks := generateMetricTasks(run, run.Spec.Metrics) assert.Equal(t, 0, len(tasks)) } { // ensure we do take measurements when measurement has not been taken successRate := run.Status.MetricResults[0] successRate.Measurements = nil successRate.Count = 0 run.Status.MetricResults[0] = successRate tasks := generateMetricTasks(run, run.Spec.Metrics) assert.Equal(t, 1, len(tasks)) } }
explode_data.jsonl/75809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 470 }
[ 2830, 3393, 31115, 54310, 25449, 2753, 10256, 2195, 2507, 1155, 353, 8840, 836, 8, 341, 56742, 1669, 609, 85, 16, 7141, 16, 8624, 9092, 6727, 515, 197, 7568, 992, 25, 348, 16, 7141, 16, 8624, 9092, 6727, 8327, 515, 298, 9209, 13468, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInstallNewPipeline(t *testing.T) { pusher, err := InstallNewPipeline(validConfig) if err != nil { t.Fatalf("Failed to create install pipeline with error %v", err) } if global.GetMeterProvider() != pusher.MeterProvider() { t.Fatalf("Failed to register push Controller provider globally") } }
explode_data.jsonl/8039
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 103 }
[ 2830, 3393, 24690, 3564, 34656, 1155, 353, 8840, 836, 8, 341, 43155, 261, 11, 1848, 1669, 19242, 3564, 34656, 41529, 2648, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 9408, 311, 1855, 4582, 15301, 448, 1465, 1018, 85, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMySQL(t *testing.T) { sqltrace.Register("mysql", &mysql.MySQLDriver{}, sqltrace.WithServiceName("mysql-test")) db, err := Open("mysql", "test:test@tcp(127.0.0.1:3306)/test") if err != nil { log.Fatal(err) } defer db.Close() testConfig := &sqltest.Config{ DB: db.DB(), DriverName: "mysql", TableName: tableName, ExpectName: "mysql.query", ExpectTags: map[string]interface{}{ ext.ServiceName: "mysql-test", ext.SpanType: ext.SpanTypeSQL, ext.TargetHost: "127.0.0.1", ext.TargetPort: "3306", "db.user": "test", "db.name": "test", }, } sqltest.RunAll(t, testConfig) }
explode_data.jsonl/31330
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 59224, 1155, 353, 8840, 836, 8, 341, 30633, 15067, 19983, 445, 12272, 497, 609, 12272, 27054, 6688, 11349, 22655, 5704, 15067, 26124, 1860, 675, 445, 12272, 16839, 5455, 20939, 11, 1848, 1669, 5264, 445, 12272, 497, 330, 1944,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMatches(t *testing.T) { storage, closer := NewTestStorage(t, 1) defer closer.Close() samples := make([]*model.Sample, 100) fingerprints := make(model.Fingerprints, 100) for i := range samples { metric := model.Metric{ model.MetricNameLabel: model.LabelValue(fmt.Sprintf("test_metric_%d", i)), "label1": model.LabelValue(fmt.Sprintf("test_%d", i/10)), "label2": model.LabelValue(fmt.Sprintf("test_%d", (i+5)/10)), "all": "const", } samples[i] = &model.Sample{ Metric: metric, Timestamp: model.Time(i), Value: model.SampleValue(i), } fingerprints[i] = metric.FastFingerprint() } for _, s := range samples { storage.Append(s) } storage.WaitForIndexing() newMatcher := func(matchType metric.MatchType, name model.LabelName, value model.LabelValue) *metric.LabelMatcher { lm, err := metric.NewLabelMatcher(matchType, name, value) if err != nil { t.Fatalf("error creating label matcher: %s", err) } return lm } var matcherTests = []struct { matchers metric.LabelMatchers expected model.Fingerprints }{ { matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "x")}, expected: model.Fingerprints{}, }, { matchers: metric.LabelMatchers{newMatcher(metric.Equal, "label1", "test_0")}, expected: fingerprints[:10], }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "label1", "test_0"), newMatcher(metric.Equal, "label2", "test_1"), }, expected: fingerprints[5:10], }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "all", "const"), newMatcher(metric.NotEqual, "label1", "x"), }, expected: fingerprints, }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "all", "const"), newMatcher(metric.NotEqual, "label1", "test_0"), }, expected: fingerprints[10:], }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "all", "const"), newMatcher(metric.NotEqual, "label1", "test_0"), newMatcher(metric.NotEqual, "label1", "test_1"), newMatcher(metric.NotEqual, "label1", "test_2"), }, expected: fingerprints[30:], }, { matchers: metric.LabelMatchers{ 
newMatcher(metric.Equal, "label1", ""), }, expected: fingerprints[:0], }, { matchers: metric.LabelMatchers{ newMatcher(metric.NotEqual, "label1", "test_0"), newMatcher(metric.Equal, "label1", ""), }, expected: fingerprints[:0], }, { matchers: metric.LabelMatchers{ newMatcher(metric.NotEqual, "label1", "test_0"), newMatcher(metric.Equal, "label2", ""), }, expected: fingerprints[:0], }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "all", "const"), newMatcher(metric.NotEqual, "label1", "test_0"), newMatcher(metric.Equal, "not_existant", ""), }, expected: fingerprints[10:], }, { matchers: metric.LabelMatchers{ newMatcher(metric.RegexMatch, "label1", `test_[3-5]`), }, expected: fingerprints[30:60], }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "all", "const"), newMatcher(metric.RegexNoMatch, "label1", `test_[3-5]`), }, expected: append(append(model.Fingerprints{}, fingerprints[:30]...), fingerprints[60:]...), }, { matchers: metric.LabelMatchers{ newMatcher(metric.RegexMatch, "label1", `test_[3-5]`), newMatcher(metric.RegexMatch, "label2", `test_[4-6]`), }, expected: fingerprints[35:60], }, { matchers: metric.LabelMatchers{ newMatcher(metric.RegexMatch, "label1", `test_[3-5]`), newMatcher(metric.NotEqual, "label2", `test_4`), }, expected: append(append(model.Fingerprints{}, fingerprints[30:35]...), fingerprints[45:60]...), }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "label1", `nonexistent`), newMatcher(metric.RegexMatch, "label2", `test`), }, expected: model.Fingerprints{}, }, { matchers: metric.LabelMatchers{ newMatcher(metric.Equal, "label1", `test_0`), newMatcher(metric.RegexMatch, "label2", `nonexistent`), }, expected: model.Fingerprints{}, }, } for _, mt := range matcherTests { res := storage.MetricsForLabelMatchers(mt.matchers...) 
if len(mt.expected) != len(res) { t.Fatalf("expected %d matches for %q, found %d", len(mt.expected), mt.matchers, len(res)) } for fp1 := range res { found := false for _, fp2 := range mt.expected { if fp1 == fp2 { found = true break } } if !found { t.Errorf("expected fingerprint %s for %q not in result", fp1, mt.matchers) } } } }
explode_data.jsonl/34431
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2071 }
[ 2830, 3393, 42470, 1155, 353, 8840, 836, 8, 341, 197, 16172, 11, 12128, 1669, 1532, 2271, 5793, 1155, 11, 220, 16, 340, 16867, 12128, 10421, 2822, 1903, 4023, 1669, 1281, 85288, 2528, 76266, 11, 220, 16, 15, 15, 340, 1166, 5137, 25738...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPlanner(t *testing.T) { e, err := setupExecutor(t) require.NoError(t, err) testCases := []struct { Name string Input string Output []*Plan }{ { Name: "query with fields from the same service", Input: `query Test { s1fff { name } }`, Output: []*Plan{ { Service: "schema1", Type: "Query", Kind: "query", SelectionSet: mustParse(`{ s1fff { name } }`), After: nil, }, }, }, { Name: "query with fields from two seperate services", Input: `query Test { s1fff { name s2ok } }`, Output: []*Plan{ { Service: "schema1", Type: "Query", Kind: "query", SelectionSet: mustParse(`{ s1fff { name _federation { name } } }`), After: []*Plan{ { Path: []PathStep{ {Kind: KindField, Name: "s1fff"}, }, Type: "Foo", Kind: "query", Service: "schema2", SelectionSet: mustParse(`{ s2ok }`), }, }, }, }, }, { Name: "query with fields from nested levels between services", Input: `query Test { s1fff { s2bar { s1baz } } }`, Output: []*Plan{ { Service: "schema1", Type: "Query", Kind: "query", SelectionSet: mustParse(`{ s1fff { _federation { name } } }`), After: []*Plan{ { Path: []PathStep{ {Kind: KindField, Name: "s1fff"}, }, Type: "Foo", Kind: "query", Service: "schema2", SelectionSet: mustParse(`{ s2bar { _federation { id } } }`), After: []*Plan{ { Path: []PathStep{ {Kind: KindField, Name: "s2bar"}, }, Type: "Bar", Kind: "query", Service: "schema1", SelectionSet: mustParse(`{ s1baz }`), }, }, }, }, }, }, }, { Name: "with union types resolved by different services", Input: `query Test { s1both { __typename ... on Foo { a: s1nest { b: s1nest { c: s1nest { s2ok } } } name s1hmm s2ok } ... on Bar { id s1baz } } }`, Output: []*Plan{ { Service: "schema1", Type: "Query", Kind: "query", SelectionSet: mustParse(`{ s1both { __typename ... on Bar { __typename id s1baz } ... 
on Foo { __typename a: s1nest { b: s1nest { c: s1nest { _federation { name } } } } name s1hmm _federation { name } } } }`), After: []*Plan{ { Path: []PathStep{ {Kind: KindField, Name: "s1both"}, {Kind: KindType, Name: "Foo"}, {Kind: KindField, Name: "a"}, {Kind: KindField, Name: "b"}, {Kind: KindField, Name: "c"}, }, Type: "Foo", Kind: "query", Service: "schema2", SelectionSet: mustParse(`{ s2ok }`), }, { Path: []PathStep{ {Kind: KindField, Name: "s1both"}, {Kind: KindType, Name: "Foo"}, }, Type: "Foo", Kind: "query", Service: "schema2", SelectionSet: mustParse(`{ s2ok }`), }, }, }, }, }, { Name: "kitchen sink query", Input: `query Test { s1echo(foo: "foo", pair: {a: 1, b: 3}) s1fff { a: s1nest { b: s1nest { c: s1nest { s2ok } } } s1hmm s1nest { name } s2bar { id s1baz } s2ok } s2root }`, Output: []*Plan{ { Service: "schema1", Type: "Query", Kind: "query", SelectionSet: mustParse(`{ s1echo(foo: "foo", pair: {a: 1, b: 3}) s1fff { a: s1nest { b: s1nest { c: s1nest { _federation { name } } } } s1hmm s1nest { name } _federation { name } } }`), After: []*Plan{ { Path: []PathStep{ {Kind: KindField, Name: "s1fff"}, {Kind: KindField, Name: "a"}, {Kind: KindField, Name: "b"}, {Kind: KindField, Name: "c"}, }, Type: "Foo", Kind: "query", Service: "schema2", SelectionSet: mustParse(`{ s2ok }`), }, { Path: []PathStep{ {Kind: KindField, Name: "s1fff"}, }, Type: "Foo", Kind: "query", Service: "schema2", SelectionSet: mustParse(`{ s2bar { id _federation { id } } s2ok }`), After: []*Plan{ { Path: []PathStep{ {Kind: KindField, Name: "s2bar"}, }, Type: "Bar", Kind: "query", Service: "schema1", SelectionSet: mustParse(`{ s1baz }`), }, }, }, }, }, { Service: "schema2", Type: "Query", Kind: "query", SelectionSet: mustParse(`{ s2root }`), }, }, }, } for _, testCase := range testCases { t.Run(testCase.Name, func(t *testing.T) { plan, err := e.planRoot(graphql.MustParse(testCase.Input, map[string]interface{}{})) require.NoError(t, err) assert.Equal(t, testCase.Output, plan.After) }) } }
explode_data.jsonl/11035
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 3498 }
[ 2830, 3393, 2120, 4887, 1155, 353, 8840, 836, 8, 341, 7727, 11, 1848, 1669, 6505, 25255, 1155, 340, 17957, 35699, 1155, 11, 1848, 692, 18185, 37302, 1669, 3056, 1235, 341, 197, 21297, 256, 914, 198, 197, 66588, 220, 914, 198, 197, 804...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewInMemory(t *testing.T) { t.Parallel() tests := []struct { bl jwt.Blacklister }{ {containable.NewInMemory()}, {containable.NewInMemory()}, } for i, test := range tests { id := []byte(`eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9`) assert.NoError(t, test.bl.Set(id, time.Second*1), "Index %d", i) assert.NoError(t, test.bl.Set(appendTo(id, "2"), time.Second*2), "Index %d", i) assert.True(t, test.bl.Has(id), "Index %d", i) time.Sleep(time.Second * 3) assert.NoError(t, test.bl.Set(appendTo(id, "3"), time.Second*2), "Index %d", i) assert.False(t, test.bl.Has(id), "Index %d", i) assert.False(t, test.bl.Has(appendTo(id, "2")), "Index %d", i) assert.False(t, test.bl.Has(id), "Index %d", i) assert.True(t, test.bl.Has(appendTo(id, "3")), "Index %d", i) } }
explode_data.jsonl/42545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 3564, 641, 10642, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 1235, 341, 197, 96421, 24589, 28829, 75, 1571, 198, 197, 59403, 197, 197, 90, 52095, 480, 7121, 641, 10642, 78108, 197, 197, 90, 52...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUTXOID(t *testing.T) { c := codec.NewDefault() utxoID := UTXOID{ TxID: ids.ID{ 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, }, OutputIndex: 0x20212223, } if err := utxoID.Verify(); err != nil { t.Fatal(err) } bytes, err := c.Marshal(&utxoID) if err != nil { t.Fatal(err) } newUTXOID := UTXOID{} if err := c.Unmarshal(bytes, &newUTXOID); err != nil { t.Fatal(err) } if err := newUTXOID.Verify(); err != nil { t.Fatal(err) } if utxoID.InputID() != newUTXOID.InputID() { t.Fatalf("Parsing returned the wrong UTXO ID") } }
explode_data.jsonl/13787
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 1381, 55, 29805, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 34647, 7121, 3675, 2822, 197, 332, 40822, 915, 1669, 547, 22867, 29805, 515, 197, 10261, 87, 915, 25, 14151, 9910, 515, 298, 197, 15, 87, 15, 15, 11, 220, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestRuleInsertValues(t *testing.T) { common.Log.Debug("Entering function: %s", common.GetFunctionName()) sqls := [][]string{ { `INSERT INTO tb VALUES (1), (2)`, `REPLACE INTO tb VALUES (1), (2)`, }, { `INSERT INTO tb VALUES (1)`, }, } oldMaxValueCount := common.Config.MaxValueCount common.Config.MaxValueCount = 1 for _, sql := range sqls[0] { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleInsertValues() if rule.Item != "ARG.012" { t.Error("Rule not match:", rule.Item, "Expect : ARG.012") } } else { t.Error("sqlparser.Parse Error:", err) } } for _, sql := range sqls[1] { q, err := NewQuery4Audit(sql) if err == nil { rule := q.RuleInsertValues() if rule.Item != "OK" { t.Error("Rule not match:", rule.Item, "Expect : OK") } } else { t.Error("sqlparser.Parse Error:", err) } } common.Config.MaxValueCount = oldMaxValueCount common.Log.Debug("Exiting function: %s", common.GetFunctionName()) }
explode_data.jsonl/76810
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 430 }
[ 2830, 3393, 11337, 13780, 6227, 1155, 353, 8840, 836, 8, 341, 83825, 5247, 20345, 445, 82867, 729, 25, 1018, 82, 497, 4185, 2234, 5152, 675, 2398, 30633, 82, 1669, 52931, 917, 515, 197, 197, 515, 298, 197, 63, 12698, 12496, 16363, 147...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGet(t *testing.T) { conf, err := newTestConfig() if err != nil { t.Fatal(err) } saveTestConfig(t, conf) defer cleanTestConfig(t) conf2, err := TryLoadFromDisk() if err != nil { t.Fatal(err) } if diff := cmp.Diff(conf, conf2); diff != "" { t.Fatal(diff) } }
explode_data.jsonl/20979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 1949, 1155, 353, 8840, 836, 8, 341, 67850, 11, 1848, 1669, 501, 2271, 2648, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 49230, 2271, 2648, 1155, 11, 2335, 340, 16867, 4240, 2271, 2648, 1155, 692,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestCheckValidHTTP2Request(t *testing.T) { tests := []struct { h http.Header want error }{ { h: http.Header{"Te": {"trailers"}}, want: nil, }, { h: http.Header{"Te": {"trailers", "bogus"}}, want: errors.New(`request header "TE" may only be "trailers" in HTTP/2`), }, { h: http.Header{"Foo": {""}}, want: nil, }, { h: http.Header{"Connection": {""}}, want: errors.New(`request header "Connection" is not valid in HTTP/2`), }, { h: http.Header{"Proxy-Connection": {""}}, want: errors.New(`request header "Proxy-Connection" is not valid in HTTP/2`), }, { h: http.Header{"Keep-Alive": {""}}, want: errors.New(`request header "Keep-Alive" is not valid in HTTP/2`), }, { h: http.Header{"Upgrade": {""}}, want: errors.New(`request header "Upgrade" is not valid in HTTP/2`), }, } for i, tt := range tests { got := checkValidHTTP2RequestHeaders(tt.h) if !reflect.DeepEqual(got, tt.want) { t.Errorf("%d. checkValidHTTP2Request = %v; want %v", i, got, tt.want) } } }
explode_data.jsonl/71699
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 485 }
[ 2830, 3393, 3973, 4088, 9230, 17, 1900, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 9598, 262, 1758, 15753, 198, 197, 50780, 1465, 198, 197, 59403, 197, 197, 515, 298, 9598, 25, 262, 1758, 15753, 4913, 6639, 788, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestVerifyClient(t *testing.T) { info := &certInfo{ CommonName: "test", Curve: "P256", } cfg, err := info.CreateCertConfig() if err != nil { t.Error(err) return } cfg.VerifyClient = true filterChains := []v2.FilterChain{ { TLS: *cfg, }, } lc := &v2.Listener{} lc.FilterChains = filterChains ctxMng, err := NewTLSServerContextManager(lc, nil, log.StartLogger) if err != nil { t.Errorf("create context manager failed %v", err) return } server := MockServer{ Mng: ctxMng, t: t, } server.GoListenAndServe(t) defer server.Close() time.Sleep(time.Second) //wait server start clientConfigs := []*v2.TLSConfig{ // Verify Server { Status: true, CACert: cfg.CACert, CertChain: cfg.CertChain, PrivateKey: cfg.PrivateKey, ServerName: "127.0.0.1", }, // Skip Verify Server { Status: true, CertChain: cfg.CertChain, PrivateKey: cfg.PrivateKey, InsecureSkip: true, }, } for i, cfg := range clientConfigs { cltMng, err := NewTLSClientContextManager(cfg, nil) if err != nil { t.Errorf("#%d create client context manager failed %v", i, err) continue } resp, err := MockClient(t, server.Addr, cltMng) if err != nil { t.Errorf("request server error %v", err) continue } ioutil.ReadAll(resp.Body) resp.Body.Close() } cfg = &v2.TLSConfig{ Status: true, ServerName: "127.0.0.1", InsecureSkip: true, } cltMng, err := NewTLSClientContextManager(cfg, nil) if err != nil { t.Errorf("create client context manager failed %v", err) return } resp, err := MockClient(t, server.Addr, cltMng) // expected bad certificate if err == nil { ioutil.ReadAll(resp.Body) resp.Body.Close() t.Errorf("server should verify client certificate") return } }
explode_data.jsonl/1521
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 788 }
[ 2830, 3393, 32627, 2959, 1155, 353, 8840, 836, 8, 341, 27043, 1669, 609, 12246, 1731, 515, 197, 90580, 675, 25, 330, 1944, 756, 197, 6258, 73047, 25, 414, 330, 47, 17, 20, 21, 756, 197, 532, 50286, 11, 1848, 1669, 3546, 7251, 36934,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func Test_GkeAbacEnabled(t *testing.T) { var tests = []struct { name string source string mustIncludeResultCode scanner.RuleID mustExcludeResultCode scanner.RuleID }{ { name: "check google_container_cluster with enable_legacy_abac set to true", source: ` resource "google_container_cluster" "gke" { enable_legacy_abac = "true" }`, mustIncludeResultCode: checks.GkeAbacEnabled, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { results := scanSource(test.source) assertCheckCode(t, test.mustIncludeResultCode, test.mustExcludeResultCode, results) }) } }
explode_data.jsonl/22948
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 2646, 440, 5830, 580, 5462, 1155, 353, 8840, 836, 8, 1476, 2405, 7032, 284, 3056, 1235, 341, 197, 11609, 1698, 914, 198, 197, 47418, 394, 914, 198, 197, 2109, 590, 22283, 2077, 2078, 20775, 63961, 915, 198, 197, 2109, 590,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInstrumentationLibraryMetricsSlice_RemoveIf(t *testing.T) { // Test RemoveIf on empty slice emptySlice := NewInstrumentationLibraryMetricsSlice() emptySlice.RemoveIf(func(el InstrumentationLibraryMetrics) bool { t.Fail() return false }) // Test RemoveIf filtered := generateTestInstrumentationLibraryMetricsSlice() pos := 0 filtered.RemoveIf(func(el InstrumentationLibraryMetrics) bool { pos++ return pos%3 == 0 }) assert.Equal(t, 5, filtered.Len()) }
explode_data.jsonl/32676
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 56324, 367, 16915, 27328, 33236, 66843, 2679, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 10783, 2679, 389, 4287, 15983, 198, 197, 3194, 33236, 1669, 1532, 56324, 367, 16915, 27328, 33236, 741, 197, 3194, 33236, 13270, 2679,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRollDPoS_convertToConsensusEvt(t *testing.T) { t.Parallel() ctrl := gomock.NewController(t) defer ctrl.Finish() r, err := NewRollDPoSBuilder(). SetConfig(config.RollDPoS{}). SetAddr(newTestAddr()). SetBlockchain(mock_blockchain.NewMockBlockchain(ctrl)). SetActPool(mock_actpool.NewMockActPool(ctrl)). SetP2P(mock_network.NewMockOverlay(ctrl)). Build() assert.NoError(t, err) assert.NotNil(t, r) // Test propose msg addr := newTestAddr() transfer, err := action.NewTransfer(1, big.NewInt(100), "src", "dst", []byte{}, testutil.TestGasLimit, big.NewInt(10)) require.NoError(t, err) selfPubKey := testaddress.Addrinfo["producer"].PublicKey selfPubKeyHash := keypair.HashPubKey(selfPubKey) address := address.New(config.Default.Chain.ID, selfPubKeyHash[:]) vote, err := action.NewVote(2, address.IotxAddress(), address.IotxAddress(), testutil.TestGasLimit, big.NewInt(10)) require.NoError(t, err) var prevHash hash.Hash32B blk := blockchain.NewBlock( 1, 1, prevHash, testutil.TimestampNow(), selfPubKey, []action.Action{transfer, vote}, ) roundNum := uint32(0) pMsg := iproto.ProposePb{ Block: blk.ConvertToBlockPb(), Proposer: addr.RawAddress, Round: roundNum, } pEvt, err := r.cfsm.newProposeBlkEvtFromProposePb(&pMsg) assert.NoError(t, err) assert.NotNil(t, pEvt) assert.NotNil(t, pEvt.block) // Test proposal endorse msg blkHash := blk.HashBlock() en := endorsement.NewEndorsement( endorsement.NewConsensusVote( blkHash, blk.Height(), roundNum, endorsement.PROPOSAL, ), addr, ) msg := en.ToProtoMsg() eEvt, err := r.cfsm.newEndorseEvtWithEndorsePb(msg) assert.NoError(t, err) assert.NotNil(t, eEvt) // Test commit endorse msg en = endorsement.NewEndorsement( endorsement.NewConsensusVote( blkHash, blk.Height(), roundNum, endorsement.LOCK, ), addr, ) msg = en.ToProtoMsg() eEvt, err = r.cfsm.newEndorseEvtWithEndorsePb(msg) assert.NoError(t, err) assert.NotNil(t, eEvt) }
explode_data.jsonl/48878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 861 }
[ 2830, 3393, 32355, 10298, 72743, 34910, 1249, 15220, 13626, 89120, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 7000, 11, 1848, 1669, 1532, 3235...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSumPerKeyWithPartitionsNoNoiseInt(t *testing.T) { for _, tc := range []struct { minValue float64 maxValue float64 lInfSensitivity float64 inMemory bool }{ { minValue: 1.0, maxValue: 3.0, lInfSensitivity: 3.0, inMemory: false, }, { minValue: 1.0, maxValue: 3.0, lInfSensitivity: 3.0, inMemory: true, }, { minValue: 0.0, maxValue: 2.0, lInfSensitivity: 2.0, inMemory: false, }, { minValue: 0.0, maxValue: 2.0, lInfSensitivity: 2.0, inMemory: true, }, { minValue: -10.0, maxValue: 10.0, lInfSensitivity: 10.0, inMemory: false, }, { minValue: -10.0, maxValue: 10.0, lInfSensitivity: 10.0, inMemory: true, }, } { // ID:1 contributes to 8 partitions, only 3 of which are public partitions. So none // should be dropped with maxPartitionsContributed=3. // Tests that cross-partition contribution bounding happens after non-public partitions are dropped. triples := testutils.ConcatenateTriplesWithIntValue( testutils.MakeSampleTripleWithIntValue(7, 0), testutils.MakeSampleTripleWithIntValue(58, 1), testutils.MakeSampleTripleWithIntValue(99, 2), testutils.MakeSampleTripleWithIntValue(1, 5), testutils.MakeSampleTripleWithIntValue(1, 6), testutils.MakeSampleTripleWithIntValue(1, 7), testutils.MakeSampleTripleWithIntValue(1, 8), testutils.MakeSampleTripleWithIntValue(1, 9)) publicPartitionsSlice := []int{0, 2, 5, 10, 11} // Keep partitions 0, 2 and 5. // drop partition 6 to 9. // Add partitions 10 and 11. result := []testutils.TestInt64Metric{ {0, 7}, {2, 99}, {5, 1}, {10, 0}, {11, 0}, } p, s, col, want := ptest.CreateList2(triples, result) col = beam.ParDo(s, testutils.ExtractIDFromTripleWithIntValue, col) var publicPartitions interface{} if tc.inMemory { publicPartitions = publicPartitionsSlice } else { publicPartitions = beam.CreateList(s, publicPartitionsSlice) } // We have ε=50, δ=0, and l1Sensitivity=3*lInfSensitivity, to scale the noise with different MinValues and MaxValues. 
epsilon, delta, k, l1Sensitivity := 50.0, 0.0, 25.0, 3.0*tc.lInfSensitivity pcol := MakePrivate(s, col, NewPrivacySpec(epsilon, delta)) pcol = ParDo(s, testutils.TripleWithIntValueToKV, pcol) sumParams := SumParams{MaxPartitionsContributed: 3, MinValue: tc.minValue, MaxValue: tc.maxValue, NoiseKind: LaplaceNoise{}, PublicPartitions: publicPartitions} got := SumPerKey(s, pcol, sumParams) want = beam.ParDo(s, testutils.Int64MetricToKV, want) if err := testutils.ApproxEqualsKVInt64(s, got, want, testutils.RoundedLaplaceTolerance(k, l1Sensitivity, epsilon)); err != nil { t.Fatalf("TestSumPerKeyWithPartitionsNoNoiseInt test case=+%v: %v", tc, err) } if err := ptest.Run(p); err != nil { t.Errorf("TestSumPerKeyWithPartitionsNoNoiseInt test case=+%v: SumPerKey(%v) = %v, expected %v: %v", tc, col, got, want, err) } } }
explode_data.jsonl/42955
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1381 }
[ 2830, 3393, 9190, 3889, 1592, 2354, 5800, 5930, 2753, 61819, 1072, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 25320, 1130, 286, 2224, 21, 19, 198, 197, 22543, 1130, 286, 2224, 21, 19, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestState_ChannelsByUser(t *testing.T) { t.Parallel() st := setupNewState() if got := st.ChannelsByUser(users[0]); got != nil { t.Errorf("Expected: %v to be nil.", got) } st.addUser(users[0]) st.addChannel(channels[0]) st.addChannel(channels[1]) st.addToChannel(users[0], channels[0]) st.addToChannel(users[0], channels[1]) if got, exp := len(st.ChannelsByUser(users[0])), 2; exp != got { t.Errorf("Expected: %v, got: %v", exp, got) } for _, uc := range st.ChannelsByUser(users[0]) { has := false for _, channel := range channels { if channel == uc { has = true break } } if got, exp := has, true; exp != got { t.Errorf("Expected: %v, got: %v", exp, got) } } }
explode_data.jsonl/32088
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 1397, 27588, 6680, 1359, 1474, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18388, 1669, 6505, 3564, 1397, 741, 743, 2684, 1669, 357, 6353, 6680, 1359, 1474, 35438, 58, 15, 14979, 2684, 961, 2092, 341, 197, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestServerClient(t *testing.T) { m1, _ := multiaddr.NewMultiaddr("/ip4/127.0.0.1/tcp/10000") m2, _ := multiaddr.NewMultiaddr("/ip4/127.0.0.1/tcp/10001") srvHost := newHost(t, m1) clientHost := newHost(t, m2) defer srvHost.Close() defer clientHost.Close() srvHost.Peerstore().AddAddrs(clientHost.ID(), clientHost.Addrs(), peerstore.PermanentAddrTTL) clientHost.Peerstore().AddAddrs(srvHost.ID(), srvHost.Addrs(), peerstore.PermanentAddrTTL) listener, err := gostream.Listen(srvHost, "/testiti-test") if err != nil { t.Fatal(err) } defer listener.Close() go func() { http.HandleFunc("/hello", func(w http.ResponseWriter, r *http.Request) { defer r.Body.Close() body, err := ioutil.ReadAll(r.Body) if err != nil { http.Error(w, err.Error(), 500) return } resp := fmt.Sprintf("Hi %s!", body) w.Write([]byte(resp)) }) server := &http.Server{} server.Serve(listener) }() tr := &http.Transport{} tr.RegisterProtocol("libp2p", NewTransport(clientHost, ProtocolOption("/testiti-test"))) client := &http.Client{Transport: tr} buf := bytes.NewBufferString("Hector") res, err := client.Post(fmt.Sprintf("libp2p://%s/hello", srvHost.ID().Pretty()), "text/plain", buf) if err != nil { t.Fatal(err) } defer res.Body.Close() text, err := ioutil.ReadAll(res.Body) if err != nil { t.Fatal(err) } if string(text) != "Hi Hector!" { t.Errorf("expected Hi Hector! but got %s", text) } t.Log(string(text)) }
explode_data.jsonl/56647
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 628 }
[ 2830, 3393, 5475, 2959, 1155, 353, 8840, 836, 8, 341, 2109, 16, 11, 716, 1669, 7299, 6214, 7121, 20358, 6214, 4283, 573, 19, 14, 16, 17, 22, 13, 15, 13, 15, 13, 16, 95958, 14, 16, 15, 15, 15, 15, 1138, 2109, 17, 11, 716, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLeadingZeros(t *testing.T) { var x Word = _B >> 1 for i := 0; i <= _W; i++ { if int(leadingZeros(x)) != i { t.Errorf("failed at %x: got %d want %d", x, leadingZeros(x), i) } x >>= 1 } }
explode_data.jsonl/2190
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 69750, 57, 6264, 1155, 353, 8840, 836, 8, 341, 2405, 856, 9322, 284, 716, 33, 3578, 220, 16, 198, 2023, 600, 1669, 220, 15, 26, 600, 2651, 716, 54, 26, 600, 1027, 341, 197, 743, 526, 7, 20654, 57, 6264, 2075, 593, 96...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestInvalidDuration(t *testing.T) { os.Setenv("DURATION", "should-be-a-valid-duration") defer os.Clearenv() cfg := Config{} assert.Error(t, env.Parse(&cfg)) }
explode_data.jsonl/7487
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 7928, 12945, 1155, 353, 8840, 836, 8, 341, 25078, 4202, 3160, 445, 35, 28328, 497, 330, 5445, 15150, 7409, 84810, 64383, 1138, 16867, 2643, 727, 273, 9151, 85, 2822, 50286, 1669, 5532, 16094, 6948, 6141, 1155, 11, 6105, 8937...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSokuonN(t *testing.T) { const want = "nnanninnunnenno" for _, v := range []string{"っなっにっぬっねっの", "ッナッニッヌッネッノ"} { got, err := KanaToRomaji(v) assert.Equal(t, want, got) assert.Nil(t, err) } }
explode_data.jsonl/11349
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 50, 16493, 263, 45, 1155, 353, 8840, 836, 8, 341, 4777, 1366, 284, 330, 7370, 1020, 6130, 359, 12495, 2152, 1837, 2023, 8358, 348, 1669, 2088, 3056, 917, 4913, 41791, 25770, 41791, 19655, 125388, 105, 41791, 124381, 41791, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestEngineRoot(t *testing.T) { tmp, err := ioutil.TempDir("", "docker-test-TestEngineCreateDir") if err != nil { t.Fatal(err) } defer os.RemoveAll(tmp) dir := path.Join(tmp, "dir") eng, err := New(dir) if err != nil { t.Fatal(err) } if st, err := os.Stat(dir); err != nil { t.Fatal(err) } else if !st.IsDir() { t.Fatalf("engine.New() created something other than a directory at %s", dir) } if r := eng.Root(); r != dir { t.Fatalf("Expected: %v\nReceived: %v", dir, r) } }
explode_data.jsonl/45350
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 4571, 8439, 1155, 353, 8840, 836, 8, 341, 20082, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 28648, 16839, 12, 2271, 4571, 4021, 6184, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 16867, 2643, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestJobRunsController_Create_InvalidID(t *testing.T) { t.Parallel() ethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplication(t, ethClient, ) defer cleanup() app.Start() client := app.NewHTTPClient() resp, cleanup := client.Post("/v2/specs/garbageID/runs", bytes.NewBuffer([]byte{})) defer cleanup() assert.Equal(t, http.StatusUnprocessableEntity, resp.StatusCode, "Response should be unprocessable entity") }
explode_data.jsonl/49850
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 12245, 73920, 2051, 34325, 62, 7928, 915, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 769, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1155, 340, 16867, 2060, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUserWillLogInIn_Passed(t *testing.T) { th := Setup(t).InitBasic() defer th.TearDown() err := th.App.UpdatePassword(th.BasicUser, "hunter2") if err != nil { t.Errorf("Error updating user password: %s", err) } tearDown, _, _ := SetAppEnvironmentWithPlugins(t, []string{ ` package main import ( "github.com/blastbao/mattermost-server/plugin" "github.com/blastbao/mattermost-server/model" ) type MyPlugin struct { plugin.MattermostPlugin } func (p *MyPlugin) UserWillLogIn(c *plugin.Context, user *model.User) string { return "" } func main() { plugin.ClientMain(&MyPlugin{}) } `}, th.App, th.App.NewPluginAPI) defer tearDown() r := &http.Request{} w := httptest.NewRecorder() session, err := th.App.DoLogin(w, r, th.BasicUser, "") if err != nil { t.Errorf("Expected nil, got %s", err) } if session.UserId != th.BasicUser.Id { t.Errorf("Expected %s, got %s", th.BasicUser.Id, session.UserId) } }
explode_data.jsonl/30309
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 402 }
[ 2830, 3393, 1474, 9945, 2201, 641, 641, 1088, 59004, 1155, 353, 8840, 836, 8, 341, 70479, 1669, 18626, 1155, 568, 3803, 15944, 741, 16867, 270, 836, 682, 4454, 2822, 9859, 1669, 270, 5105, 16689, 4876, 24365, 48868, 1474, 11, 330, 98272...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReadImportSegment(t *testing.T) { exp := &segments.ImportSegment{ Module: "abc", Name: "ABC", Desc: &segments.ImportDesc{Kind: 0, TypeIndexPtr: utils.Uint32Ptr(10)}, } buf := []byte{byte(len(exp.Module))} buf = append(buf, exp.Module...) buf = append(buf, byte(len(exp.Name))) buf = append(buf, exp.Name...) buf = append(buf, 0x00, 0x0a) actual, err := segments.ReadImportSegment(bytes.NewReader(buf)) if err != nil { t.Fail() } if !reflect.DeepEqual(exp, actual) { t.Fail() } }
explode_data.jsonl/69291
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 217 }
[ 2830, 3393, 4418, 11511, 21086, 1155, 353, 8840, 836, 8, 341, 48558, 1669, 609, 56829, 67275, 21086, 515, 197, 197, 3332, 25, 330, 13683, 756, 197, 21297, 25, 256, 330, 25411, 756, 197, 10957, 3300, 25, 256, 609, 56829, 67275, 11065, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestPrepareProposalRemoveTxs(t *testing.T) { const height = 2 ctx, cancel := context.WithCancel(context.Background()) defer cancel() logger := log.NewNopLogger() eventBus := eventbus.NewDefault(logger) require.NoError(t, eventBus.Start(ctx)) state, stateDB, privVals := makeState(t, 1, height) stateStore := sm.NewStore(stateDB) evpool := &mocks.EvidencePool{} evpool.On("PendingEvidence", mock.Anything).Return([]types.Evidence{}, int64(0)) txs := factory.MakeNTxs(height, 10) mp := &mpmocks.Mempool{} mp.On("ReapMaxBytesMaxGas", mock.Anything, mock.Anything).Return(types.Txs(txs)) trs := txsToTxRecords(types.Txs(txs)) trs[0].Action = abci.TxRecord_REMOVED trs[1].Action = abci.TxRecord_REMOVED mp.On("RemoveTxByKey", mock.Anything).Return(nil).Twice() app := abcimocks.NewApplication(t) app.On("PrepareProposal", mock.Anything, mock.Anything).Return(&abci.ResponsePrepareProposal{ TxRecords: trs, }, nil) cc := abciclient.NewLocalClient(logger, app) proxyApp := proxy.New(cc, logger, proxy.NopMetrics()) err := proxyApp.Start(ctx) require.NoError(t, err) blockExec := sm.NewBlockExecutor( stateStore, logger, proxyApp, mp, evpool, nil, eventBus, sm.NopMetrics(), ) pa, _ := state.Validators.GetByIndex(0) commit, _ := makeValidCommit(ctx, t, height, types.BlockID{}, state.Validators, privVals) block, err := blockExec.CreateProposalBlock(ctx, height, state, commit, pa) require.NoError(t, err) require.Len(t, block.Data.Txs.ToSliceOfBytes(), len(trs)-2) require.Equal(t, -1, block.Data.Txs.Index(types.Tx(trs[0].Tx))) require.Equal(t, -1, block.Data.Txs.Index(types.Tx(trs[1].Tx))) mp.AssertCalled(t, "RemoveTxByKey", types.Tx(trs[0].Tx).Key()) mp.AssertCalled(t, "RemoveTxByKey", types.Tx(trs[1].Tx).Key()) mp.AssertExpectations(t) }
explode_data.jsonl/49121
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 734 }
[ 2830, 3393, 50590, 98637, 13021, 51, 18561, 1155, 353, 8840, 836, 8, 341, 4777, 2608, 284, 220, 17, 198, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 16867, 9121, 2822, 17060, 1669, 1487, 7121, 45, 453, 7395, 741, 28302,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIncompatibleWithNonShardedAlgo(t *testing.T) { i1 := placement.NewInstance().SetID("i1").SetEndpoint("e1").SetWeight(1) i2 := placement.NewInstance().SetID("i2").SetEndpoint("e2").SetWeight(1) i3 := placement.NewInstance().SetID("i3").SetEndpoint("e3").SetWeight(1) i4 := placement.NewInstance().SetID("i4").SetEndpoint("e4").SetWeight(1) p, err := newShardedAlgorithm(placement.NewOptions()).InitialPlacement([]placement.Instance{i1, i2}, []uint32{1, 2}, 1) assert.NoError(t, err) a := newNonShardedAlgorithm() _, err = a.AddReplica(p) assert.Error(t, err) assert.Equal(t, errInCompatibleWithNonShardedAlgo, err) _, err = a.AddInstances(p, []placement.Instance{i3}) assert.Error(t, err) assert.Equal(t, errInCompatibleWithNonShardedAlgo, err) _, err = a.RemoveInstances(p, []string{"i1"}) assert.Error(t, err) assert.Equal(t, errInCompatibleWithNonShardedAlgo, err) _, err = a.ReplaceInstances(p, []string{"i1"}, []placement.Instance{i3, i4}) assert.Error(t, err) assert.Equal(t, errInCompatibleWithNonShardedAlgo, err) }
explode_data.jsonl/28019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 418 }
[ 2830, 3393, 641, 34842, 2354, 8121, 2016, 20958, 2101, 3346, 1155, 353, 8840, 836, 8, 341, 8230, 16, 1669, 21448, 7121, 2523, 1005, 1649, 915, 445, 72, 16, 1827, 1649, 27380, 445, 68, 16, 1827, 1649, 8295, 7, 16, 340, 8230, 17, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServicedCLI_CmdHostList_all(t *testing.T) { expected, err := DefaultHostAPITest.GetHosts() if err != nil { t.Fatal(err) } var actual []*host.Host output := pipe(InitHostAPITest, "serviced", "host", "list", "--verbose") if err := json.Unmarshal(output, &actual); err != nil { t.Fatalf("error unmarshaling resource: %s", err) } // Did you remember to update Host.Equals? if len(actual) != len(expected) { t.Fatalf("\ngot:\n%+v\nwant:\n%+v", actual, expected) } for i, _ := range actual { if !actual[i].Equals(expected[i]) { t.Fatalf("\ngot:\n%+v\nwant:\n%+v", actual, expected) } } }
explode_data.jsonl/37379
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 39159, 7572, 63959, 98253, 9296, 852, 5705, 1155, 353, 8840, 836, 8, 341, 42400, 11, 1848, 1669, 7899, 9296, 2537, 952, 477, 2234, 9296, 82, 741, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 2405, 5042...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestTxInvalidCredential(t *testing.T) { ctx := NewContext(t) c, m := setupCodec() if err := c.RegisterType(&avax.TestVerifiable{}); err != nil { t.Fatal(err) } tx := &Tx{ UnsignedTx: &BaseTx{BaseTx: avax.BaseTx{ NetworkID: networkID, BlockchainID: chainID, Ins: []*avax.TransferableInput{{ UTXOID: avax.UTXOID{ TxID: ids.Empty, OutputIndex: 0, }, Asset: avax.Asset{ID: assetID}, In: &secp256k1fx.TransferInput{ Amt: 20 * units.KiloAvax, Input: secp256k1fx.Input{ SigIndices: []uint32{ 0, }, }, }, }}, }}, Creds: []*FxCredential{{Verifiable: &avax.TestVerifiable{Err: errors.New("")}}}, } if err := tx.SignSECP256K1Fx(m, nil); err != nil { t.Fatal(err) } if err := tx.SyntacticVerify(ctx, m, ids.Empty, 0, 0, 1); err == nil { t.Fatalf("Tx should have failed due to an invalid credential") } }
explode_data.jsonl/48913
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 450 }
[ 2830, 3393, 31584, 7928, 48265, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1532, 1972, 1155, 340, 1444, 11, 296, 1669, 6505, 36913, 741, 743, 1848, 1669, 272, 81703, 2099, 10914, 8787, 10141, 22619, 6257, 1215, 1848, 961, 2092, 341, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIntegration_SyncJobRuns(t *testing.T) { t.Parallel() wsserver, wsserverCleanup := cltest.NewEventWebSocketServer(t) defer wsserverCleanup() config, _ := cltest.NewConfig(t) config.Set("EXPLORER_URL", wsserver.URL.String()) rpcClient, gethClient, _, assertMockCalls := cltest.NewEthMocksWithStartupAssertions(t) defer assertMockCalls() app, cleanup := cltest.NewApplicationWithConfig(t, config, eth.NewClientWith(rpcClient, gethClient), ) kst := new(mocks.KeyStoreInterface) app.Store.KeyStore = kst defer cleanup() app.InstantClock() require.NoError(t, app.Start()) j := cltest.FixtureCreateJobViaWeb(t, app, "fixtures/web/run_at_job.json") cltest.CallbackOrTimeout(t, "stats pusher connects", func() { <-wsserver.Connected }, 5*time.Second) var message string cltest.CallbackOrTimeout(t, "stats pusher sends", func() { message = <-wsserver.ReceivedText }, 5*time.Second) var run models.JobRun err := json.Unmarshal([]byte(message), &run) require.NoError(t, err) assert.Equal(t, j.ID, run.JobSpecID) cltest.WaitForJobRunToComplete(t, app.Store, run) kst.AssertExpectations(t) }
explode_data.jsonl/75900
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 439 }
[ 2830, 3393, 52464, 1098, 1721, 12245, 73920, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 6692, 778, 2836, 11, 289, 778, 2836, 67335, 1669, 1185, 1944, 7121, 1556, 61238, 5475, 1155, 340, 16867, 289, 778, 2836, 67335, 2822, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateProfileImage(t *testing.T) { b, err := CreateProfileImage("Corey Hulen", "eo1zkdr96pdj98pjmq8zy35wba", "nunito-bold.ttf") if err != nil { t.Fatal(err) } rdr := bytes.NewReader(b) img, _, err2 := image.Decode(rdr) if err2 != nil { t.Fatal(err) } colorful := color.RGBA{116, 49, 196, 255} if img.At(1, 1) != colorful { t.Fatal("Failed to create correct color") } }
explode_data.jsonl/31411
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 182 }
[ 2830, 3393, 4021, 8526, 1906, 1155, 353, 8840, 836, 8, 341, 2233, 11, 1848, 1669, 4230, 8526, 1906, 445, 5386, 88, 472, 94211, 497, 330, 24612, 16, 40696, 3612, 24, 21, 15360, 73, 24, 23, 91116, 27674, 23, 4246, 18, 20, 86, 4645, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFiltersFromQueryParams(t *testing.T) { type args struct { in interface{} } tests := []struct { name string args args want []octant.Filter wantErr bool }{ { name: "single filter", args: args{in: "foo:bar"}, want: []octant.Filter{{ Key: "foo", Value: "bar", }}, wantErr: false, }, { name: "multiple filters", args: args{in: []interface{}{"foo:bar", "baz:qux"}}, want: []octant.Filter{ {Key: "foo", Value: "bar"}, {Key: "baz", Value: "qux"}, }, wantErr: false, }, { name: "unknown input", args: args{in: 1}, want: nil, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := api.FiltersFromQueryParams(tt.args.in) if tt.wantErr { require.Error(t, err) return } require.NoError(t, err) assert.Equal(t, tt.want, got) }) } }
explode_data.jsonl/48849
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 456 }
[ 2830, 3393, 28351, 3830, 2859, 4870, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 17430, 3749, 16094, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 31215, 262, 2827, 198, 197, 50780, 262, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStart(t *testing.T) { keyTwinUpdate := keyTwinUpdateFunc() contentKeyTwin, _ := json.Marshal(keyTwinUpdate) commChan := make(map[string]chan interface{}) commChannel := make(chan interface{}) commChan[dtcommon.CommModule] = commChannel context := dtcontext.DTContext{ DeviceList: &sync.Map{}, DeviceMutex: &sync.Map{}, Mutex: &sync.RWMutex{}, CommChan: commChan, ModulesHealth: &sync.Map{}, } var testMutex sync.Mutex context.DeviceMutex.Store(deviceB, &testMutex) msgAttr := make(map[string]*dttype.MsgAttr) device := dttype.Device{ ID: "id1", Name: deviceB, Attributes: msgAttr, Twin: keyTwinUpdate.Twin, } context.DeviceList.Store(deviceB, &device) msg := &dttype.DTMessage{ Msg: &model.Message{ Header: model.MessageHeader{ ID: "id1", ParentID: "pid1", Timestamp: 0, Sync: false, }, Router: model.MessageRoute{ Source: "source", Resource: "resource", Group: "group", Operation: "op", }, Content: contentKeyTwin, }, Action: dtcommon.TwinGet, Type: dtcommon.CommModule, } msgHeartPing := "ping" msgHeartStop := "stop" receiverChannel := make(chan interface{}) heartbeatChannel := make(chan interface{}) tests := []struct { name string tw TwinWorker actionType string contentType interface{} msgType string }{ { name: "TestStart(): Case 1: ReceiverChan case when error is nil", tw: twinWorkerFunc(receiverChannel, nil, nil, context, ""), actionType: dtcommon.TwinGet, contentType: contentKeyTwin, }, { name: "TestStart(): Case 2: ReceiverChan case error log; TwinModule deal event failed, not found callback", tw: twinWorkerFunc(receiverChannel, nil, nil, context, ""), actionType: dtcommon.SendToEdge, contentType: contentKeyTwin, }, { name: "TestStart(): Case 3: ReceiverChan case error log; TwinModule deal event failed", tw: twinWorkerFunc(receiverChannel, nil, nil, context, ""), actionType: dtcommon.TwinGet, }, { name: "TestStart(): Case 4: HeartBeatChan case when error is nil", tw: twinWorkerFunc(nil, nil, heartbeatChannel, context, "Group1"), msgType: msgHeartPing, }, { name: 
"TestStart(): Case 5: HeartBeatChan case when error is not nil", tw: twinWorkerFunc(nil, nil, heartbeatChannel, context, "Group1"), msgType: msgHeartStop, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { go test.tw.sendMsg(msg, test.msgType, test.actionType, test.contentType) go test.tw.Start() time.Sleep(100 * time.Millisecond) message := &dttype.DTMessage{} go receiveMsg(commChannel, message) time.Sleep(100 * time.Millisecond) if (test.tw.ReceiverChan != nil) && !reflect.DeepEqual(message.Identity, msg.Identity) && !reflect.DeepEqual(message.Type, msg.Type) { t.Errorf("DTManager.TestStart() case failed: got = %v, Want = %v", message, msg) } if _, exist := context.ModulesHealth.Load("Group1"); test.tw.HeartBeatChan != nil && !exist { t.Errorf("DTManager.TestStart() case failed: HeartBeatChan received no string") } }) } }
explode_data.jsonl/30632
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1352 }
[ 2830, 3393, 3479, 1155, 353, 8840, 836, 8, 341, 23634, 51, 7526, 4289, 1669, 1376, 51, 7526, 4289, 9626, 741, 27751, 1592, 51, 7526, 11, 716, 1669, 2951, 37271, 4857, 51, 7526, 4289, 692, 197, 3621, 46019, 1669, 1281, 9147, 14032, 60,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func Test_maxProduct(t *testing.T) { testCases := []struct { Prices []int Expected int }{ { []int{7, 1, 5, 3, 6, 4}, 7, }, { []int{7, 6, 5, 4, 3, 2, 1}, 0, }, { []int{4, 5, 6, 1}, 2, }, { []int{2, 3, 7}, 5, }, { []int{2, 1, 7}, 6, }, { []int{4, 7}, 3, }, { []int{1, 7, 2, 3, 6, 7, 6, 7}, 12, }, } for _, tc := range testCases { result := maxProfit(tc.Prices) assert.Equal(t, tc.Expected, result, fmt.Sprintf("Failed for %v", tc.Prices)) } }
explode_data.jsonl/69708
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 6345, 4816, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 197, 62718, 256, 3056, 396, 198, 197, 197, 18896, 526, 198, 197, 59403, 197, 197, 515, 298, 197, 1294, 396, 90, 22, 11, 220, 16, 11, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRangePresent(t *testing.T) { for _, test := range []struct { rs Ranges r Range wantPresent bool }{ { r: Range{Pos: 1, Size: 0}, rs: Ranges{}, wantPresent: true, }, { r: Range{Pos: 1, Size: 0}, rs: Ranges(nil), wantPresent: true, }, { r: Range{Pos: 0, Size: 1}, rs: Ranges{}, wantPresent: false, }, { r: Range{Pos: 0, Size: 1}, rs: Ranges(nil), wantPresent: false, }, { r: Range{Pos: 1, Size: 2}, rs: Ranges{ Range{Pos: 1, Size: 1}, }, wantPresent: false, }, { r: Range{Pos: 1, Size: 2}, rs: Ranges{ Range{Pos: 1, Size: 2}, }, wantPresent: true, }, { r: Range{Pos: 1, Size: 2}, rs: Ranges{ Range{Pos: 1, Size: 10}, }, wantPresent: true, }, { r: Range{Pos: 1, Size: 2}, rs: Ranges{ Range{Pos: 5, Size: 2}, }, wantPresent: false, }, { r: Range{Pos: 1, Size: 9}, rs: Ranges{ Range{Pos: 2, Size: 1}, Range{Pos: 4, Size: 1}, }, wantPresent: false, }, { r: Range{Pos: 2, Size: 8}, rs: Ranges{ Range{Pos: 2, Size: 1}, Range{Pos: 4, Size: 1}, }, wantPresent: false, }, { r: Range{Pos: 3, Size: 7}, rs: Ranges{ Range{Pos: 2, Size: 1}, Range{Pos: 4, Size: 1}, }, wantPresent: false, }, { r: Range{Pos: 4, Size: 6}, rs: Ranges{ Range{Pos: 2, Size: 1}, Range{Pos: 4, Size: 1}, }, wantPresent: false, }, { r: Range{Pos: 5, Size: 5}, rs: Ranges{ Range{Pos: 2, Size: 1}, Range{Pos: 4, Size: 1}, }, wantPresent: false, }, } { what := fmt.Sprintf("test r=%v, rs=%v", test.r, test.rs) checkRanges(t, test.rs, what) gotPresent := test.rs.Present(test.r) assert.Equal(t, test.wantPresent, gotPresent, what) checkRanges(t, test.rs, what) } }
explode_data.jsonl/2643
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1049 }
[ 2830, 3393, 6046, 21195, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 41231, 688, 431, 5520, 198, 197, 7000, 1843, 16437, 198, 197, 50780, 21195, 1807, 198, 197, 59403, 197, 197, 515, 298, 7000, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSmokeTest(t *testing.T) { tests := []struct { filename string }{ {filename: "Roboto-BoldItalic.ttf"}, {filename: "Raleway-v4020-Regular.otf"}, {filename: "open-sans-v15-latin-regular.woff"}, {filename: "Go-Regular.woff2"}, } for _, test := range tests { filename := filepath.Join("testdata", test.filename) file, err := os.Open(filename) if err != nil { t.Errorf("Failed to open %q: %s\n", filename, err) } font, err := StrictParse(file) if err != nil { t.Errorf("StrictParse(%q) err = %q, want nil", filename, err) continue } if _, err := font.WriteOTF(ioutil.Discard); err != nil { t.Errorf("WriteOTF(%q) err = %q, want nil", filename, err) continue } file.Close() } }
explode_data.jsonl/30245
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 319 }
[ 2830, 3393, 76880, 2271, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 66434, 914, 198, 197, 59403, 197, 197, 90, 8404, 25, 330, 53532, 89581, 85292, 45192, 7115, 197, 197, 90, 8404, 25, 330, 49, 1574, 3117, 8273, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAllocateTokensToManyValidators(t *testing.T) { ctx, _, k, sk, fck := CreateTestInputDefault(t, false, 1000) sh := staking.NewHandler(sk) // create validator with 50% commission commission := staking.NewCommissionMsg(sdk.NewDecWithPrec(5, 1), sdk.NewDecWithPrec(5, 1), sdk.NewDec(0)) msg := staking.NewMsgCreateValidator(valOpAddr1, valConsPk1, sdk.NewCoin(sdk.DefaultBondDenom, sdk.NewInt(100)), staking.Description{}, commission, sdk.OneInt(), sdk.OneInt(), sdk.OneInt()) require.True(t, sh(ctx, msg).IsOK()) // create second validator with 0% commission commission = staking.NewCommissionMsg(sdk.NewDec(0), sdk.NewDec(0), sdk.NewDec(0)) msg = staking.NewMsgCreateValidator(valOpAddr2, valConsPk2, sdk.NewCoin(sdk.DefaultBondDenom, sdk.NewInt(100)), staking.Description{}, commission, sdk.OneInt(), sdk.OneInt(), sdk.OneInt()) require.True(t, sh(ctx, msg).IsOK()) abciValA := abci.Validator{ Address: valConsPk1.Address(), Power: 100, } abciValB := abci.Validator{ Address: valConsPk2.Address(), Power: 100, } // assert initial state: zero outstanding rewards, zero community pool, zero commission, zero current rewards require.True(t, k.GetValidatorOutstandingRewards(ctx, valOpAddr1).IsZero()) require.True(t, k.GetValidatorOutstandingRewards(ctx, valOpAddr2).IsZero()) require.True(t, k.GetFeePool(ctx).CommunityPool.IsZero()) require.True(t, k.GetValidatorAccumulatedCommission(ctx, valOpAddr1).IsZero()) require.True(t, k.GetValidatorAccumulatedCommission(ctx, valOpAddr2).IsZero()) require.True(t, k.GetValidatorCurrentRewards(ctx, valOpAddr1).Rewards.IsZero()) require.True(t, k.GetValidatorCurrentRewards(ctx, valOpAddr2).Rewards.IsZero()) // allocate tokens as if both had voted and second was proposer fees := sdk.Coins{ {sdk.DefaultBondDenom, sdk.NewInt(100)}, } fck.SetCollectedFees(fees) votes := []abci.VoteInfo{ { Validator: abciValA, SignedLastBlock: true, }, { Validator: abciValB, SignedLastBlock: true, }, } k.AllocateTokens(ctx, 200, 200, valConsAddr2, votes) // 98 
outstanding rewards (100 less 2 to community pool) require.Equal(t, sdk.DecCoins{{sdk.DefaultBondDenom, sdk.NewDecWithPrec(465, 1)}}, k.GetValidatorOutstandingRewards(ctx, valOpAddr1)) require.Equal(t, sdk.DecCoins{{sdk.DefaultBondDenom, sdk.NewDecWithPrec(515, 1)}}, k.GetValidatorOutstandingRewards(ctx, valOpAddr2)) // 2 community pool coins require.Equal(t, sdk.DecCoins{{sdk.DefaultBondDenom, sdk.NewDec(2)}}, k.GetFeePool(ctx).CommunityPool) // 50% commission for first proposer, (0.5 * 93%) * 100 / 2 = 23.25 require.Equal(t, sdk.DecCoins{{sdk.DefaultBondDenom, sdk.NewDecWithPrec(2325, 2)}}, k.GetValidatorAccumulatedCommission(ctx, valOpAddr1)) // zero commission for second proposer require.True(t, k.GetValidatorAccumulatedCommission(ctx, valOpAddr2).IsZero()) // just staking.proportional for first proposer less commission = (0.5 * 93%) * 100 / 2 = 23.25 require.Equal(t, sdk.DecCoins{{sdk.DefaultBondDenom, sdk.NewDecWithPrec(2325, 2)}}, k.GetValidatorCurrentRewards(ctx, valOpAddr1).Rewards) // proposer reward + staking.proportional for second proposer = (5 % + 0.5 * (93%)) * 100 = 51.5 require.Equal(t, sdk.DecCoins{{sdk.DefaultBondDenom, sdk.NewDecWithPrec(515, 1)}}, k.GetValidatorCurrentRewards(ctx, valOpAddr2).Rewards) }
explode_data.jsonl/45677
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1309 }
[ 2830, 3393, 75380, 29300, 27361, 31748, 1155, 353, 8840, 836, 8, 341, 20985, 11, 8358, 595, 11, 1901, 11, 282, 377, 1669, 4230, 2271, 2505, 3675, 1155, 11, 895, 11, 220, 16, 15, 15, 15, 340, 36196, 1669, 357, 1765, 7121, 3050, 68907...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTokenAuthNon200Non401Status(t *testing.T) { var ( baseTransport = &http.Transport{} gun data.GUN = "test" ) s := httptest.NewServer(http.HandlerFunc(NotFoundTestHandler)) defer s.Close() auth, err := tokenAuth(s.URL, baseTransport, gun, readOnly) require.NoError(t, err) require.Nil(t, auth) }
explode_data.jsonl/77491
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 3323, 5087, 8121, 17, 15, 15, 8121, 19, 15, 16, 2522, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 24195, 27560, 688, 284, 609, 1254, 87669, 16094, 197, 3174, 359, 1843, 821, 1224, 1861, 284, 330, 1944, 698, 197, 340, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImportMetaNoBundle(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` console.log(import.meta.url, import.meta.path) `, }, entryPaths: []string{"/entry.js"}, options: config.Options{ AbsOutputFile: "/out.js", }, }) }
explode_data.jsonl/38557
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 11511, 12175, 2753, 8409, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 12160, 1665, 83886, 2488...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMultipleReferencedSecrets(t *testing.T) { var ( ns = "myns" serviceAccountName = "mysa" serviceAccountUID = "mysauid" token1 = "token1" token2 = "token2" ) admit := NewServiceAccount() informerFactory := informers.NewSharedInformerFactory(nil, controller.NoResyncPeriodFunc()) admit.SetExternalKubeInformerFactory(informerFactory) admit.MountServiceAccountToken = true admit.RequireAPIToken = true sa := &corev1.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: serviceAccountName, UID: types.UID(serviceAccountUID), Namespace: ns, }, Secrets: []corev1.ObjectReference{ {Name: token1}, {Name: token2}, }, } informerFactory.Core().V1().ServiceAccounts().Informer().GetStore().Add(sa) // Add two tokens for the service account into the cache. informerFactory.Core().V1().Secrets().Informer().GetStore().Add(&corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: token2, Namespace: ns, Annotations: map[string]string{ api.ServiceAccountNameKey: serviceAccountName, api.ServiceAccountUIDKey: serviceAccountUID, }, }, Type: corev1.SecretTypeServiceAccountToken, Data: map[string][]byte{ api.ServiceAccountTokenKey: []byte("token-data"), }, }) informerFactory.Core().V1().Secrets().Informer().GetStore().Add(&corev1.Secret{ ObjectMeta: metav1.ObjectMeta{ Name: token1, Namespace: ns, Annotations: map[string]string{ api.ServiceAccountNameKey: serviceAccountName, api.ServiceAccountUIDKey: serviceAccountUID, }, }, Type: corev1.SecretTypeServiceAccountToken, Data: map[string][]byte{ api.ServiceAccountTokenKey: []byte("token-data"), }, }) pod := &api.Pod{ Spec: api.PodSpec{ ServiceAccountName: serviceAccountName, Containers: []api.Container{ {Name: "container-1"}, }, }, } attrs := admission.NewAttributesRecord(pod, nil, api.Kind("Pod").WithVersion("version"), ns, "myname", api.Resource("pods").WithVersion("version"), "", admission.Create, false, nil) if err := admit.Admit(attrs); err != nil { t.Fatal(err) } if n := len(pod.Spec.Volumes); n != 1 { t.Fatalf("expected 1 volume mount, got 
%d", n) } if name := pod.Spec.Volumes[0].Name; name != token1 { t.Errorf("expected first referenced secret to be mounted, got %q", name) } }
explode_data.jsonl/61356
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 928 }
[ 2830, 3393, 32089, 47447, 5767, 19773, 82, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 84041, 338, 284, 330, 76, 1872, 82, 698, 197, 52934, 7365, 675, 284, 330, 8209, 64, 698, 197, 52934, 7365, 6463, 220, 284, 330, 8209, 64, 2423...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_Option_WithSqlParameters(t *testing.T) { opt := WithSqlParameters(false) p := New(opt) assert.Equal(t, false, p.(opentracingPlugin).opt.logSqlParameters) }
explode_data.jsonl/45442
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 2232, 560, 62, 2354, 8269, 9706, 1155, 353, 8840, 836, 8, 341, 64838, 1669, 3085, 8269, 9706, 3576, 340, 3223, 1669, 1532, 24539, 340, 6948, 12808, 1155, 11, 895, 11, 281, 12832, 453, 23745, 4527, 11546, 568, 2912, 1665, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFloatFormatter(t *testing.T) { testCases := []struct { testhelper.ID ff colfmt.Float val interface{} expStr string }{ { ID: testhelper.MkID("basic"), val: 1.23, expStr: "1", }, { ID: testhelper.MkID("basic, pass nil"), expStr: "%!f(<nil>)", }, { ID: testhelper.MkID("ignore nil, pass a value"), ff: colfmt.Float{IgnoreNil: true}, val: 1.23, expStr: "1", }, { ID: testhelper.MkID("ignore nil, pass nil"), ff: colfmt.Float{IgnoreNil: true}, expStr: "", }, { ID: testhelper.MkID("with precision"), ff: colfmt.Float{Prec: 2}, val: 1.2345, expStr: "1.23", }, { ID: testhelper.MkID("with bad precision"), ff: colfmt.Float{Prec: -1}, val: 1.2345, expStr: "1", }, { ID: testhelper.MkID("with zero handling, large (just) value"), ff: colfmt.Float{ Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "abcd", }, }, val: 0.50000001, expStr: "1", }, { ID: testhelper.MkID( "with zero handling, borderline value, zero precision"), ff: colfmt.Float{ Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "abcd", }, }, val: 0.5, expStr: "a", }, { ID: testhelper.MkID( "with zero handling, large (just) value, non-zero precision"), ff: colfmt.Float{ Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "abcd", }, Prec: 1, }, val: 0.05, expStr: "0.1", }, { ID: testhelper.MkID( "with zero handling, small value, non-zero precision"), ff: colfmt.Float{ Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "abcd", }, Prec: 1, }, val: 0.04999, expStr: "abc", }, { ID: testhelper.MkID( "with zero handling, small -ve value, non-zero precision"), ff: colfmt.Float{ Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "abcd", }, Prec: 1, }, val: -0.04999, expStr: "abc", }, { ID: testhelper.MkID( "with zero handling, small value, non-zero precision & width"), ff: colfmt.Float{ Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "abcd", }, Prec: 1, W: 6, }, val: 0.04999, expStr: "abcd", }, { ID: testhelper.MkID( "with zero handling, zero value, as float64"), ff: colfmt.Float{ 
Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "", }, }, val: float64(0.0), expStr: "", }, { ID: testhelper.MkID( "with zero handling, zero value, as float32"), ff: colfmt.Float{ Zeroes: &colfmt.FloatZeroHandler{ Handle: true, Replace: "", }, }, val: float32(0.0), expStr: "", }, } for _, tc := range testCases { s := tc.ff.Formatted(tc.val) testhelper.DiffString(t, tc.IDStr(), "formatted value", s, tc.expStr) } }
explode_data.jsonl/45425
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1599 }
[ 2830, 3393, 5442, 14183, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 18185, 18764, 9910, 198, 197, 67399, 257, 1375, 12501, 29794, 198, 197, 19302, 262, 3749, 16094, 197, 48558, 2580, 914, 198, 197, 59403, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRunAutoTLSServer(t *testing.T) { cfg := initTest() cfg.Core.AutoTLS.Enabled = true ctx, cancel := context.WithCancel(context.Background()) go func() { assert.NoError(t, RunHTTPServer(ctx, cfg, q)) }() defer func() { // close the server cancel() }() // have to wait for the goroutine to start and run the server // otherwise the main thread will complete time.Sleep(5 * time.Millisecond) }
explode_data.jsonl/67603
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 6727, 13253, 13470, 1220, 2836, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 2930, 2271, 741, 50286, 12777, 6477, 45439, 13690, 284, 830, 198, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 30680, 2915, 368, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetPricetoBid(t *testing.T) { tests := []struct { spotPercentage float64 currentSpotPrice float64 currentOnDemandPrice float64 policy string want float64 }{ { spotPercentage: 50.0, currentSpotPrice: 0.0216, currentOnDemandPrice: 0.0464, policy: "aggressive", want: 0.0324, }, { spotPercentage: 79.0, currentSpotPrice: 0.0216, currentOnDemandPrice: 0.0464, policy: "aggressive", want: 0.038664, }, { spotPercentage: 79.0, currentSpotPrice: 0.0216, currentOnDemandPrice: 0.0464, policy: "normal", want: 0.0464, }, { spotPercentage: 200.0, currentSpotPrice: 0.0216, currentOnDemandPrice: 0.0464, policy: "aggressive", want: 0.0464, }, } for _, tt := range tests { cfg := &Config{ SpotPriceBufferPercentage: tt.spotPercentage, BiddingPolicy: tt.policy, } i := &instance{ region: &region{ name: "us-east-1", conf: cfg, }, } currentSpotPrice := tt.currentSpotPrice currentOnDemandPrice := tt.currentOnDemandPrice actualPrice := i.getPricetoBid(currentOnDemandPrice, currentSpotPrice) if math.Abs(actualPrice-tt.want) > 0.000001 { t.Errorf("percentage = %.2f, policy = %s, expected price = %.5f, want %.5f, currentSpotPrice = %.5f", tt.spotPercentage, tt.policy, actualPrice, tt.want, currentSpotPrice) } } }
explode_data.jsonl/55201
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 804 }
[ 2830, 3393, 1949, 47, 2216, 11023, 65452, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 1903, 19099, 36167, 981, 2224, 21, 19, 198, 197, 20121, 47049, 6972, 257, 2224, 21, 19, 198, 197, 20121, 1925, 81027, 6972, 222...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCalcSegmentTime(t *testing.T) { now, _ := ParseTimestamp("20190702 12:30:30", "20060102 15:04:05") t1, _ := ParseTimestamp("20190702 00:00:00", "20060102 15:04:05") calc := dayCalculator assert.Equal(t, t1, calc.CalcSegmentTime(now)) t1, _ = ParseTimestamp("20190701 00:00:00", "20060102 15:04:05") calc = monthCalculator assert.Equal(t, t1, calc.CalcSegmentTime(now)) t1, _ = ParseTimestamp("20190101 00:00:00", "20060102 15:04:05") calc = yearCalculator assert.Equal(t, t1, calc.CalcSegmentTime(now)) }
explode_data.jsonl/75706
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 47168, 21086, 1462, 1155, 353, 8840, 836, 8, 341, 80922, 11, 716, 1669, 14775, 20812, 445, 17, 15, 16, 24, 15, 22, 15, 17, 220, 16, 17, 25, 18, 15, 25, 18, 15, 497, 330, 17, 15, 15, 21, 15, 16, 15, 17, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTeamsService_ReviewProjectsBySlug(t *testing.T) { client, mux, _, teardown := setup() defer teardown() wantAcceptHeaders := []string{mediaTypeProjectsPreview} mux.HandleFunc("/orgs/o/teams/s/projects/1", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testHeader(t, r, "Accept", strings.Join(wantAcceptHeaders, ", ")) fmt.Fprint(w, `{"id":1}`) }) ctx := context.Background() project, _, err := client.Teams.ReviewTeamProjectsBySlug(ctx, "o", "s", 1) if err != nil { t.Errorf("Teams.ReviewTeamProjectsBySlug returned error: %v", err) } want := &Project{ID: Int64(1)} if !cmp.Equal(project, want) { t.Errorf("Teams.ReviewTeamProjectsBySlug returned %+v, want %+v", project, want) } const methodName = "ReviewTeamProjectsBySlug" testBadOptions(t, methodName, func() (err error) { _, _, err = client.Teams.ReviewTeamProjectsBySlug(ctx, "\n", "\n", -1) return err }) testNewRequestAndDoFailure(t, methodName, client, func() (*Response, error) { got, resp, err := client.Teams.ReviewTeamProjectsBySlug(ctx, "o", "s", 1) if got != nil { t.Errorf("testNewRequestAndDoFailure %v = %#v, want nil", methodName, got) } return resp, err }) }
explode_data.jsonl/4548
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 480 }
[ 2830, 3393, 60669, 1860, 62, 19432, 29958, 1359, 54968, 1155, 353, 8840, 836, 8, 341, 25291, 11, 59807, 11, 8358, 49304, 1669, 6505, 741, 16867, 49304, 2822, 50780, 16646, 10574, 1669, 3056, 917, 90, 7399, 929, 29958, 24625, 532, 2109, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1