text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestDPAReconciler_updateRegistryRoute(t *testing.T) { tests := []struct { name string route *routev1.Route bsl *velerov1.BackupStorageLocation dpa *oadpv1alpha1.DataProtectionApplication wantErr bool }{ { name: "Given DPA CR and BSL and SVC instance, appropriate registry route gets updated", wantErr: false, bsl: &velerov1.BackupStorageLocation{ ObjectMeta: metav1.ObjectMeta{ Name: "test-bsl", Namespace: "test-ns", }, Spec: velerov1.BackupStorageLocationSpec{ Provider: "test-provider", }, }, dpa: &oadpv1alpha1.DataProtectionApplication{ ObjectMeta: metav1.ObjectMeta{ Name: "Velero-test-CR", Namespace: "test-ns", }, }, route: &routev1.Route{ ObjectMeta: metav1.ObjectMeta{ Name: "oadp-" + "test-bsl" + "-" + "test-provider" + "-registry-route", Namespace: "test-ns", }, Spec: routev1.RouteSpec{ To: routev1.RouteTargetReference{ Kind: "Service", Name: "oadp-" + "test-bsl" + "-" + "test-provider" + "-registry-svc", }, }, }, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { fakeClient, err := getFakeClientFromObjects() if err != nil { t.Errorf("error in creating fake client, likely programmer error") } r := &DPAReconciler{ Client: fakeClient, Scheme: fakeClient.Scheme(), Log: logr.Discard(), Context: newContextForTest(tt.name), NamespacedName: types.NamespacedName{ Namespace: tt.dpa.Namespace, Name: tt.dpa.Name, }, EventRecorder: record.NewFakeRecorder(10), } wantRoute := &routev1.Route{ ObjectMeta: metav1.ObjectMeta{ Name: "oadp-" + "test-bsl" + "-" + "test-provider" + "-registry-route", Namespace: "test-ns", Labels: map[string]string{ "component": "oadp-" + "test-bsl" + "-" + "test-provider" + "-registry", "service": "oadp-" + "test-bsl" + "-" + "test-provider" + "-registry-svc", "track": "registry-routes", oadpv1alpha1.OadpOperatorLabel: "True", }, OwnerReferences: []metav1.OwnerReference{{ APIVersion: oadpv1alpha1.SchemeBuilder.GroupVersion.String(), Kind: "DataProtectionApplication", Name: tt.dpa.Name, UID: tt.dpa.UID, Controller: 
pointer.BoolPtr(true), BlockOwnerDeletion: pointer.BoolPtr(true), }}, }, Spec: routev1.RouteSpec{ To: routev1.RouteTargetReference{ Kind: "Service", Name: "oadp-" + "test-bsl" + "-" + "test-provider" + "-registry-svc", }, }, } if err := r.updateRegistryRoute(tt.route, tt.bsl, tt.dpa); (err != nil) != tt.wantErr { t.Errorf("updateRegistryRoute() error = %v, wantErr %v", err, tt.wantErr) } if !reflect.DeepEqual(tt.route, wantRoute) { t.Errorf("expected bsl labels to be %#v, got %#v", tt.route, wantRoute) } }) } }
explode_data.jsonl/45547
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1509 }
[ 2830, 3393, 35, 8041, 693, 40446, 5769, 8882, 15603, 4899, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 7000, 2133, 256, 353, 8966, 85, 16, 58004, 198, 197, 93801, 75, 257, 353, 889, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestRequireMainCacheCommonJS bundles an entry that touches require.main,
// require.cache, and a sibling module, with CommonJS output on the Node
// platform, and snapshots the bundler's result via the default suite.
func TestRequireMainCacheCommonJS(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` console.log('is main:', require.main === module) console.log(require('./is-main')) console.log('cache:', require.cache); `, "/is-main.js": ` module.exports = require.main === module `, }, entryPaths: []string{"/entry.js"}, options: config.Options{ Mode: config.ModeBundle, Platform: config.PlatformNode, AbsOutputFile: "/out.js", OutputFormat: config.FormatCommonJS, }, }) }
explode_data.jsonl/38601
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 247 }
[ 2830, 3393, 17959, 6202, 8233, 10839, 12545, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, 571, 12160, 1665, 492,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMinifiedBundleEndingWithImportantSemicolon checks that whitespace
// removal does not strip a semantically required trailing semicolon
// (the empty statement terminating `while(foo());`) in IIFE output.
func TestMinifiedBundleEndingWithImportantSemicolon(t *testing.T) { default_suite.expectBundled(t, bundled{ files: map[string]string{ "/entry.js": ` while(foo()); // This semicolon must not be stripped `, }, entryPaths: []string{"/entry.js"}, options: config.Options{ Mode: config.ModeBundle, RemoveWhitespace: true, OutputFormat: config.FormatIIFE, AbsOutputFile: "/out.js", }, }) }
explode_data.jsonl/38507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 6217, 1870, 8409, 88701, 2354, 52280, 50, 8111, 72269, 1155, 353, 8840, 836, 8, 341, 11940, 57239, 25952, 33, 1241, 832, 1155, 11, 51450, 515, 197, 74075, 25, 2415, 14032, 30953, 515, 298, 197, 3115, 4085, 2857, 788, 22074, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNameAndSelectorDaemonSetLaunchesPods(t *testing.T) { for _, strategy := range updateStrategies() { ds := newDaemonSet("foo") ds.Spec.UpdateStrategy = *strategy ds.Spec.Template.Spec.NodeSelector = simpleNodeLabel ds.Spec.Template.Spec.NodeName = "node-6" manager, podControl, _, err := newTestController(ds) if err != nil { t.Fatalf("error creating DaemonSets controller: %v", err) } addNodes(manager.nodeStore, 0, 4, nil) addNodes(manager.nodeStore, 4, 3, simpleNodeLabel) manager.dsStore.Add(ds) syncAndValidateDaemonSets(t, manager, ds, podControl, 1, 0, 0) } }
explode_data.jsonl/50329
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 675, 3036, 5877, 89177, 1649, 32067, 288, 23527, 82, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 8282, 1669, 2088, 2647, 2580, 69388, 368, 341, 197, 83336, 1669, 501, 89177, 1649, 445, 7975, 1138, 197, 83336, 36473, 16689, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAbsCollection_Reject(t *testing.T) { intColl := NewIntCollection([]int{1, 2, 3, 4, 5}) retColl := intColl.Reject(func(item interface{}, key int) bool { i := item.(int) return i > 3 }) if retColl.Count() != 3 { t.Fatal("Reject 重复错误") } }
explode_data.jsonl/66434
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 116 }
[ 2830, 3393, 27778, 6482, 50693, 583, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 3518, 11262, 15265, 1669, 526, 15265, 2817, 583, 18552, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLd_Add(t *testing.T) { type fields struct { request request } type args struct { ld *lead } tests := []struct { name string fields fields args args want int wantErr bool }{ {"error", fields{request{}}, args{&lead{}}, 0, true}, } for _, tt := range tests { if tt.name == "error" { OpenConnection("error", "error", "error") } t.Run(tt.name, func(t *testing.T) { l := Ld{ request: tt.fields.request, } got, err := l.Add(tt.args.ld) if (err != nil) != tt.wantErr { t.Errorf("Ld.Add() error = %v, wantErr %v", err, tt.wantErr) return } if got != tt.want { t.Errorf("Ld.Add() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/15594
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 345 }
[ 2830, 3393, 43, 67, 21346, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 23555, 1681, 198, 197, 532, 13158, 2827, 2036, 341, 197, 197, 507, 353, 26060, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConnectFailsIfNoIkey(t *testing.T) { assert := assert.New(t) transmitter := new(mocks.Transmitter) transmitter.On("Close").Return(closed) ai := ApplicationInsights{ transmitter: transmitter, // Very long timeout to ensure we do not rely on timeouts for closing the transmitter Timeout: internal.Duration{Duration: time.Hour}, } err := ai.Connect() assert.Error(err) }
explode_data.jsonl/50938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 14611, 37, 6209, 2679, 2753, 40, 792, 1155, 353, 8840, 836, 8, 972, 6948, 1669, 2060, 7121, 1155, 7229, 72453, 16126, 1669, 501, 1255, 25183, 11815, 16126, 1218, 72453, 16126, 8071, 445, 7925, 1827, 5598, 1337, 9259, 7229, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestURL(t *testing.T) { client := NewRookNetworkRestClient(GetRestURL("10.0.1.2:8124"), http.DefaultClient) assert.Equal(t, "http://10.0.1.2:8124", client.URL()) }
explode_data.jsonl/27837
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 3144, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 1532, 49, 1941, 12320, 12416, 2959, 24460, 12416, 3144, 445, 16, 15, 13, 15, 13, 16, 13, 17, 25, 23, 16, 17, 19, 3975, 1758, 13275, 2959, 340, 6948, 12808, 1155, 11, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPerformRequestRetryOnHttpError(t *testing.T) { var numFailedReqs int fail := func(r *http.Request) (*http.Response, error) { numFailedReqs += 1 //return &http.Response{Request: r, StatusCode: 400}, nil return nil, errors.New("request failed") } // Run against a failing endpoint and see if PerformRequest // retries correctly. tr := &failingTransport{path: "/fail", fail: fail} httpClient := &http.Client{Transport: tr} client, err := NewClient(SetHttpClient(httpClient), SetMaxRetries(5), SetHealthcheck(false)) if err != nil { t.Fatal(err) } res, err := client.PerformRequest(context.TODO(), "GET", "/fail", nil, nil) if err == nil { t.Fatal("expected error") } if res != nil { t.Fatal("expected no response") } // Connection should be marked as dead after it failed if numFailedReqs != 5 { t.Errorf("expected %d failed requests; got: %d", 5, numFailedReqs) } }
explode_data.jsonl/38023
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 322 }
[ 2830, 3393, 46951, 1900, 51560, 1925, 2905, 1454, 1155, 353, 8840, 836, 8, 341, 2405, 1629, 9408, 693, 26358, 526, 198, 63052, 1669, 2915, 2601, 353, 1254, 9659, 8, 4609, 1254, 12574, 11, 1465, 8, 341, 197, 22431, 9408, 693, 26358, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPriorityQueueLen(t *testing.T) { pq := New(true) quickAssert(pq.Len(), 0, t) pq.Push(struct{}{}, 1) quickAssert(pq.Len(), 1, t) }
explode_data.jsonl/16774
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 68 }
[ 2830, 3393, 20555, 7554, 11271, 1155, 353, 8840, 836, 8, 341, 3223, 80, 1669, 1532, 3715, 340, 197, 27763, 8534, 1295, 80, 65819, 1507, 220, 15, 11, 259, 340, 3223, 80, 34981, 6163, 6257, 22655, 220, 16, 340, 197, 27763, 8534, 1295, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGauge_DataPoints(t *testing.T) { ms := NewGauge() assert.EqualValues(t, NewNumberDataPointSlice(), ms.DataPoints()) fillTestNumberDataPointSlice(ms.DataPoints()) testValDataPoints := generateTestNumberDataPointSlice() assert.EqualValues(t, testValDataPoints, ms.DataPoints()) }
explode_data.jsonl/32692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 99 }
[ 2830, 3393, 38, 19392, 17817, 11411, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 38, 19392, 741, 6948, 12808, 6227, 1155, 11, 1532, 2833, 1043, 2609, 33236, 1507, 9829, 3336, 11411, 2398, 65848, 2271, 2833, 1043, 2609, 33236, 35680, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCertificateServiceReplace(t *testing.T) { service := createCertificateService(t) require.NotNil(t, service) certificate, err := service.Replace(emptyString, nil) assert.Equal(t, err, createInvalidParameterError(OperationReplace, ParameterCertificateID)) assert.Nil(t, certificate) certificate, err = service.Replace(whitespaceString, nil) assert.Equal(t, err, createInvalidParameterError(OperationReplace, ParameterCertificateID)) assert.Nil(t, certificate) certificate, err = service.Replace("fake-id-string", nil) assert.Equal(t, err, createInvalidParameterError(OperationReplace, ParameterReplacementCertificate)) assert.Nil(t, certificate) replacementCertificate := NewReplacementCertificate("fake-name-string", "fake-password-string") assert.NotNil(t, replacementCertificate) certificate, err = service.Replace(whitespaceString, replacementCertificate) assert.Error(t, err) assert.Nil(t, certificate) }
explode_data.jsonl/71915
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 290 }
[ 2830, 3393, 33202, 1860, 23107, 1155, 353, 8840, 836, 8, 341, 52934, 1669, 1855, 33202, 1860, 1155, 340, 17957, 93882, 1155, 11, 2473, 692, 1444, 20962, 11, 1848, 1669, 2473, 20858, 24216, 703, 11, 2092, 340, 6948, 12808, 1155, 11, 1848...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestChunkSeriesIterator_NextWithMinTime(t *testing.T) { metas := []chunks.Meta{ tsdbutil.ChunkFromSamples([]Sample{sample{1, 6}, sample{5, 6}, sample{7, 8}}), } it := newChunkSeriesIterator(metas, nil, 2, 4) testutil.Assert(t, it.Next() == false, "") }
explode_data.jsonl/68065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 28304, 25544, 11951, 1604, 427, 2354, 6217, 1462, 1155, 353, 8840, 836, 8, 341, 2109, 35917, 1669, 3056, 84263, 58806, 515, 197, 57441, 1999, 1314, 6353, 3122, 3830, 39571, 10556, 17571, 90, 13611, 90, 16, 11, 220, 21, 2137,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestNestedMapString feeds a type with a map-of-maps field
// (map[string]map[string]string) through the OpenAPI type writer and
// compares both the generated registration call and the full generated
// schema function against golden strings (nested AdditionalProperties).
func TestNestedMapString(t *testing.T) { callErr, funcErr, assert, callBuffer, funcBuffer := testOpenAPITypeWriter(t, ` package foo // Map sample tests openAPIGen.generateMapProperty method. type Blah struct { // A sample String to String map StringToArray map[string]map[string]string } `) if callErr != nil { t.Fatal(callErr) } if funcErr != nil { t.Fatal(funcErr) } assert.Equal(`"base/foo.Blah": schema_base_foo_Blah(ref), `, callBuffer.String()) assert.Equal(`func schema_base_foo_Blah(ref common.ReferenceCallback) common.OpenAPIDefinition { return common.OpenAPIDefinition{ Schema: spec.Schema{ SchemaProps: spec.SchemaProps{ Description: "Map sample tests openAPIGen.generateMapProperty method.", Type: []string{"object"}, Properties: map[string]spec.Schema{ "StringToArray": { SchemaProps: spec.SchemaProps{ Description: "A sample String to String map", Type: []string{"object"}, AdditionalProperties: &spec.SchemaOrBool{ Allows: true, Schema: &spec.Schema{ SchemaProps: spec.SchemaProps{ Type: []string{"object"}, AdditionalProperties: &spec.SchemaOrBool{ Allows: true, Schema: &spec.Schema{ SchemaProps: spec.SchemaProps{ Default: "", Type: []string{"string"}, Format: "", }, }, }, }, }, }, }, }, }, Required: []string{"StringToArray"}, }, }, } } `, funcBuffer.String()) }
explode_data.jsonl/3348
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 477 }
[ 2830, 3393, 71986, 2227, 703, 1155, 353, 8840, 836, 8, 341, 67288, 7747, 11, 2915, 7747, 11, 2060, 11, 1618, 4095, 11, 2915, 4095, 1669, 1273, 5002, 7082, 929, 6492, 1155, 11, 22074, 1722, 15229, 271, 322, 5027, 6077, 7032, 1787, 7082...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConvertSpansToTraceSpans_json(t *testing.T) { // Using Adrian Cole's sample at https://gist.github.com/adriancole/e8823c19dfed64e2eb71 blob, err := ioutil.ReadFile("./testdata/sample1.json") require.NoError(t, err, "Failed to read sample JSON file: %v", err) zi := new(ZipkinReceiver) reqs, err := zi.v2ToTraceSpans(blob, nil) require.NoError(t, err, "Failed to parse convert Zipkin spans in JSON to Trace spans: %v", err) require.Len(t, reqs, 1, "Expecting only one request since all spans share same node/localEndpoint: %v", len(reqs)) req := reqs[0] wantNode := &commonpb.Node{ ServiceInfo: &commonpb.ServiceInfo{ Name: "frontend", }, } assert.True(t, proto.Equal(wantNode, req.Node)) nonNilSpans := 0 for _, span := range req.Spans { if span != nil { nonNilSpans++ } } // Expecting 9 non-nil spans require.Equal(t, 9, nonNilSpans, "Incorrect non-nil spans count") }
explode_data.jsonl/53600
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 363 }
[ 2830, 3393, 12012, 6406, 596, 1249, 6550, 6406, 596, 9455, 1155, 353, 8840, 836, 8, 341, 197, 322, 12091, 43592, 23407, 594, 6077, 518, 3703, 1110, 95294, 11021, 905, 44460, 7266, 55645, 16546, 23, 23, 17, 18, 66, 16, 24, 2940, 291, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLoadPrivValidatorKeyFile(t *testing.T) { tempKeyFile, err := ioutil.TempFile("", "priv_validator_key_") require.Nil(t, err) tempStateFile, err := ioutil.TempFile("", "priv_validator_state_") require.Nil(t, err) { // does not exist _, err := LoadPrivValidatorKeyFile("DOES_NOT_EXIST") require.NotNil(t, err) require.Contains(t, err.Error(), "does not exist") } { // error reading since empty _, err := LoadPrivValidatorKeyFile(tempKeyFile.Name()) require.NotNil(t, err) require.Contains(t, err.Error(), "error reading") } expected, err := privval.GenFilePV(tempKeyFile.Name(), tempStateFile.Name(), privval.PrivKeyTypeEd25519) require.Nil(t, err) expected.Save() // success actual, err := LoadPrivValidatorKeyFile(tempKeyFile.Name()) require.Nil(t, err) assert.Equal(t, expected.Key.Address, actual.Address) assert.Equal(t, expected.Key.PrivKey, actual.PrivKey) assert.Equal(t, expected.Key.PubKey, actual.PubKey) }
explode_data.jsonl/60930
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 370 }
[ 2830, 3393, 5879, 32124, 14256, 1592, 1703, 1155, 353, 8840, 836, 8, 341, 16280, 1592, 1703, 11, 1848, 1669, 43144, 65009, 1703, 19814, 330, 11887, 64959, 3097, 62, 1138, 17957, 59678, 1155, 11, 1848, 340, 16280, 1397, 1703, 11, 1848, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateActionReturnCodeFails(t *testing.T) { codes := []int{0, -1, 199, 300, 399, 600, 999} for _, c := range codes { allErrs := validateActionReturnCode(c, field.NewPath("code")) if len(allErrs) == 0 { t.Errorf("validateActionReturnCode(%v) returned no errors for invalid input", c) } } }
explode_data.jsonl/65911
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 119 }
[ 2830, 3393, 17926, 2512, 5598, 2078, 37, 6209, 1155, 353, 8840, 836, 8, 341, 1444, 2539, 1669, 3056, 396, 90, 15, 11, 481, 16, 11, 220, 16, 24, 24, 11, 220, 18, 15, 15, 11, 220, 18, 24, 24, 11, 220, 21, 15, 15, 11, 220, 24, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestReconciler_reconcileMachineHealthCheck table-tests the reconciler's
// create/update/no-op/delete handling of MachineHealthCheck objects: it
// seeds a fake client with a control-plane owner (and the "current" MHC when
// one is given), runs reconcileMachineHealthCheck(current, desired), then
// re-fetches the object and compares it (ignoring autogenerated metadata)
// against the wanted state. The owner-reference case expects the reconciler
// to complete a UID-less reference with the control plane's UID; the delete
// case (desired == nil) expects a NotFound on the final Get.
func TestReconciler_reconcileMachineHealthCheck(t *testing.T) { // create a controlPlane object with enough information to be used as an OwnerReference for the MachineHealthCheck. cp := builder.ControlPlane(metav1.NamespaceDefault, "cp1").Build() cp.SetUID("very-unique-identifier") mhcBuilder := builder.MachineHealthCheck(metav1.NamespaceDefault, "cp1"). WithSelector(*selectorForControlPlaneMHC()). WithUnhealthyConditions([]clusterv1.UnhealthyCondition{ { Type: corev1.NodeReady, Status: corev1.ConditionUnknown, Timeout: metav1.Duration{Duration: 5 * time.Minute}, }, }). WithClusterName("cluster1") tests := []struct { name string current *clusterv1.MachineHealthCheck desired *clusterv1.MachineHealthCheck want *clusterv1.MachineHealthCheck wantErr bool }{ { name: "Create a MachineHealthCheck", current: nil, desired: mhcBuilder.Build(), want: mhcBuilder.Build(), }, { name: "Successfully create a valid Ownerreference on the MachineHealthCheck", current: nil, // update the unhealthy conditions in the MachineHealthCheck desired: mhcBuilder. // Desired object has an incomplete owner reference which has no UID. WithOwnerReferences([]metav1.OwnerReference{{Name: cp.GetName(), Kind: cp.GetKind(), APIVersion: cp.GetAPIVersion()}}). Build(), // Want a reconciled object with a full ownerReference including UID want: mhcBuilder. WithOwnerReferences([]metav1.OwnerReference{{Name: cp.GetName(), Kind: cp.GetKind(), APIVersion: cp.GetAPIVersion(), UID: cp.GetUID()}}). 
Build(), wantErr: false, }, { name: "Update a MachineHealthCheck with changes", current: mhcBuilder.Build(), // update the unhealthy conditions in the MachineHealthCheck desired: mhcBuilder.WithUnhealthyConditions([]clusterv1.UnhealthyCondition{ { Type: corev1.NodeReady, Status: corev1.ConditionUnknown, Timeout: metav1.Duration{Duration: 1000 * time.Minute}, }, }).Build(), want: mhcBuilder.WithUnhealthyConditions([]clusterv1.UnhealthyCondition{ { Type: corev1.NodeReady, Status: corev1.ConditionUnknown, Timeout: metav1.Duration{Duration: 1000 * time.Minute}, }, }).Build(), }, { name: "Don't change a MachineHealthCheck with no difference between desired and current", current: mhcBuilder.Build(), // update the unhealthy conditions in the MachineHealthCheck desired: mhcBuilder.Build(), want: mhcBuilder.Build(), }, { name: "Delete a MachineHealthCheck", current: mhcBuilder.Build(), // update the unhealthy conditions in the MachineHealthCheck desired: nil, want: nil, wantErr: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { g := NewWithT(t) got := &clusterv1.MachineHealthCheck{} r := Reconciler{ Client: fake.NewClientBuilder(). WithScheme(fakeScheme). WithObjects([]client.Object{cp}...). Build(), recorder: env.GetEventRecorderFor("test"), } if tt.current != nil { g.Expect(r.Client.Create(ctx, tt.current)).To(Succeed()) } if err := r.reconcileMachineHealthCheck(ctx, tt.current, tt.desired); err != nil { if !tt.wantErr { t.Errorf("reconcileMachineHealthCheck() error = %v, wantErr %v", err, tt.wantErr) } } if err := r.Client.Get(ctx, client.ObjectKeyFromObject(mhcBuilder.Build()), got); err != nil { if !tt.wantErr { t.Errorf("reconcileMachineHealthCheck() error = %v, wantErr %v", err, tt.wantErr) } // Delete case: If we want to find nothing and the Get call returns a NotFound error from the API this is a deletion case and the test succeeds. 
if tt.want == nil && apierrors.IsNotFound(err) { return } } g.Expect(got).To(EqualObject(tt.want, IgnoreAutogeneratedMetadata)) }) } }
explode_data.jsonl/11665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1525 }
[ 2830, 3393, 693, 40446, 5769, 1288, 40446, 457, 21605, 14542, 3973, 1155, 353, 8840, 836, 8, 341, 197, 322, 1855, 264, 2524, 34570, 1633, 448, 3322, 1995, 311, 387, 1483, 438, 458, 25833, 8856, 369, 279, 12960, 14542, 3973, 624, 52018, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestNewTextMessage(t *testing.T) { b := []byte("TEST") message := wspubsub.NewTextMessage(b) require.Equal(t, wspubsub.MessageTypeText, message.Type) require.Equal(t, b, message.Payload) s := "TEST" message = wspubsub.NewTextMessageFromString(s) require.Equal(t, wspubsub.MessageTypeText, message.Type) require.Equal(t, []byte(s), message.Payload) }
explode_data.jsonl/37741
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 3564, 1178, 2052, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 3056, 3782, 445, 10033, 1138, 24753, 1669, 62507, 392, 1966, 7121, 1178, 2052, 1883, 340, 17957, 12808, 1155, 11, 62507, 392, 1966, 8472, 929, 1178, 11, 1943, 10184...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseTokenRequestErrors(t *testing.T) { r1, _ := http.NewRequest("GET", "", nil) r2, _ := http.NewRequest("POST", "", nil) matrix := []struct { r *http.Request e string }{ { r: r1, e: "invalid_request: invalid HTTP method", }, { r: r2, e: "invalid_request: malformed query parameters or body form", }, { r: newRequest(nil), e: "invalid_request: missing grant type", }, { r: newRequest(map[string]string{ "grant_type": PasswordGrantType, }), e: "invalid_request: missing client identification", }, { r: newRequestWithAuth("foo", "bar", map[string]string{ "grant_type": PasswordGrantType, "redirect_uri": "blaa%blupp", }), e: "invalid_request: invalid redirect URI", }, { r: newRequestWithAuth("foo", "bar", map[string]string{ "grant_type": PasswordGrantType, "redirect_uri": "foo", }), e: "invalid_request: invalid redirect URI", }, } for _, i := range matrix { req, err := ParseTokenRequest(i.r) assert.Nil(t, req) assert.Error(t, err) assert.Equal(t, i.e, err.Error()) } }
explode_data.jsonl/1725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 487 }
[ 2830, 3393, 14463, 3323, 1900, 13877, 1155, 353, 8840, 836, 8, 341, 7000, 16, 11, 716, 1669, 1758, 75274, 445, 3806, 497, 7342, 2092, 340, 7000, 17, 11, 716, 1669, 1758, 75274, 445, 2946, 497, 7342, 2092, 692, 60930, 1669, 3056, 1235,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestParseYearToDate(t *testing.T) { type args struct { s string } t1 := args{"2020"} t2 := args{"2006"} t3 := args{"1999"} tests := []struct { name string args args want time.Time }{ {"string 2020 to datetime", t1, time.Date(2020, time.January, 01, 00, 00, 00, 00, time.UTC)}, {"string 2006 to datetime", t2, time.Date(2006, time.January, 01, 00, 00, 00, 00, time.UTC)}, {"string 1999 to datetime", t3, time.Date(1999, time.January, 01, 00, 00, 00, 00, time.UTC)}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { want := tt.want actual := parseYearToDate(tt.args.s) assert.Equal(t, want, actual, "The string year should be the same as the datetime.") }) } }
explode_data.jsonl/32662
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 14463, 9490, 76054, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1903, 914, 198, 197, 630, 3244, 16, 1669, 2827, 4913, 17, 15, 17, 15, 16707, 3244, 17, 1669, 2827, 4913, 17, 15, 15, 21, 16707, 3244, 18, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_registrarDomainNameserverList(t *testing.T) { t.Run("DefaultRetrieve", func(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() service := mocks.NewMockRegistrarService(mockCtrl) service.EXPECT().GetDomainNameservers(gomock.Any()).Return([]registrar.Nameserver{}, nil).Times(1) registrarDomainNameserverList(service, &cobra.Command{}, []string{"testdomain1.co.uk"}) }) t.Run("GetDomainNameserversError_ReturnsError", func(t *testing.T) { mockCtrl := gomock.NewController(t) defer mockCtrl.Finish() service := mocks.NewMockRegistrarService(mockCtrl) service.EXPECT().GetDomainNameservers(gomock.Any()).Return([]registrar.Nameserver{}, errors.New("test error")).Times(1) err := registrarDomainNameserverList(service, &cobra.Command{}, []string{"testdomain1.co.uk"}) assert.Equal(t, "Error retrieving domain nameservers: test error", err.Error()) }) }
explode_data.jsonl/74652
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 4920, 34765, 13636, 7980, 2836, 852, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 3675, 87665, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 197, 16867, 7860, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContainKeys(t *testing.T) { tests := map[string]struct { mapOfObjs map[string]interface{} searchKeys []string hasKeys bool }{ "contains key - +ve test case - map having the key": { mapOfObjs: map[string]interface{}{ "k1": "v1", }, searchKeys: []string{"k1"}, hasKeys: true, }, "contains key - +ve test case - map without the keys": { mapOfObjs: map[string]interface{}{ "k1": "v1", }, searchKeys: []string{"k2"}, hasKeys: false, }, "contains key - +ve test case - empty map": { mapOfObjs: map[string]interface{}{}, searchKeys: []string{"k1"}, hasKeys: false, }, "contains key - +ve test case - nil map": { mapOfObjs: nil, searchKeys: []string{"k1"}, hasKeys: false, }, "contains key - +ve test case - with no search keys": { mapOfObjs: map[string]interface{}{ "k1": "v1", }, searchKeys: []string{}, hasKeys: false, }, } for name, mock := range tests { t.Run(name, func(t *testing.T) { hasKeys := ContainKeys(mock.mapOfObjs, mock.searchKeys) if hasKeys != mock.hasKeys { t.Fatalf("failed to test contains key: expected key '%s': actual 'not found'", mock.searchKeys) } }) } }
explode_data.jsonl/44191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 538 }
[ 2830, 3393, 46522, 8850, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 19567, 2124, 4121, 2519, 220, 2415, 14032, 31344, 16094, 197, 45573, 8850, 3056, 917, 198, 197, 63255, 8850, 262, 1807, 198, 197, 59403...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCallocFree(t *testing.T) { var ptr unsafe.Pointer if ptr = calloc(2, 4); ptr == nil { t.Error("ptr should not be nil") } free(ptr) }
explode_data.jsonl/79708
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 61 }
[ 2830, 3393, 34, 4742, 10940, 1155, 353, 8840, 836, 8, 341, 2405, 10087, 19860, 41275, 271, 743, 10087, 284, 46013, 7, 17, 11, 220, 19, 1215, 10087, 621, 2092, 341, 197, 3244, 6141, 445, 3505, 1265, 537, 387, 2092, 1138, 197, 630, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestRoute_SetDeployment(t *testing.T) { testCases := []struct { param string errWanted bool }{ {"example", false}, {"", true}, } for _, tc := range testCases { route := Route{} errGot := route.SetDeployment(tc.param) if tc.errWanted != (errGot != nil) { t.Errorf("SetDeployment(%s) = %v; errWanted = %t", route.deployment, errGot, tc.errWanted) } if errGot == nil && route.deployment != tc.param { t.Errorf("SetDeployment(%s) != want %s", route.deployment, tc.param) } } }
explode_data.jsonl/67788
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 4899, 14812, 75286, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 36037, 257, 914, 198, 197, 9859, 54, 7566, 1807, 198, 197, 59403, 197, 197, 4913, 8687, 497, 895, 1583, 197, 197, 4913, 497, 830, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestSimpleList(t *testing.T) { scheme := runtime.NewScheme() AddToScheme(scheme) duckv1alpha1.AddToScheme(scheme) namespace, name, want := "foo", "bar", "my_hostname" // Despite the signature allowing `...runtime.Object`, this method // will not work properly unless the passed objects are `unstructured.Unstructured` client := fake.NewSimpleDynamicClient(scheme, &unstructured.Unstructured{ Object: map[string]interface{}{ "apiVersion": "pkg.knative.dev/v2", "kind": "Resource", "metadata": map[string]interface{}{ "namespace": namespace, "name": name, }, "status": map[string]interface{}{ "address": map[string]interface{}{ "hostname": want, }, }, }, }) ctx, cancel := context.WithCancel(context.Background()) defer cancel() tif := &duck.TypedInformerFactory{ Client: client, Type: &duckv1alpha1.AddressableType{}, ResyncPeriod: 1 * time.Second, StopChannel: ctx.Done(), } // This hangs without: // https://github.com/kubernetes/kubernetes/pull/68552 _, lister, err := tif.Get(ctx, SchemeGroupVersion.WithResource("resources")) if err != nil { t.Fatalf("Get() = %v", err) } elt, err := lister.ByNamespace(namespace).Get(name) if err != nil { t.Fatalf("Get() = %v", err) } got, ok := elt.(*duckv1alpha1.AddressableType) if !ok { t.Fatalf("Get() = %T, wanted *duckv1alpha1.AddressableType", elt) } if gotHostname := got.Status.Address.Hostname; gotHostname != want { t.Errorf("Get().Status.Address.Hostname = %v, wanted %v", gotHostname, want) } // TODO(mattmoor): Access through informer }
explode_data.jsonl/51696
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 643 }
[ 2830, 3393, 16374, 852, 1155, 353, 8840, 836, 8, 341, 1903, 8058, 1669, 15592, 7121, 28906, 741, 37972, 1249, 28906, 1141, 8058, 340, 197, 72970, 85, 16, 7141, 16, 1904, 1249, 28906, 1141, 8058, 692, 56623, 11, 829, 11, 1366, 1669, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestUTopAcccountsLongShortRatio(t *testing.T) { t.Parallel() _, err := b.UTopAcccountsLongShortRatio(context.Background(), currency.NewPair(currency.BTC, currency.USDT), "5m", 2, time.Time{}, time.Time{}) if err != nil { t.Error(err) } _, err = b.UTopAcccountsLongShortRatio(context.Background(), currency.NewPair(currency.BTC, currency.USDT), "5m", 2, time.Unix(1577836800, 0), time.Unix(1580515200, 0)) if err != nil { t.Error(err) } }
explode_data.jsonl/76565
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 1381, 453, 14603, 44859, 6583, 12472, 22777, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 197, 6878, 1848, 1669, 293, 13, 1381, 453, 14603, 44859, 6583, 12472, 22777, 5378, 19047, 1507, 11413, 7121, 12443, 90475, 1785...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHandlers_GetMessages(t *testing.T) { t.Parallel() path := "/api/v3/channels/{channelId}/messages" env := Setup(t, common1) user := env.CreateUser(t, rand) ch := env.CreateChannel(t, rand) m := env.CreateMessage(t, user.GetID(), ch.ID, rand) m2 := env.CreateMessage(t, user.GetID(), ch.ID, rand) s := env.S(t, user.GetID()) t.Run("not logged in", func(t *testing.T) { t.Parallel() e := env.R(t) e.GET(path, ch.ID). Expect(). Status(http.StatusUnauthorized) }) t.Run("not found", func(t *testing.T) { t.Parallel() e := env.R(t) e.GET(path, uuid.Must(uuid.NewV4())). WithCookie(session.CookieName, s). Expect(). Status(http.StatusNotFound) }) t.Run("bad request", func(t *testing.T) { t.Parallel() e := env.R(t) e.GET(path, ch.ID). WithCookie(session.CookieName, s). WithQuery("limit", -1). Expect(). Status(http.StatusBadRequest) }) t.Run("success", func(t *testing.T) { t.Parallel() e := env.R(t) obj := e.GET(path, ch.ID). WithCookie(session.CookieName, s). Expect(). Status(http.StatusOK). JSON(). Array() obj.Length().Equal(2) messageEquals(t, m2, obj.Element(0).Object()) messageEquals(t, m, obj.Element(1).Object()) }) }
explode_data.jsonl/40160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 566 }
[ 2830, 3393, 39949, 13614, 15820, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 26781, 1669, 3521, 2068, 5457, 18, 21284, 6680, 9388, 10119, 764, 4472, 16325, 698, 57538, 1669, 18626, 1155, 11, 4185, 16, 340, 19060, 1669, 6105, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Pid(t *testing.T) { cmd := cmder.New(sleep, five) err := cmd.Start() if err != nil { t.Error(err) } pid := cmd.Pid() if pid == nil || *pid <= 0 { msg := fmt.Sprintf("Expected pid non-zero pid. Got %d.", pid) t.Errorf(msg, pid) } err = cmd.Kill() if err != nil { t.Error(err) } }
explode_data.jsonl/70669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 1088, 307, 1155, 353, 8840, 836, 8, 341, 25920, 1669, 9961, 1107, 7121, 1141, 3499, 11, 4236, 692, 9859, 1669, 5439, 12101, 741, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197, 630, 78799, 1669, 5439, 1069, 307...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestEncodeDecodeResult(t *testing.T) { v := Result{} var buf bytes.Buffer msgp.Encode(&buf, &v) m := v.Msgsize() if buf.Len() > m { t.Log("WARNING: TestEncodeDecodeResult Msgsize() is inaccurate") } vn := Result{} err := msgp.Decode(&buf, &vn) if err != nil { t.Error(err) } buf.Reset() msgp.Encode(&buf, &v) err = msgp.NewReader(&buf).Skip() if err != nil { t.Error(err) } }
explode_data.jsonl/78284
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 32535, 32564, 2077, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 5714, 16094, 2405, 6607, 5820, 22622, 198, 21169, 79, 50217, 2099, 5909, 11, 609, 85, 692, 2109, 1669, 348, 30365, 2141, 741, 743, 6607, 65819, 368, 861, 296, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestResourceDirs(t *testing.T) { testCases := []struct { name string prop string resources []string }{ { name: "no resource_dirs", prop: "", resources: []string{"res/res/values/strings.xml"}, }, { name: "resource_dirs", prop: `resource_dirs: ["res"]`, resources: []string{"res/res/values/strings.xml"}, }, { name: "empty resource_dirs", prop: `resource_dirs: []`, resources: nil, }, } fs := android.MockFS{ "res/res/values/strings.xml": nil, } bp := ` android_app { name: "foo", sdk_version: "current", %s } ` for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { result := android.GroupFixturePreparers( PrepareForTestWithJavaDefaultModules, PrepareForTestWithOverlayBuildComponents, fs.AddToFixture(), ).RunTestWithBp(t, fmt.Sprintf(bp, testCase.prop)) module := result.ModuleForTests("foo", "android_common") resourceList := module.MaybeOutput("aapt2/res.list") var resources []string if resourceList.Rule != nil { for _, compiledResource := range resourceList.Inputs.Strings() { resources = append(resources, module.Output(compiledResource).Inputs.Strings()...) } } android.AssertDeepEquals(t, "resource files", testCase.resources, resources) }) } }
explode_data.jsonl/58483
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 581 }
[ 2830, 3393, 4783, 97384, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 414, 914, 198, 197, 79244, 414, 914, 198, 197, 10202, 2360, 3056, 917, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 414, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestAddMemberFailMissingAddress(t *testing.T) { ctx := wasmsolo.NewSoloContext(t, dividend.ScName, dividend.OnLoad) member := dividend.ScFuncs.Member(ctx) member.Params.Factor().SetValue(100) member.Func.Post() require.Error(t, ctx.Err) require.Contains(t, ctx.Err.Error(), "missing mandatory address") }
explode_data.jsonl/31342
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 115 }
[ 2830, 3393, 2212, 9366, 19524, 25080, 4286, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 572, 1011, 10011, 7121, 89299, 1972, 1155, 11, 45880, 18326, 675, 11, 45880, 8071, 5879, 692, 2109, 1377, 1669, 45880, 18326, 9626, 82, 46404, 7502, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPostgreSQLIntegration(t *testing.T) { connectionString := getConnectionString() if connectionString == "" { t.Skipf("PostgreSQL state integration tests skipped. To enable define the connection string using environment variable '%s' (example 'export %s=\"host=localhost user=postgres password=example port=5432 connect_timeout=10 database=dapr_test\")", connectionStringEnvKey, connectionStringEnvKey) } t.Run("Test init configurations", func(t *testing.T) { testInitConfiguration(t) }) metadata := state.Metadata{ Properties: map[string]string{connectionStringKey: connectionString}, } pgs := NewPostgreSQLStateStore(logger.NewLogger("test")) t.Cleanup(func() { defer pgs.Close() }) error := pgs.Init(metadata) if error != nil { t.Fatal(error) } t.Run("Create table succeeds", func(t *testing.T) { t.Parallel() testCreateTable(t, pgs.dbaccess.(*postgresDBAccess)) }) t.Run("Get Set Delete one item", func(t *testing.T) { t.Parallel() setGetUpdateDeleteOneItem(t, pgs) }) t.Run("Get item that does not exist", func(t *testing.T) { t.Parallel() getItemThatDoesNotExist(t, pgs) }) t.Run("Get item with no key fails", func(t *testing.T) { t.Parallel() getItemWithNoKey(t, pgs) }) t.Run("Set updates the updatedate field", func(t *testing.T) { t.Parallel() setUpdatesTheUpdatedateField(t, pgs) }) t.Run("Set item with no key fails", func(t *testing.T) { t.Parallel() setItemWithNoKey(t, pgs) }) t.Run("Bulk set and bulk delete", func(t *testing.T) { t.Parallel() testBulkSetAndBulkDelete(t, pgs) }) t.Run("Update and delete with etag succeeds", func(t *testing.T) { t.Parallel() updateAndDeleteWithEtagSucceeds(t, pgs) }) t.Run("Update with old etag fails", func(t *testing.T) { t.Parallel() updateWithOldEtagFails(t, pgs) }) t.Run("Insert with etag fails", func(t *testing.T) { t.Parallel() newItemWithEtagFails(t, pgs) }) t.Run("Delete with invalid etag fails", func(t *testing.T) { t.Parallel() deleteWithInvalidEtagFails(t, pgs) }) t.Run("Delete item with no key fails", func(t *testing.T) { 
t.Parallel() deleteWithNoKeyFails(t, pgs) }) t.Run("Delete an item that does not exist", func(t *testing.T) { t.Parallel() deleteItemThatDoesNotExist(t, pgs) }) t.Run("Multi with delete and set", func(t *testing.T) { t.Parallel() multiWithDeleteAndSet(t, pgs) }) t.Run("Multi with delete only", func(t *testing.T) { t.Parallel() multiWithDeleteOnly(t, pgs) }) t.Run("Multi with set only", func(t *testing.T) { t.Parallel() multiWithSetOnly(t, pgs) }) }
explode_data.jsonl/2606
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1017 }
[ 2830, 3393, 4133, 59796, 52464, 1155, 353, 8840, 836, 8, 341, 54590, 703, 1669, 633, 40431, 741, 743, 62084, 621, 1591, 341, 197, 3244, 57776, 69, 445, 4133, 59796, 1584, 17590, 7032, 35157, 13, 2014, 7283, 6979, 279, 3633, 914, 1667, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFirstLast(t *testing.T) { bm := New() bm.AddInt(2) bm.AddInt(4) bm.AddInt(8) assert.EqualValues(t, 2, bm.Minimum()) assert.EqualValues(t, 8, bm.Maximum()) i := 1 << 5 for ; i < (1 << 17); i++ { bm.AddInt(i) assert.EqualValues(t, 2, bm.Minimum()) assert.EqualValues(t, i, bm.Maximum()) } bm.RunOptimize() assert.EqualValues(t, 2, bm.Minimum()) assert.EqualValues(t, i-1, bm.Maximum()) }
explode_data.jsonl/20322
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 5338, 5842, 1155, 353, 8840, 836, 8, 341, 2233, 76, 1669, 1532, 741, 2233, 76, 1904, 1072, 7, 17, 340, 2233, 76, 1904, 1072, 7, 19, 340, 2233, 76, 1904, 1072, 7, 23, 692, 6948, 12808, 6227, 1155, 11, 220, 17, 11, 348...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUpdateRegistryErrorSaving(t *testing.T) { mockRepositoryRegistry := test.NewDeploymentConfigRegistry() mockRepositoryRegistry.Err = fmt.Errorf("foo") storage := REST{registry: mockRepositoryRegistry} channel, err := storage.Update(kapi.NewDefaultContext(), &api.DeploymentConfig{ ObjectMeta: kapi.ObjectMeta{Name: "bar"}, }) if err != nil { t.Errorf("Unexpected non-nil error: %#v", err) } result := <-channel status, ok := result.Object.(*kapi.Status) if !ok { t.Errorf("Expected status, got %#v", result) } if status.Status != kapi.StatusFailure || status.Message != "foo" { t.Errorf("Expected status=failure, message=foo, got %#v", status) } }
explode_data.jsonl/66991
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 4289, 15603, 1454, 43535, 1155, 353, 8840, 836, 8, 341, 77333, 4624, 15603, 1669, 1273, 7121, 75286, 2648, 15603, 741, 77333, 4624, 15603, 27862, 284, 8879, 13080, 445, 7975, 1138, 197, 16172, 1669, 25414, 90, 29172, 25, 7860,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAuthService_UpdateUser(t *testing.T) { user := createTestUserUpdatePayload() token := issueTestToken(user.ID, user.Username, createTestConfig().PrivKeyPath) dao := dao.MockUserDao{} dao.On("Get", user.ID).Return(&user, nil) dao.On("GetByUsername", user.Username).Return(&user, nil) dao.On("Update", &user).Return(nil) s := AuthService{&mailer, &dao, createTestConfig()} err := s.UpdateUser(&user, token) assert.Nil(t, err) }
explode_data.jsonl/18895
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 90466, 47393, 1474, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 1855, 2271, 1474, 4289, 29683, 741, 43947, 1669, 4265, 2271, 3323, 4277, 9910, 11, 1196, 42777, 11, 1855, 2271, 2648, 1005, 32124, 1592, 1820, 692, 2698, 3441, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSrcJar(t *testing.T) { mockFs := pathtools.MockFs(map[string][]byte{ "wrong_package.java": []byte("package foo;"), "foo/correct_package.java": []byte("package foo;"), "src/no_package.java": nil, "src2/parse_error.java": []byte("error"), }) want := []string{ "foo/", "foo/wrong_package.java", "foo/correct_package.java", "no_package.java", "src2/", "src2/parse_error.java", } args := ZipArgs{} args.FileArgs = NewFileArgsBuilder().File("**/*.java").FileArgs() args.SrcJar = true args.AddDirectoryEntriesToZip = true args.Filesystem = mockFs args.Stderr = &bytes.Buffer{} buf := &bytes.Buffer{} err := zipTo(args, buf) if err != nil { t.Fatalf("got error %v", err) } br := bytes.NewReader(buf.Bytes()) zr, err := zip.NewReader(br, int64(br.Len())) if err != nil { t.Fatal(err) } var got []string for _, f := range zr.File { r, err := f.Open() if err != nil { t.Fatalf("error when opening %s: %s", f.Name, err) } crc := crc32.NewIEEE() len, err := io.Copy(crc, r) r.Close() if err != nil { t.Fatalf("error when reading %s: %s", f.Name, err) } if uint64(len) != f.UncompressedSize64 { t.Errorf("incorrect length for %s, want %d got %d", f.Name, f.UncompressedSize64, len) } if crc.Sum32() != f.CRC32 { t.Errorf("incorrect crc for %s, want %x got %x", f.Name, f.CRC32, crc) } got = append(got, f.Name) } if !reflect.DeepEqual(want, got) { t.Errorf("want files %q, got %q", want, got) } }
explode_data.jsonl/65695
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 678 }
[ 2830, 3393, 20360, 71735, 1155, 353, 8840, 836, 8, 341, 77333, 48300, 1669, 3272, 426, 6178, 24664, 48300, 9147, 14032, 45725, 3782, 515, 197, 197, 1, 34870, 26328, 10848, 788, 981, 3056, 3782, 445, 1722, 15229, 26, 4461, 197, 197, 1, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestInvokeServiceFromGRPCResponse(t *testing.T) { mockDirectMessaging := new(daprt.MockDirectMessaging) // Setup Dapr API server fakeAPI := &api{ id: "fakeAPI", directMessaging: mockDirectMessaging, } t.Run("handle grpc response code", func(t *testing.T) { fakeResp := invokev1.NewInvokeMethodResponse( int32(codes.Unimplemented), "Unimplemented", []*anypb.Any{ mustMarshalAny(&epb.ResourceInfo{ ResourceType: "sidecar", ResourceName: "invoke/service", Owner: "Dapr", }), }, ) fakeResp.WithRawData([]byte("fakeDirectMessageResponse"), "application/json") // Set up direct messaging mock mockDirectMessaging.Calls = nil // reset call count mockDirectMessaging.On("Invoke", mock.AnythingOfType("*context.valueCtx"), "fakeAppID", mock.AnythingOfType("*v1.InvokeMethodRequest")).Return(fakeResp, nil).Once() // Run test server port, _ := freeport.GetFreePort() server := startDaprAPIServer(port, fakeAPI, "") defer server.Stop() // Create gRPC test client clientConn := createTestClient(port) defer clientConn.Close() // act client := runtimev1pb.NewDaprClient(clientConn) req := &runtimev1pb.InvokeServiceRequest{ Id: "fakeAppID", Message: &commonv1pb.InvokeRequest{ Method: "fakeMethod", Data: &anypb.Any{Value: []byte("testData")}, }, } _, err := client.InvokeService(context.Background(), req) // assert mockDirectMessaging.AssertNumberOfCalls(t, "Invoke", 1) s, ok := status.FromError(err) assert.True(t, ok) assert.Equal(t, codes.Unimplemented, s.Code()) assert.Equal(t, "Unimplemented", s.Message()) errInfo := s.Details()[0].(*epb.ResourceInfo) assert.Equal(t, 1, len(s.Details())) assert.Equal(t, "sidecar", errInfo.GetResourceType()) assert.Equal(t, "invoke/service", errInfo.GetResourceName()) assert.Equal(t, "Dapr", errInfo.GetOwner()) }) }
explode_data.jsonl/21728
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 773 }
[ 2830, 3393, 17604, 1860, 3830, 8626, 4872, 2582, 1155, 353, 8840, 836, 8, 341, 77333, 16027, 91049, 1669, 501, 1500, 391, 3342, 24664, 16027, 91049, 692, 197, 322, 18626, 422, 59817, 5333, 3538, 198, 1166, 726, 7082, 1669, 609, 2068, 51...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddWiredTigerStats(t *testing.T) { d := NewMongodbData( &StatLine{ StorageEngine: "wiredTiger", CacheDirtyPercent: 0, CacheUsedPercent: 0, TrackedDirtyBytes: 0, CurrentCachedBytes: 0, MaxBytesConfigured: 0, AppThreadsPageReadCount: 0, AppThreadsPageReadTime: 0, AppThreadsPageWriteCount: 0, BytesWrittenFrom: 0, BytesReadInto: 0, PagesEvictedByAppThread: 0, PagesQueuedForEviction: 0, ServerEvictingPages: 0, WorkerThreadEvictingPages: 0, }, tags, ) var acc testutil.Accumulator d.AddDefaultStats() d.flush(&acc) for key, _ := range WiredTigerStats { assert.True(t, acc.HasFloatField("mongodb", key)) } }
explode_data.jsonl/60753
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 2212, 54, 2690, 51, 7272, 16635, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 1532, 44, 21225, 1043, 1006, 197, 197, 5, 15878, 2460, 515, 298, 197, 5793, 4571, 25, 1797, 330, 86, 2690, 51, 7272, 756, 298, 6258, 1777, 36485,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFilter(t *testing.T) { members := Members{ {PKIid: common.PKIidType("p0"), Endpoint: "p0", Properties: &proto.Properties{ Chaincodes: []*proto.Chaincode{{Name: "cc", Version: "1.0"}}, }}, {PKIid: common.PKIidType("p1"), Endpoint: "p1", Properties: &proto.Properties{ Chaincodes: []*proto.Chaincode{{Name: "cc", Version: "2.0"}}, }}, } res := members.Filter(func(member NetworkMember) bool { cc := member.Properties.Chaincodes[0] return cc.Version == "2.0" && cc.Name == "cc" }) assert.Equal(t, Members{members[1]}, res) }
explode_data.jsonl/62277
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 5632, 1155, 353, 8840, 836, 8, 341, 2109, 7062, 1669, 16954, 515, 197, 197, 90, 22242, 40, 307, 25, 4185, 1069, 80971, 307, 929, 445, 79, 15, 3975, 47269, 25, 330, 79, 15, 497, 11831, 25, 609, 15110, 15945, 515, 298, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRangeDeleteParams_WithHTTPClient(t *testing.T) { p := NewRangeDeleteParams() cli := &http.Client{} p = p.WithHTTPClient(cli) require.NotNil(t, p.HTTPClient) assert.Equal(t, cli, p.HTTPClient) }
explode_data.jsonl/38293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 6046, 6435, 4870, 62, 2354, 9230, 2959, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 1532, 6046, 6435, 4870, 741, 86448, 1669, 609, 1254, 11716, 16094, 3223, 284, 281, 26124, 9230, 2959, 70249, 340, 17957, 93882, 1155, 11, 281,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDecanon(t *testing.T) { m := ImageRepos{imageReposMap{ name: infos, }} filteredImages := m.GetRepoImages(mustParseName("weaveworks/helloworld")).Filter("*") latest, ok := filteredImages.Latest() if !ok { t.Error("did not find latest image") } else if latest.ID.Name != mustParseName("weaveworks/helloworld") { t.Error("name did not match what was asked") } filteredImages = m.GetRepoImages(mustParseName("index.docker.io/weaveworks/helloworld")).Filter("*") latest, ok = filteredImages.Latest() if !ok { t.Error("did not find latest image") } else if latest.ID.Name != mustParseName("index.docker.io/weaveworks/helloworld") { t.Error("name did not match what was asked") } avail := m.GetRepoImages(mustParseName("weaveworks/helloworld")) if len(avail) != len(infos) { t.Errorf("expected %d available images, got %d", len(infos), len(avail)) } for _, im := range avail { if im.ID.Name != mustParseName("weaveworks/helloworld") { t.Errorf("got image with name %q", im.ID.String()) } } }
explode_data.jsonl/68529
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 4900, 58910, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 4654, 693, 966, 90, 1805, 693, 966, 2227, 515, 197, 11609, 25, 44144, 345, 197, 47449, 50108, 291, 14228, 1669, 296, 2234, 25243, 14228, 1255, 590, 14463, 675, 445, 89...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestOOMPanicInHashJoinWhenFetchBuildRows(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("use test") tk.MustExec("drop table if exists t") tk.MustExec("create table t(c1 int, c2 int)") tk.MustExec("insert into t values(1,1),(2,2)") fpName := "github.com/pingcap/tidb/executor/errorFetchBuildSideRowsMockOOMPanic" require.NoError(t, failpoint.Enable(fpName, `panic("ERROR 1105 (HY000): Out Of Memory Quota![conn_id=1]")`)) defer func() { require.NoError(t, failpoint.Disable(fpName)) }() err := tk.QueryToErr("select * from t as t2 join t as t1 where t1.c1=t2.c1") require.EqualError(t, err, "failpoint panic: ERROR 1105 (HY000): Out Of Memory Quota![conn_id=1]") }
explode_data.jsonl/38154
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 309 }
[ 2830, 3393, 46, 51607, 31270, 641, 6370, 12292, 4498, 20714, 11066, 9024, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUpdateRTStatus(t *testing.T) { assert := assert.New(t) cfg := config.NewDefaultAgentConfig(false) c, err := NewCollector(cfg) assert.NoError(err) // XXX: Give the collector a big channel so it never blocks. c.rtIntervalCh = make(chan time.Duration, 1000) // Validate that we switch to real-time if only one response says so. statuses := []*model.CollectorStatus{ {ActiveClients: 0, Interval: 2}, {ActiveClients: 3, Interval: 2}, {ActiveClients: 0, Interval: 2}, } c.updateStatus(statuses) assert.Equal(int32(1), atomic.LoadInt32(&c.realTimeEnabled)) // Validate that we stay that way statuses = []*model.CollectorStatus{ {ActiveClients: 0, Interval: 2}, {ActiveClients: 3, Interval: 2}, {ActiveClients: 0, Interval: 2}, } c.updateStatus(statuses) assert.Equal(int32(1), atomic.LoadInt32(&c.realTimeEnabled)) // And that it can turn back off statuses = []*model.CollectorStatus{ {ActiveClients: 0, Interval: 2}, {ActiveClients: 0, Interval: 2}, {ActiveClients: 0, Interval: 2}, } c.updateStatus(statuses) assert.Equal(int32(0), atomic.LoadInt32(&c.realTimeEnabled)) }
explode_data.jsonl/50773
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 4289, 5350, 2522, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 50286, 1669, 2193, 7121, 3675, 16810, 2648, 3576, 340, 1444, 11, 1848, 1669, 1532, 53694, 28272, 340, 6948, 35699, 3964, 340, 197, 322, 19975...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServer_Location(t *testing.T) { a := assert.New(t, false) srv := newServer(a, &Options{Tag: language.SimplifiedChinese}) w := httptest.NewRecorder() r, err := http.NewRequest(http.MethodGet, "/test", nil) a.NotError(err).NotNil(r) ctx := srv.NewContext(w, r) now := ctx.Now() a.Equal(now.Location(), srv.Location()) }
explode_data.jsonl/34204
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 5475, 85524, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 11, 895, 340, 1903, 10553, 1669, 501, 5475, 2877, 11, 609, 3798, 90, 5668, 25, 4128, 808, 73837, 44923, 3518, 6692, 1669, 54320, 70334, 7121, 47023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNodeUnpublishVolume(t *testing.T) { errorTarget := testutil.GetWorkDirPath("error_is_likely_target", t) targetFile := testutil.GetWorkDirPath("abc.go", t) d := NewFakeDriver() tests := []struct { desc string setup func() req csi.NodeUnpublishVolumeRequest skipOnDarwin bool expectedErr testutil.TestError cleanup func() }{ { desc: "[Error] Volume ID missing", req: csi.NodeUnpublishVolumeRequest{TargetPath: targetTest}, expectedErr: testutil.TestError{ DefaultError: status.Error(codes.InvalidArgument, "Volume ID missing in request"), }, }, { desc: "[Error] Target missing", req: csi.NodeUnpublishVolumeRequest{VolumeId: "vol_1"}, expectedErr: testutil.TestError{ DefaultError: status.Error(codes.InvalidArgument, "Target path missing in request"), }, }, { desc: "[Error] Volume operation in progress", setup: func() { d.volumeLocks.TryAcquire("vol_1") }, req: csi.NodeUnpublishVolumeRequest{TargetPath: targetFile, VolumeId: "vol_1"}, expectedErr: testutil.TestError{ DefaultError: status.Error(codes.Aborted, fmt.Sprintf(volumeOperationAlreadyExistsFmt, "vol_1")), }, cleanup: func() { d.volumeLocks.Release("vol_1") }, }, { desc: "[Error] Unmount error mocked by IsLikelyNotMountPoint", skipOnDarwin: true, req: csi.NodeUnpublishVolumeRequest{TargetPath: errorTarget, VolumeId: "vol_1"}, expectedErr: testutil.TestError{ DefaultError: status.Error(codes.Internal, fmt.Sprintf("failed to unmount target \"%s\": fake IsLikelyNotMountPoint: fake error", errorTarget)), }, }, { desc: "[Success] Valid request", req: csi.NodeUnpublishVolumeRequest{TargetPath: targetFile, VolumeId: "vol_1"}, expectedErr: testutil.TestError{}, }, } // Setup _ = makeDir(errorTarget) mounter, err := NewFakeMounter() if err != nil { t.Fatalf(fmt.Sprintf("failed to get fake mounter: %v", err)) } if runtime.GOOS != "windows" { mounter.Exec = &testingexec.FakeExec{ExactOrder: true} } d.mounter = mounter for _, test := range tests { if test.setup != nil { test.setup() } if test.skipOnDarwin && runtime.GOOS 
== "darwin" { continue } _, err := d.NodeUnpublishVolume(context.Background(), &test.req) if !testutil.AssertError(err, &test.expectedErr) { t.Errorf("test case: %s, \nUnexpected error: %v\nExpected error: %v", test.desc, err, test.expectedErr.GetExpectedError()) } if test.cleanup != nil { test.cleanup() } } // Clean up err = os.RemoveAll(errorTarget) assert.NoError(t, err) }
explode_data.jsonl/36852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1062 }
[ 2830, 3393, 1955, 1806, 27502, 18902, 1155, 353, 8840, 836, 8, 341, 18290, 6397, 1669, 1273, 1314, 2234, 6776, 6184, 1820, 445, 841, 6892, 62, 14282, 11123, 497, 259, 340, 28861, 1703, 1669, 1273, 1314, 2234, 6776, 6184, 1820, 445, 1368...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShortWriter(t *testing.T) { w := &ShortWriter{} payload := []byte("foo") n, err := w.Write(payload) assert.NoError(t, err, "Unexpected error writing to ShortWriter.") assert.Equal(t, len(payload)-1, n, "Wrong number of bytes written.") }
explode_data.jsonl/5038
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 12472, 6492, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 609, 12472, 6492, 16094, 76272, 1669, 3056, 3782, 445, 7975, 1138, 9038, 11, 1848, 1669, 289, 4073, 26772, 340, 6948, 35699, 1155, 11, 1848, 11, 330, 29430, 1465, 4378, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetDisabledExchanges(t *testing.T) { cfg := GetConfig() err := cfg.LoadConfig(ConfigTestFile) if err != nil { t.Errorf( "Test failed. TestGetDisabledExchanges. LoadConfig Error: %s", err.Error(), ) } exchanges := cfg.GetDisabledExchanges() if len(exchanges) != 0 { t.Error( "Test failed. TestGetDisabledExchanges. Enabled exchanges value mismatch", ) } exchCfg, err := cfg.GetExchangeConfig("Bitfinex") if err != nil { t.Errorf( "Test failed. TestGetDisabledExchanges. GetExchangeConfig Error: %s", err.Error(), ) } exchCfg.Enabled = false err = cfg.UpdateExchangeConfig(exchCfg) if err != nil { t.Errorf( "Test failed. TestGetDisabledExchanges. UpdateExchangeConfig Error: %s", err.Error(), ) } if len(cfg.GetDisabledExchanges()) != 1 { t.Error( "Test failed. TestGetDisabledExchanges. Enabled exchanges value mismatch", ) } }
explode_data.jsonl/21892
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 343 }
[ 2830, 3393, 1949, 25907, 840, 19365, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 2126, 2648, 741, 9859, 1669, 13286, 13969, 2648, 33687, 2271, 1703, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 1006, 298, 197, 1, 2271, 4641, 13, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestOrderRegistryDuplicate(t *testing.T) { orderID := "fake-order-id2" response := order.SubmitResponse{ OrderID: orderID, IsOrderPlaced: true, } r := NewOrderRegistry() duplicate := r.Store("ftx", response, nil) if duplicate { t.Failed() } if r.length != 1 { t.Errorf("Order Registry length count not incremented correctly") t.Failed() } duplicate2 := r.Store("ftx", response, nil) if duplicate2 == true { t.Logf("failed") t.Failed() } if r.length != 1 { t.Errorf("Order Registry length count not incremented correctly") t.Failed() } }
explode_data.jsonl/18564
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 231 }
[ 2830, 3393, 4431, 15603, 53979, 1155, 353, 8840, 836, 8, 341, 42245, 915, 1669, 330, 30570, 23810, 12897, 17, 698, 21735, 1669, 1973, 98309, 2582, 515, 197, 197, 4431, 915, 25, 981, 1973, 915, 345, 197, 197, 3872, 4431, 2120, 4435, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestMergedPostings checks Merge across table-driven combinations of
// list-backed and empty postings: no inputs, all-empty inputs, a single list,
// disjoint ranges, and overlapping ranges.
func TestMergedPostings(t *testing.T) {
	var cases = []struct {
		in  []Postings // postings lists to merge
		res Postings   // expected merged result
	}{
		{
			in:  []Postings{},
			res: EmptyPostings(),
		},
		{
			in: []Postings{
				newListPostings(),
				newListPostings(),
			},
			res: EmptyPostings(),
		},
		{
			in: []Postings{
				newListPostings(),
			},
			res: newListPostings(),
		},
		{
			in: []Postings{
				EmptyPostings(),
				EmptyPostings(),
				EmptyPostings(),
				EmptyPostings(),
			},
			res: EmptyPostings(),
		},
		{
			// Disjoint ranges concatenate.
			in: []Postings{
				newListPostings(1, 2, 3, 4, 5),
				newListPostings(6, 7, 8, 9, 10),
			},
			res: newListPostings(1, 2, 3, 4, 5, 6, 7, 8, 9, 10),
		},
		{
			// Overlapping ranges deduplicate.
			in: []Postings{
				newListPostings(1, 2, 3, 4, 5),
				newListPostings(4, 5, 6, 7, 8),
			},
			res: newListPostings(1, 2, 3, 4, 5, 6, 7, 8),
		},
		{
			in: []Postings{
				newListPostings(1, 2, 3, 4, 9, 10),
				newListPostings(1, 4, 5, 6, 7, 8, 10, 11),
			},
			res: newListPostings(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11),
		},
		{
			// An EmptyPostings mixed in must not change the merged result.
			in: []Postings{
				newListPostings(1, 2, 3, 4, 9, 10),
				EmptyPostings(),
				newListPostings(1, 4, 5, 6, 7, 8, 10, 11),
			},
			res: newListPostings(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11),
		},
		{
			in: []Postings{
				newListPostings(1, 2),
				newListPostings(),
			},
			res: newListPostings(1, 2),
		},
		{
			in: []Postings{
				newListPostings(1, 2),
				EmptyPostings(),
			},
			res: newListPostings(1, 2),
		},
	}

	for _, c := range cases {
		t.Run("", func(t *testing.T) {
			if c.res == nil {
				t.Fatal("merge result expectancy cannot be nil")
			}
			expected, err := ExpandPostings(c.res)
			testutil.Ok(t, err)
			m := Merge(c.in...)
			// EmptyPostings() is compared by identity here: when the expected
			// result is the sentinel, Merge must return that exact sentinel.
			if c.res == EmptyPostings() {
				testutil.Equals(t, EmptyPostings(), m)
				return
			}
			if m == EmptyPostings() {
				t.Fatal("merge unexpected result: EmptyPostings sentinel")
			}
			res, err := ExpandPostings(m)
			testutil.Ok(t, err)
			testutil.Equals(t, expected, res)
		})
	}
}
explode_data.jsonl/13126
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 997 }
[ 2830, 3393, 44, 51525, 4133, 819, 1155, 353, 8840, 836, 8, 341, 2405, 5048, 284, 3056, 1235, 341, 197, 17430, 3056, 4133, 819, 271, 197, 10202, 3877, 819, 198, 197, 59403, 197, 197, 515, 298, 17430, 25, 220, 3056, 4133, 819, 38837, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestChangePreExistVehiclesCommunicationIdAndPublishEvent(t *testing.T) { a := assert.New(t) var ( DefaultVersion1 = DefaultVersion + "-1" DefaultVersion2 = DefaultVersion + "-2" NewCommunicationID = DefaultCommunicationID + "-new" ) gen := &generatorMock{ id: DefaultID, versions: []Version{DefaultVersion1, DefaultVersion2}, } pub := &publisherMock{} vehicle := NewInstance(gen) vehicle.communicationID = DefaultCommunicationID vehicle.SetPublisher(pub) err := vehicle.GiveCommunication(NewCommunicationID) expectEvent1 := CommunicationIDGaveEvent{ CommunicationID: NewCommunicationID, } expectEvent2 := CommunicationIDRemovedEvent{ CommunicationID: DefaultCommunicationID, } a.Equal(vehicle.GetCommunicationID(), NewCommunicationID) a.Equal(vehicle.GetVersion(), DefaultVersion1) a.Equal(vehicle.GetNewVersion(), DefaultVersion2) a.Len(pub.events, 2) a.Contains(pub.events, expectEvent1) a.Contains(pub.events, expectEvent2) a.Nil(err) }
explode_data.jsonl/22742
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 352 }
[ 2830, 3393, 4072, 4703, 25613, 53, 41865, 65411, 764, 3036, 50145, 1556, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 692, 2405, 2399, 197, 91084, 5637, 16, 262, 284, 7899, 5637, 488, 6523, 16, 698, 197, 91084, 5637, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestClusterScope_ReconcileVCN exercises ClusterScope.ReconcileVCN against a
// mocked VCN client across four cases: no reconciliation needed when the VCN
// matches the spec, a successful display-name update, an update that fails
// and surfaces the error, and creation when lookup by name finds nothing.
func TestClusterScope_ReconcileVCN(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()
	vcnClient := mock_vcn.NewMockClient(mockCtrl)

	tags := make(map[string]string)
	tags["CreatedBy"] = "OCIClusterAPIProvider"
	tags["ClusterUUID"] = "a"
	definedTags := map[string]map[string]string{
		"ns1": {
			"tag1": "foo",
			"tag2": "bar",
		},
		"ns2": {
			"tag1": "foo1",
			"tag2": "bar1",
		},
	}
	// The mocked SDK calls take defined tags as
	// map[string]map[string]interface{}; convert the string-valued fixture.
	definedTagsInterface := make(map[string]map[string]interface{})
	for ns, mapNs := range definedTags {
		mapValues := make(map[string]interface{})
		for k, v := range mapNs {
			mapValues[k] = v
		}
		definedTagsInterface[ns] = mapValues
	}

	// GetVcn for "normal_id" always returns a VCN displaying "foo".
	vcnClient.EXPECT().GetVcn(gomock.Any(), gomock.Eq(core.GetVcnRequest{
		VcnId: common.String("normal_id"),
	})).
		Return(core.GetVcnResponse{
			Vcn: core.Vcn{
				Id:           common.String("normal_id"),
				FreeformTags: tags,
				DisplayName:  common.String("foo"),
				DefinedTags:  definedTagsInterface,
			},
		}, nil).AnyTimes()
	// First UpdateVcn (rename to "foo1") succeeds.
	vcnClient.EXPECT().UpdateVcn(gomock.Any(), gomock.Eq(core.UpdateVcnRequest{
		VcnId: common.String("normal_id"),
		UpdateVcnDetails: core.UpdateVcnDetails{
			DisplayName:  common.String("foo1"),
			FreeformTags: tags,
			DefinedTags:  definedTagsInterface,
		},
	})).
		Return(core.UpdateVcnResponse{
			Vcn: core.Vcn{
				Id:           common.String("normal_id"),
				FreeformTags: tags,
				DisplayName:  common.String("foo1"),
			},
		}, nil)
	// Second UpdateVcn (rename to "foo2") fails with "some error".
	vcnClient.EXPECT().UpdateVcn(gomock.Any(), gomock.Eq(core.UpdateVcnRequest{
		VcnId: common.String("normal_id"),
		UpdateVcnDetails: core.UpdateVcnDetails{
			DisplayName:  common.String("foo2"),
			FreeformTags: tags,
			DefinedTags:  definedTagsInterface,
		},
	})).
		Return(core.UpdateVcnResponse{
			Vcn: core.Vcn{},
		}, errors.New("some error"))
	// Lookup by name "not_found" in compartment "bar" returns an unrelated
	// VCN, so the reconciler proceeds to create one.
	vcnClient.EXPECT().ListVcns(gomock.Any(), gomock.Eq(core.ListVcnsRequest{
		CompartmentId: common.String("bar"),
		DisplayName:   common.String("not_found"),
	})).Return(
		core.ListVcnsResponse{
			Items: []core.Vcn{
				{
					Id: common.String("vcn_id"),
				},
			}}, nil)
	// CreateVcn is matched by display name via a custom matcher.
	vcnClient.EXPECT().CreateVcn(gomock.Any(), Eq(func(request interface{}) error {
		return createVcnDisplayNameMatcher(request, "not_found")
	})).
		Return(core.CreateVcnResponse{
			Vcn: core.Vcn{
				Id: common.String("not_found"),
			},
		}, nil)

	tests := []struct {
		name          string // subtest name
		spec          infrastructurev1beta1.OCIClusterSpec
		wantErr       bool
		expectedError string // exact error text when wantErr is true
	}{
		{
			name: "no reconciliation needed",
			spec: infrastructurev1beta1.OCIClusterSpec{
				DefinedTags: definedTags,
				NetworkSpec: infrastructurev1beta1.NetworkSpec{
					Vcn: infrastructurev1beta1.VCN{
						ID:   common.String("normal_id"),
						Name: "foo",
						CIDR: "bar",
					},
				},
			},
			wantErr: false,
		},
		{
			name: "vcn update needed",
			spec: infrastructurev1beta1.OCIClusterSpec{
				DefinedTags: definedTags,
				NetworkSpec: infrastructurev1beta1.NetworkSpec{
					Vcn: infrastructurev1beta1.VCN{
						ID:   common.String("normal_id"),
						Name: "foo1",
						CIDR: "bar",
					},
				},
			},
			wantErr: false,
		},
		{
			name: "vcn update needed but error out",
			spec: infrastructurev1beta1.OCIClusterSpec{
				DefinedTags: definedTags,
				NetworkSpec: infrastructurev1beta1.NetworkSpec{
					Vcn: infrastructurev1beta1.VCN{
						ID:   common.String("normal_id"),
						Name: "foo2",
						CIDR: "bar",
					},
				},
			},
			wantErr:       true,
			expectedError: "failed to reconcile the vcn, failed to update: some error",
		},
		{
			name: "vcn creation needed",
			spec: infrastructurev1beta1.OCIClusterSpec{
				CompartmentId: "bar",
				NetworkSpec: infrastructurev1beta1.NetworkSpec{
					Vcn: infrastructurev1beta1.VCN{
						Name: "not_found",
					},
				},
			},
			wantErr: false,
		},
	}

	l := log.FromContext(context.Background())
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			ociCluster := infrastructurev1beta1.OCICluster{
				Spec: tt.spec,
				ObjectMeta: metav1.ObjectMeta{
					UID: "a",
				},
			}
			s := &ClusterScope{
				VCNClient:  vcnClient,
				OCICluster: &ociCluster,
				Cluster: &clusterv1.Cluster{
					ObjectMeta: metav1.ObjectMeta{
						UID: "a",
					},
				},
				Logger: &l,
			}
			err := s.ReconcileVCN(context.Background())
			if (err != nil) != tt.wantErr {
				t.Errorf("ReconcileVCN() error = %v, wantErr %v", err, tt.wantErr)
			}
			if err != nil {
				if err.Error() != tt.expectedError {
					t.Errorf("ReconcileVCN() expected error = %s, actual error %s", tt.expectedError, err.Error())
				}
			}
		})
	}
}
explode_data.jsonl/52636
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2090 }
[ 2830, 3393, 28678, 10803, 50693, 40446, 457, 11287, 45, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 7860, 15001, 991, 18176, 741, 5195, 14271, 2959, 1669, 7860, 2273, 14271, 7121, 11571,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRaw(t *testing.T) { // TestRaw no longer works in CircleCi, Restrict to only VM tests. testutil.SkipIfNotRoot(t) tty, err := New() if os.IsNotExist(err) { t.Skipf("No /dev/tty here.") } else if err != nil { t.Fatalf("TestRaw new: want nil, got %v", err) } term, err := tty.Get() if err != nil { t.Fatalf("TestRaw get: want nil, got %v", err) } n, err := tty.Raw() if err != nil { t.Fatalf("TestRaw raw: want nil, got %v", err) } if !reflect.DeepEqual(term, n) { t.Fatalf("TestRaw: New(%v) and Raw(%v) should be equal, are not", t, n) } if err := tty.Set(n); err != nil { t.Fatalf("TestRaw restore mode: want nil, got %v", err) } n, err = tty.Get() if err != nil { t.Fatalf("TestRaw second call to New(): want nil, got %v", err) } if !reflect.DeepEqual(term, n) { t.Fatalf("TestRaw: After Raw restore: New(%v) and check(%v) should be equal, are not", term, n) } }
explode_data.jsonl/73092
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 389 }
[ 2830, 3393, 20015, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 20015, 902, 5021, 4278, 304, 21224, 60619, 11, 41327, 849, 311, 1172, 17792, 7032, 624, 18185, 1314, 57776, 2679, 2623, 8439, 1155, 340, 3244, 1881, 11, 1848, 1669, 1532, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestExecuteAndWaitRetries(t *testing.T) { t.Parallel() f := setup(t) defer f.shutDown() op, err := f.client.ExecuteAndWait(f.ctx, &repb.ExecuteRequest{}) if err != nil { t.Fatalf("client.WaitExecution(ctx, {}) = %v", err) } st := client.OperationStatus(op) if st == nil { t.Errorf("client.WaitExecution(ctx, {}) returned no status, expected Aborted") } if st != nil && st.Code() != codes.Aborted { t.Errorf("client.WaitExecution(ctx, {}) returned unexpected status code %s", st.Code()) } // 2 separate transient Execute errors. if f.fake.numCalls["Execute"] != 2 { t.Errorf("Expected 2 Execute calls, got %v", f.fake.numCalls["Execute"]) } // 3 separate transient WaitExecution errors + the final successful call. if f.fake.numCalls["WaitExecution"] != 4 { t.Errorf("Expected 4 WaitExecution calls, got %v", f.fake.numCalls["WaitExecution"]) } }
explode_data.jsonl/5611
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 17174, 92812, 12020, 4019, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1166, 1669, 6505, 1155, 340, 16867, 282, 2395, 332, 4454, 2822, 39703, 11, 1848, 1669, 282, 6581, 13827, 92812, 955, 30608, 11, 609, 9995, 65, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestAccGitRepo_RepoInitialization_Uninitialized(t *testing.T) { projectName := testutils.GenerateResourceName() gitRepoName := testutils.GenerateResourceName() tfRepoNode := "azuredevops_git_repository.repository" resource.Test(t, resource.TestCase{ PreCheck: func() { testutils.PreCheck(t, nil) }, Providers: testutils.GetProviders(), CheckDestroy: checkGitRepoDestroyed, Steps: []resource.TestStep{ { Config: testutils.HclGitRepoResource(projectName, gitRepoName, "Uninitialized"), Check: resource.ComposeTestCheckFunc( checkGitRepoExists(gitRepoName), resource.TestCheckResourceAttrSet(tfRepoNode, "project_id"), resource.TestCheckResourceAttr(tfRepoNode, "name", gitRepoName), resource.TestCheckResourceAttr(tfRepoNode, "default_branch", ""), ), }, }, }) }
explode_data.jsonl/59079
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 322 }
[ 2830, 3393, 14603, 46562, 25243, 62, 25243, 61928, 40687, 36161, 1155, 353, 8840, 836, 8, 341, 72470, 675, 1669, 1273, 6031, 57582, 4783, 675, 741, 90731, 25243, 675, 1669, 1273, 6031, 57582, 4783, 675, 741, 3244, 69, 25243, 1955, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadRecordRE(t *testing.T) { allRecordsStr := "hello<foo>howdy</foo>hello<bar>yellow</bar>hello<baz>goodbye</baz>" scr := NewScript() scr.input = bufio.NewReader(strings.NewReader(allRecordsStr)) scr.SetRS(`<[^>]+>[^<]*<[^>]+>`) scr.rsScanner = bufio.NewScanner(scr.input) scr.rsScanner.Split(scr.makeRecordSplitter()) for i := 0; i < 3; i++ { rec, err := scr.readRecord() if err != nil { t.Fatal(err) } if rec != "hello" { t.Fatalf("Expected %q but received %q", "hello", rec) } } }
explode_data.jsonl/2997
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 4418, 6471, 787, 1155, 353, 8840, 836, 8, 341, 50960, 25876, 2580, 1669, 330, 14990, 27, 7975, 29, 5158, 10258, 522, 7975, 29, 14990, 27, 2257, 29, 27869, 522, 2257, 29, 14990, 33177, 1370, 29, 18536, 28374, 522, 42573, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestVerifyHTTP01 runs Manager.verify against a stub ACME CA and checks that
// the http-01 challenge is served correctly, tls-sni-01/02 are attempted, and
// dns-01 is never accepted.
func TestVerifyHTTP01(t *testing.T) {
	var (
		http01 http.Handler

		authzCount      int // num. of created authorizations
		didAcceptHTTP01 bool
	)

	// Fetches the http-01 challenge response from the manager's handler and
	// checks the token body has the expected "token.keyAuth" shape.
	verifyHTTPToken := func() {
		r := httptest.NewRequest("GET", "/.well-known/acme-challenge/token-http-01", nil)
		w := httptest.NewRecorder()
		http01.ServeHTTP(w, r)
		if w.Code != http.StatusOK {
			t.Errorf("http token: w.Code = %d; want %d", w.Code, http.StatusOK)
		}
		if v := w.Body.String(); !strings.HasPrefix(v, "token-http-01.") {
			t.Errorf("http token value = %q; want 'token-http-01.' prefix", v)
		}
	}

	// ACME CA server stub, only the needed bits.
	// TODO: Merge this with startACMEServerStub, making it a configurable CA for testing.
	var ca *httptest.Server
	ca = httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Replay-Nonce", "nonce")
		if r.Method == "HEAD" {
			// a nonce request
			return
		}
		switch r.URL.Path {
		// Discovery.
		case "/":
			if err := discoTmpl.Execute(w, ca.URL); err != nil {
				t.Errorf("discoTmpl: %v", err)
			}
		// Client key registration.
		case "/new-reg":
			w.Write([]byte("{}"))
		// New domain authorization.
		case "/new-authz":
			authzCount++
			w.Header().Set("Location", fmt.Sprintf("%s/authz/%d", ca.URL, authzCount))
			w.WriteHeader(http.StatusCreated)
			if err := authzTmpl.Execute(w, ca.URL); err != nil {
				t.Errorf("authzTmpl: %v", err)
			}
		// Accept tls-sni-02.
		case "/challenge/2":
			w.Write([]byte("{}"))
		// Reject tls-sni-01.
		case "/challenge/1":
			http.Error(w, "won't accept tls-sni-01", http.StatusBadRequest)
		// Should not accept dns-01.
		case "/challenge/dns-01":
			t.Errorf("dns-01 challenge was accepted")
			http.Error(w, "won't accept dns-01", http.StatusBadRequest)
		// Accept http-01.
		case "/challenge/http-01":
			didAcceptHTTP01 = true
			verifyHTTPToken()
			w.Write([]byte("{}"))
		// Authorization statuses.
		// Make tls-sni-xxx invalid.
		case "/authz/1", "/authz/2":
			w.Write([]byte(`{"status": "invalid"}`))
		case "/authz/3", "/authz/4":
			w.Write([]byte(`{"status": "valid"}`))
		default:
			http.NotFound(w, r)
			t.Errorf("unrecognized r.URL.Path: %s", r.URL.Path)
		}
	}))
	defer ca.Close()

	m := &Manager{
		Client: &acme.Client{
			DirectoryURL: ca.URL,
		},
	}
	http01 = m.HTTPHandler(nil)
	ctx := context.Background()
	client, err := m.acmeClient(ctx)
	if err != nil {
		t.Fatalf("m.acmeClient: %v", err)
	}
	if err := m.verify(ctx, client, "example.org"); err != nil {
		t.Errorf("m.verify: %v", err)
	}
	// Only tls-sni-01, tls-sni-02 and http-01 must be accepted
	// The dns-01 challenge is unsupported.
	if authzCount != 3 {
		t.Errorf("authzCount = %d; want 3", authzCount)
	}
	if !didAcceptHTTP01 {
		t.Error("did not accept http-01 challenge")
	}
}
explode_data.jsonl/65053
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1211 }
[ 2830, 3393, 32627, 9230, 15, 16, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 28080, 15, 16, 1758, 31010, 271, 197, 78011, 89, 2507, 414, 526, 442, 1629, 13, 315, 3465, 3150, 8040, 198, 197, 2698, 307, 16646, 9230, 15, 16, 1807, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestQuic(t *testing.T) { if strings.ToLower(os.Getenv("CI")) != "true" { // TODO. (#1782) This test requires configuring hosts // file and updating the certificate in testdata. We // should find a more robust way of testing this. return } upstream := "quic.clemente.io:8086" config := "proxy / quic://" + upstream + " {\n\tinsecure_skip_verify\n}" content := "Hello, client" // make proxy upstreams, err := NewStaticUpstreams(caddyfile.NewDispenser("Testfile", strings.NewReader(config)), "") if err != nil { t.Errorf("Expected no error. Got: %s", err.Error()) } p := &Proxy{ Next: httpserver.EmptyNext, // prevents panic in some cases when test fails Upstreams: upstreams, } // start QUIC server go func() { dir, err := os.Getwd() if err != nil { t.Errorf("Expected no error. Got: %s", err.Error()) return } handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Write([]byte(content)) w.WriteHeader(200) }) err = h2quic.ListenAndServeQUIC( upstream, path.Join(dir, "testdata", "fullchain.pem"), // TODO: Use a dynamically-generated, self-signed cert instead path.Join(dir, "testdata", "privkey.pem"), handler, ) if err != nil { t.Errorf("Expected no error. Got: %s", err.Error()) return } }() r := httptest.NewRequest("GET", "/", nil) w := httptest.NewRecorder() _, err = p.ServeHTTP(w, r) if err != nil { t.Errorf("Expected no error. Got: %s", err.Error()) return } // check response if w.Code != 200 { t.Errorf("Expected response code 200, got: %d", w.Code) } responseContent := string(w.Body.Bytes()) if responseContent != content { t.Errorf("Expected response body, got: %s", responseContent) } }
explode_data.jsonl/64248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 678 }
[ 2830, 3393, 2183, 292, 1155, 353, 8840, 836, 8, 341, 743, 9069, 29983, 9638, 64883, 445, 11237, 2761, 961, 330, 1866, 1, 341, 197, 197, 322, 5343, 13, 29083, 16, 22, 23, 17, 8, 1096, 1273, 7460, 71783, 18432, 198, 197, 197, 322, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMultiPartitionSubscriberMultipleMessages streams two messages on each
// of two partitions through mocked subscribe/commit streams and verifies the
// receiver sees every message, the expected partitions are active, and the
// subscriber starts and stops cleanly.
func TestMultiPartitionSubscriberMultipleMessages(t *testing.T) {
	const subscription = "projects/123456/locations/us-central1-b/subscriptions/my-sub"
	receiver := newTestMessageReceiver(t)
	msg1 := seqMsgWithOffsetAndSize(22, 100)
	msg2 := seqMsgWithOffsetAndSize(23, 200)
	msg3 := seqMsgWithOffsetAndSize(44, 100)
	msg4 := seqMsgWithOffsetAndSize(45, 200)

	verifiers := test.NewVerifiers(t)

	// Partition 1
	subStream1 := test.NewRPCVerifier(t)
	subStream1.Push(initSubReqCommit(subscriptionPartition{Path: subscription, Partition: 1}), initSubResp(), nil)
	subStream1.Push(initFlowControlReq(), msgSubResp(msg1), nil)
	subStream1.Push(nil, msgSubResp(msg2), nil)
	verifiers.AddSubscribeStream(subscription, 1, subStream1)

	cmtStream1 := test.NewRPCVerifier(t)
	cmtStream1.Push(initCommitReq(subscriptionPartition{Path: subscription, Partition: 1}), initCommitResp(), nil)
	// commitReq(24): presumably the cursor after msg2 (offset 23) — confirm
	// against commitReq's semantics.
	cmtStream1.Push(commitReq(24), commitResp(1), nil)
	verifiers.AddCommitStream(subscription, 1, cmtStream1)

	// Partition 2
	subStream2 := test.NewRPCVerifier(t)
	subStream2.Push(initSubReqCommit(subscriptionPartition{Path: subscription, Partition: 2}), initSubResp(), nil)
	subStream2.Push(initFlowControlReq(), msgSubResp(msg3), nil)
	subStream2.Push(nil, msgSubResp(msg4), nil)
	verifiers.AddSubscribeStream(subscription, 2, subStream2)

	cmtStream2 := test.NewRPCVerifier(t)
	cmtStream2.Push(initCommitReq(subscriptionPartition{Path: subscription, Partition: 2}), initCommitResp(), nil)
	cmtStream2.Push(commitReq(46), commitResp(1), nil)
	verifiers.AddCommitStream(subscription, 2, cmtStream2)

	mockServer.OnTestStart(verifiers)
	defer mockServer.OnTestEnd()

	sub := newTestMultiPartitionSubscriber(t, receiver.onMessage, subscription, []int{1, 2})
	// Partitions 1 and 2 were requested; 0 and 3 must not be active.
	verifyPartitionsActive(t, sub, true, 1, 2)
	verifyPartitionsActive(t, sub, false, 0, 3)
	if gotErr := sub.WaitStarted(); gotErr != nil {
		t.Errorf("Start() got err: (%v)", gotErr)
	}
	receiver.ValidateMsgs(join(partitionMsgs(1, msg1, msg2), partitionMsgs(2, msg3, msg4)))
	sub.Stop()
	if gotErr := sub.WaitStopped(); gotErr != nil {
		t.Errorf("Stop() got err: (%v)", gotErr)
	}
}
explode_data.jsonl/31653
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 784 }
[ 2830, 3393, 20358, 49978, 40236, 32089, 15820, 1155, 353, 8840, 836, 8, 341, 4777, 15142, 284, 330, 17161, 14, 16, 17, 18, 19, 20, 21, 14, 31309, 62431, 84081, 16, 1455, 37885, 29966, 34198, 17967, 698, 17200, 12862, 1669, 501, 2271, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewFunc(t *testing.T) { newFn := func() (toil.Toiler, error) { return nil, nil } toilForker := NewFunc(newFn) if nil == toilForker { t.Errorf("After calling New(), expected returned value not to be nil, but instead was: %v", toilForker) } }
explode_data.jsonl/15805
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 106 }
[ 2830, 3393, 3564, 9626, 1155, 353, 8840, 836, 8, 1476, 8638, 24911, 1669, 2915, 368, 320, 983, 321, 3274, 5769, 11, 1465, 8, 341, 197, 853, 2092, 11, 2092, 198, 197, 630, 31709, 321, 37, 669, 261, 1669, 1532, 9626, 1755, 24911, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindStructuralBits(t *testing.T) { if !SupportedCPU() { t.SkipNow() } t.Run("avx2", func(t *testing.T) { testFindStructuralBits(t, find_structural_bits) }) if cpuid.CPU.Has(cpuid.AVX512F) { t.Run("avx512", func(t *testing.T) { testFindStructuralBits(t, find_structural_bits_avx512) }) } }
explode_data.jsonl/18735
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 9885, 9422, 4176, 19920, 1155, 353, 8840, 836, 8, 341, 743, 753, 34636, 31615, 368, 341, 197, 3244, 57776, 7039, 741, 197, 532, 3244, 16708, 445, 402, 87, 17, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 18185, 9885, 94...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFilterToAvailableChannels(t *testing.T) { testCases := []struct { name string genChanAndDocs int // Amount of docs and channels to generate (ie. doc1 ch1, doc2 ch2...) userChans base.Set // Channels user is in accessChans base.Set // Channels to get changes for expectedDocsReturned []string // Expected Doc IDs returned }{ { // Should log "Channels [ ch2 ] request without access by user test" - CBG-1326 name: "Info logged when channels dropped from list", genChanAndDocs: 3, userChans: base.SetOf("ch1", "ch3"), accessChans: base.SetOf("ch1", "ch2", "ch3"), expectedDocsReturned: []string{"doc1", "doc3"}, }, { name: "No info logged if no channels dropped from list", genChanAndDocs: 3, userChans: base.SetOf("ch1", "ch3"), accessChans: base.SetOf("ch1", "ch3"), expectedDocsReturned: []string{"doc1", "doc3"}, }, { name: "No info logged when using wildcard", genChanAndDocs: 3, userChans: base.SetOf("ch1", "ch3"), accessChans: base.SetOf("*"), expectedDocsReturned: []string{"doc1", "doc3"}, }, } defer base.SetUpTestLogging(base.LevelInfo, base.KeyChanges)() for _, testCase := range testCases { t.Run(testCase.name, func(t *testing.T) { db := setupTestDB(t) auth := db.Authenticator() user, err := auth.NewUser("test", "pass", testCase.userChans) require.NoError(t, err) require.NoError(t, auth.Save(user)) for i := 0; i < testCase.genChanAndDocs; i++ { id := fmt.Sprintf("%d", i+1) _, _, err = db.Put("doc"+id, Body{"channels": []string{"ch" + id}}) require.NoError(t, err) } err = db.WaitForPendingChanges(context.Background()) require.NoError(t, err) db.user, err = auth.GetUser("test") require.NoError(t, err) ch, err := db.GetChanges(testCase.accessChans, getZeroSequence()) require.NoError(t, err) require.Len(t, ch, len(testCase.expectedDocsReturned)) match := true // Check if expected matches with actual in-order for i, change := range ch { if change.ID != testCase.expectedDocsReturned[i] { match = false } } assert.True(t, match) db.Close() }) } }
explode_data.jsonl/60348
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1044 }
[ 2830, 3393, 5632, 1249, 16485, 35925, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 338, 914, 198, 197, 82281, 46019, 3036, 63107, 981, 526, 414, 442, 25783, 315, 26340, 323, 11744, 311, 6923, 320, 645, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInline(t *testing.T) { testdataBase := `../../testdata` source, err := filepath.Abs(filepath.Join(testdataBase, `hello`, `template.txt`)) if err != nil { t.Errorf(`%v`, err) } err = files.Copy(source, `/tmp/test-inline-hello.txt`) if err != nil { t.Errorf(`%v`, err) } r := ProcessRequest{ Source: `/tmp/test-inline-hello.txt`, Inline: true, PlaceholderSeparator: `:`, } err = Process(r) if err != nil { t.Errorf("%v", err) } assertTextFilesEqual(t, `/tmp/test-inline-hello.txt`, `../../testdata/hello/expected_output.txt`) }
explode_data.jsonl/66296
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 269 }
[ 2830, 3393, 25324, 1155, 353, 8840, 836, 8, 341, 18185, 691, 3978, 1669, 1565, 2748, 92425, 19324, 47418, 11, 1848, 1669, 26054, 33255, 34793, 22363, 8623, 691, 3978, 11, 1565, 14990, 7808, 1565, 4214, 3909, 63, 1171, 743, 1848, 961, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFindOnManual(t *testing.T) { var ( ctx = context.Background() svc = &service{ permissions: permissions.RuleSet{}, sScripts: ScriptSet{ &Script{ Name: "s1", Triggers: []*Trigger{ &Trigger{ EventTypes: []string{"ev"}, ResourceTypes: []string{"res"}, }, }, }, &Script{ Name: "s2", Triggers: []*Trigger{ &Trigger{ EventTypes: []string{"foo"}, ResourceTypes: []string{"bar"}, }, }, }, //&Script{ // Triggers: []*Trigger{ // &Trigger{ // EventTypes: []string{"not-a-match"}, // ResourceTypes: []string{"not-a-match"}, // }, // }, //}, }, cScripts: ScriptSet{ &Script{ Name: "s3", Triggers: []*Trigger{ &Trigger{ EventTypes: []string{"ev"}, ResourceTypes: []string{"res"}, }, }, }, &Script{ Name: "s4", Triggers: []*Trigger{ &Trigger{ EventTypes: []string{"foo"}, ResourceTypes: []string{"bar"}, }, }, }, }, } filter = Filter{ ResourceTypes: []string{"res"}, EventTypes: []string{"ev"}, ExcludeServerScripts: false, ExcludeClientScripts: false, } o, _, err = svc.Find(ctx, filter) a = assert.New(t) ) a.NoError(err) a.Len(o, 2) }
explode_data.jsonl/81123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 718 }
[ 2830, 3393, 9885, 1925, 52092, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 20985, 284, 2266, 19047, 2822, 197, 1903, 7362, 284, 609, 7936, 515, 298, 197, 29900, 25, 8541, 63961, 1649, 38837, 298, 1903, 44942, 25, 13710, 1649, 515, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewRunCommandUsageTemplate(t *testing.T) { f := newFakeKoolRun([]builder.Command{}, nil) f.parser.(*parser.FakeParser).MockScripts = []string{"testing_script"} cmd := NewRunCommand(f) SetRunUsageFunc(f, cmd) cmd.SetArgs([]string{"--help"}) if err := cmd.Execute(); err != nil { t.Errorf("unexpected error executing run command; error: %v", err) } if !f.out.(*shell.FakeOutputWriter).CalledPrintln { t.Error("did not call Println for command usage") } usage := f.out.(*shell.FakeOutputWriter).OutLines[0] if !strings.Contains(usage, "testing_script") { t.Error("did not find testing_script as available script on usage text") } }
explode_data.jsonl/60859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 234 }
[ 2830, 3393, 3564, 6727, 4062, 14783, 7275, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 52317, 42, 1749, 6727, 10556, 17850, 12714, 22655, 2092, 340, 1166, 25617, 41399, 9657, 991, 726, 6570, 568, 11571, 44942, 284, 3056, 917, 4913, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGenerateConfigForDCALiveCandles(t *testing.T) { cfg := Config{ Nickname: "ExampleStrategyDCALiveCandles", Goal: "To demonstrate live trading proof of concept against candle data", StrategySettings: StrategySettings{ Name: dca, }, CurrencySettings: []CurrencySettings{ { ExchangeName: testExchange, Asset: asset.Spot.String(), Base: currency.BTC.String(), Quote: currency.USDT.String(), InitialQuoteFunds: initialQuoteFunds2, BuySide: minMax, SellSide: minMax, Leverage: Leverage{ CanUseLeverage: false, }, MakerFee: makerFee, TakerFee: takerFee, }, }, DataSettings: DataSettings{ Interval: kline.OneMin.Duration(), DataType: common.CandleStr, LiveData: &LiveData{ APIKeyOverride: "", APISecretOverride: "", APIClientIDOverride: "", API2FAOverride: "", APISubAccountOverride: "", RealOrders: false, }, }, PortfolioSettings: PortfolioSettings{ BuySide: minMax, SellSide: minMax, Leverage: Leverage{ CanUseLeverage: false, }, }, StatisticSettings: StatisticSettings{ RiskFreeRate: decimal.NewFromFloat(0.03), }, } if saveConfig { result, err := json.MarshalIndent(cfg, "", " ") if err != nil { t.Fatal(err) } p, err := os.Getwd() if err != nil { t.Fatal(err) } err = ioutil.WriteFile(filepath.Join(p, "examples", "dca-candles-live.strat"), result, 0770) if err != nil { t.Error(err) } } }
explode_data.jsonl/58410
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 741 }
[ 2830, 3393, 31115, 2648, 2461, 5626, 969, 533, 34, 20125, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 5532, 515, 197, 18317, 41052, 25, 330, 13314, 19816, 5626, 969, 533, 34, 20125, 756, 197, 9600, 78, 278, 25, 257, 330, 1249, 19869, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_GetAPSServerInfoConfigFileNameImplGet(t *testing.T) { getInfo := &getServerInfoConfigFileNameImpl{} serverInfo := &model.ServerInfo{ServerName: "报警服务器1", Address: "/mnt/hgfs/Source/c++/iVideo/Source/APS/Bin/aps", Type: common.SERVER_TYPE_APS} cfgFileName, err := getInfo.GetInfo(serverInfo) if nil != err { t.Error(err) } if "/mnt/hgfs/Source/c++/iVideo/Source/APS/Bin/config.xml" != cfgFileName { t.Error(cfgFileName) } }
explode_data.jsonl/74516
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 13614, 2537, 1220, 2836, 1731, 2648, 10903, 9673, 1949, 1155, 353, 8840, 836, 8, 341, 10366, 1731, 1669, 609, 455, 5475, 1731, 2648, 10903, 9673, 16094, 41057, 1731, 1669, 609, 2528, 22997, 1731, 90, 5475, 675, 25, 330, 1061...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestParser_findStart(t *testing.T) { tests := []struct { name string args []string prefix []string suffix []string found bool }{ { name: "default case", args: nil, prefix: nil, suffix: nil, found: false, }, { name: "simple case", args: []string{"server"}, prefix: []string{"server"}, suffix: []string{}, found: true, }, { name: "also simple case", args: []string{"server", "foo"}, prefix: []string{"server"}, suffix: []string{"foo"}, found: true, }, { name: "longer simple case", args: []string{"server", "foo", "bar"}, prefix: []string{"server"}, suffix: []string{"foo", "bar"}, found: true, }, { name: "not found", args: []string{"not-server", "foo", "bar"}, prefix: []string{"not-server", "foo", "bar"}, found: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { p := Parser{ After: []string{"server", "agent"}, } prefix, suffix, found := p.findStart(tt.args) if !reflect.DeepEqual(prefix, tt.prefix) { t.Errorf("Parser.findStart() prefix = %+v\nWant = %+v", prefix, tt.prefix) } if !reflect.DeepEqual(suffix, tt.suffix) { t.Errorf("Parser.findStart() suffix = %+v\nWant = %+v", suffix, tt.suffix) } if found != tt.found { t.Errorf("Parser.findStart() found = %+v\nWant = %+v", found, tt.found) } }) } }
explode_data.jsonl/76322
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 658 }
[ 2830, 3393, 6570, 21814, 3479, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 31215, 256, 3056, 917, 198, 197, 3223, 5060, 3056, 917, 198, 197, 1903, 13554, 3056, 917, 198, 197, 58102, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTracepointProgramCall(t *testing.T) { // Kernels before 4.14 don't support attaching to syscall tracepoints. testutils.SkipOnOldKernel(t, "4.14", "syscalls tracepoint support") m, p := newUpdaterMapProg(t, ebpf.TracePoint) // Open Tracepoint at /sys/kernel/debug/tracing/events/syscalls/sys_enter_getpid // and attach it to the ebpf program created above. tp, err := Tracepoint("syscalls", "sys_enter_getpid", p, nil) if err != nil { t.Fatal(err) } // Trigger ebpf program call. unix.Getpid() // Assert that the value at index 0 has been updated to 1. assertMapValue(t, m, 0, 1) // Detach the Tracepoint. if err := tp.Close(); err != nil { t.Fatal(err) } // Reset map value to 0 at index 0. if err := m.Update(uint32(0), uint32(0), ebpf.UpdateExist); err != nil { t.Fatal(err) } // Retrigger the ebpf program call. unix.Getpid() // Assert that this time the value has not been updated. assertMapValue(t, m, 0, 0) }
explode_data.jsonl/27671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 379 }
[ 2830, 3393, 6550, 2768, 10690, 7220, 1155, 353, 8840, 836, 8, 341, 197, 322, 730, 42329, 1573, 220, 19, 13, 16, 19, 1513, 944, 1824, 71808, 311, 49345, 11655, 7706, 624, 18185, 6031, 57776, 1925, 18284, 26343, 1155, 11, 330, 19, 13, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDockerExec(t *testing.T) { assert := asrt.New(t) client := GetDockerClient() id, _, err := RunSimpleContainer("busybox:latest", "", []string{"tail", "-f", "/dev/null"}, nil, nil, nil, "0", false, true, nil) assert.NoError(err) t.Cleanup(func() { err = client.RemoveContainer(docker.RemoveContainerOptions{ ID: id, Force: true, }) assert.NoError(err) }) stdout, _, err := Exec(id, "ls /etc") assert.NoError(err) assert.Contains(stdout, "group\nhostname") _, stderr, err := Exec(id, "ls /nothingthere") assert.Error(err) assert.Contains(stderr, "No such file or directory") }
explode_data.jsonl/41382
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 35, 13659, 10216, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 438, 3342, 7121, 1155, 340, 25291, 1669, 2126, 35, 13659, 2959, 2822, 15710, 11, 8358, 1848, 1669, 6452, 16374, 4502, 445, 78467, 2011, 25, 19350, 497, 7342, 3056, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshalURL(t *testing.T) { b := []byte(`"http://example.com/a b"`) var u URL err := json.Unmarshal(b, &u) if err != nil { t.Fatal(err) } require.Equal(t, "http://example.com/a%20b", u.String(), "URL not properly unmarshalled in JSON.") err = yaml.Unmarshal(b, &u) if err != nil { t.Fatal(err) } require.Equal(t, "http://example.com/a%20b", u.String(), "URL not properly unmarshalled in YAML.") }
explode_data.jsonl/72912
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 187 }
[ 2830, 3393, 1806, 27121, 3144, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 3056, 3782, 5809, 1, 1254, 1110, 8687, 905, 14186, 293, 1, 24183, 2405, 575, 5548, 271, 9859, 1669, 2951, 38097, 1883, 11, 609, 84, 340, 743, 1848, 961, 2092, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDealStatusStreamSendReceiveResponse(t *testing.T) { ctx := context.Background() testCases := map[string]struct { senderDisabledNew bool receiverDisabledNew bool }{ "both clients current version": {}, "sender old supports old queries": { senderDisabledNew: true, }, "receiver only supports old queries": { receiverDisabledNew: true, }, } for testCase, data := range testCases { t.Run(testCase, func(t *testing.T) { td := shared_testutil.NewLibp2pTestData(ctx, t) var fromNetwork, toNetwork network.StorageMarketNetwork if data.senderDisabledNew { fromNetwork = network.NewFromLibp2pHost(td.Host1, network.SupportedDealStatusProtocols([]protocol.ID{storagemarket.OldDealStatusProtocolID})) } else { fromNetwork = network.NewFromLibp2pHost(td.Host1) } if data.receiverDisabledNew { toNetwork = network.NewFromLibp2pHost(td.Host2, network.SupportedDealStatusProtocols([]protocol.ID{storagemarket.OldDealStatusProtocolID})) } else { toNetwork = network.NewFromLibp2pHost(td.Host2) } toHost := td.Host2.ID() // host1 gets no-op receiver tr := &testReceiver{t: t} require.NoError(t, fromNetwork.SetDelegate(tr)) // host2 gets receiver achan := make(chan network.DealStatusResponse) tr2 := &testReceiver{t: t, dealStatusStreamHandler: func(s network.DealStatusStream) { a, _, err := s.ReadDealStatusResponse() require.NoError(t, err) achan <- a }} require.NoError(t, toNetwork.SetDelegate(tr2)) assertDealStatusResponseReceived(ctx, t, fromNetwork, toHost, achan) }) } }
explode_data.jsonl/19996
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 617 }
[ 2830, 3393, 72841, 2522, 3027, 11505, 14742, 2582, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 2822, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 1903, 1659, 25907, 3564, 256, 1807, 198, 197, 17200, 12862, 25907, 3564,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuffer_AcceptRemovesBatch(t *testing.T) { m := Metric() b := setup(NewBuffer("test", 5)) b.Add(m, m, m) batch := b.Batch(2) b.Accept(batch) require.Equal(t, 1, b.Len()) }
explode_data.jsonl/17683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 4095, 1566, 66, 1484, 6590, 10088, 21074, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 52458, 741, 2233, 1669, 6505, 35063, 4095, 445, 1944, 497, 220, 20, 1171, 2233, 1904, 1255, 11, 296, 11, 296, 340, 2233, 754, 1669, 293, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNewService(t *testing.T) { g := NewGomegaWithT(t) scheme := runtime.NewScheme() _ = clusterv1.AddToScheme(scheme) _ = infrav1.AddToScheme(scheme) _ = infrav1exp.AddToScheme(scheme) cluster := &clusterv1.Cluster{ ObjectMeta: metav1.ObjectMeta{Name: "test-cluster"}, } client := fake.NewClientBuilder().WithScheme(scheme).WithObjects(cluster).Build() s, err := scope.NewClusterScope(context.Background(), scope.ClusterScopeParams{ AzureClients: scope.AzureClients{ Authorizer: autorest.NullAuthorizer{}, }, Client: client, Cluster: cluster, AzureCluster: &infrav1.AzureCluster{ Spec: infrav1.AzureClusterSpec{ Location: "test-location", ResourceGroup: "my-rg", SubscriptionID: "123", NetworkSpec: infrav1.NetworkSpec{ Vnet: infrav1.VnetSpec{Name: "my-vnet", ResourceGroup: "my-rg"}, }, }, }, }) g.Expect(err).ToNot(HaveOccurred()) mpms, err := scope.NewMachinePoolMachineScope(scope.MachinePoolMachineScopeParams{ Client: client, Logger: s.Logger, MachinePool: new(clusterv1exp.MachinePool), AzureMachinePool: new(infrav1exp.AzureMachinePool), AzureMachinePoolMachine: new(infrav1exp.AzureMachinePoolMachine), ClusterScope: s, }) g.Expect(err).ToNot(HaveOccurred()) actual := NewService(mpms) g.Expect(actual).ToNot(BeNil()) }
explode_data.jsonl/70804
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 605 }
[ 2830, 3393, 3564, 1860, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 340, 1903, 8058, 1669, 15592, 7121, 28906, 741, 197, 62, 284, 1185, 590, 648, 16, 1904, 1249, 28906, 1141, 8058, 340, 197, 62, 284, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEmptyEnv_Allowed(t *testing.T) { initJSON() AllowEmptyEnv(true) BindEnv("type") // Empty environment variable BindEnv("name") // Bound, but not set environment variable os.Clearenv() os.Setenv("TYPE", "") assert.Equal(t, "", Get("type")) assert.Equal(t, "Cake", Get("name")) }
explode_data.jsonl/5559
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 3522, 14359, 53629, 12817, 1155, 353, 8840, 836, 8, 341, 28248, 5370, 2822, 197, 18605, 3522, 14359, 3715, 692, 197, 9950, 14359, 445, 1313, 899, 442, 22228, 4573, 3890, 198, 197, 9950, 14359, 445, 606, 899, 442, 37176, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParentSubQuery(t *testing.T) { var jsonStr = `{ "topology": { "instances": [ { "service_version": "1.2.3", "service_locale": {"lang": "en"}, "service_roles": ["one", "two"] }, { "service_version": "1.2.4", "service_locale": {"lang": "th"}, "service_roles": ["three", "four"] }, { "service_version": "1.2.2", "service_locale": {"lang": "en"}, "service_roles": ["one"] } ] } }` res := Get(jsonStr, `topology.instances.#( service_roles.#(=="one"))#.service_version`) // should return two instances assert(t, res.String() == `["1.2.3","1.2.2"]`) }
explode_data.jsonl/43471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 8387, 3136, 2859, 1155, 353, 8840, 836, 8, 341, 2405, 2951, 2580, 284, 1565, 515, 197, 197, 1, 3481, 2449, 788, 341, 7847, 330, 47825, 788, 2278, 298, 197, 515, 11869, 330, 7936, 9438, 788, 330, 16, 13, 17, 13, 18, 756...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRangeCacheContextCancellation(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) db := initTestDescriptorDB(t) // lookupAndWaitUntilJoin performs a RangeDescriptor lookup in a new // goroutine and blocks until the request is added to the inflight request // map. It returns a channel that transmits the error return value from the // lookup. lookupAndWaitUntilJoin := func(ctx context.Context, key roachpb.RKey, expectDBLookup bool) chan error { errC := make(chan error) var blocked <-chan struct{} if expectDBLookup { blocked = db.notifyOn(key) } else { ch := make(chan struct{}) db.cache.coalesced = ch blocked = ch } go func() { _, err := db.cache.lookupInternal(ctx, key, EvictionToken{}, false) errC <- err }() <-blocked return errC } expectContextCancellation := func(t *testing.T, c <-chan error) { t.Helper() if err := <-c; !errors.Is(err, context.Canceled) { t.Errorf("expected context cancellation error, found %v", err) } } expectNoError := func(t *testing.T, c <-chan error) { t.Helper() if err := <-c; err != nil { t.Errorf("unexpected error, found %v", err) } } ctx1, cancel := context.WithCancel(context.Background()) // leader ctx2 := context.Background() ctx3 := context.Background() db.pauseRangeLookups() key1 := roachpb.RKey("aa") errC1 := lookupAndWaitUntilJoin(ctx1, key1, true) errC2 := lookupAndWaitUntilJoin(ctx2, key1, false) // Cancel the leader and check that it gets an error. cancel() expectContextCancellation(t, errC1) // While lookups are still blocked, launch another one. This new request // should join the flight just like c2. errC3 := lookupAndWaitUntilJoin(ctx3, key1, false) // Let the flight finish. db.resumeRangeLookups() expectNoError(t, errC2) expectNoError(t, errC3) }
explode_data.jsonl/28186
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 678 }
[ 2830, 3393, 6046, 8233, 1972, 82298, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 20939, 1669, 2930, 2271, 11709, 3506, 1155, 692, 197, 322, 18615, 92812, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIsArchiveInvalidHeader(t *testing.T) { header := []byte{0x00, 0x01, 0x02} out := IsArchive(header) if out { t.Fatalf("isArchive should return false as %s is not a valid archive header", header) } }
explode_data.jsonl/81961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 3872, 42502, 7928, 4047, 1155, 353, 8840, 836, 8, 341, 20883, 1669, 3056, 3782, 90, 15, 87, 15, 15, 11, 220, 15, 87, 15, 16, 11, 220, 15, 87, 15, 17, 532, 13967, 1669, 2160, 42502, 25534, 340, 743, 700, 341, 197, 324...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestExportTypeValue(t *testing.T) { t.Parallel() script := ` access(all) fun main(): Type { return Type<Int>() } ` actual := exportValueFromScript(t, script) expected := cadence.TypeValue{ StaticType: "Int", } assert.Equal(t, expected, actual) }
explode_data.jsonl/12569
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 16894, 929, 1130, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 86956, 1669, 22074, 286, 2615, 20388, 8, 2464, 1887, 4555, 3990, 341, 310, 470, 3990, 34520, 18949, 286, 456, 262, 1565, 271, 88814, 1669, 7485, 1130,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReconcile_SuccessNotFound(t *testing.T) { c := setup(t, node1ID) req := ctrl.Request{NamespacedName: types.NamespacedName{Namespace: ns, Name: "not-found-that-name"}} res, err := c.Reconcile(req) assert.Nil(t, err) assert.Equal(t, res, ctrl.Result{}) }
explode_data.jsonl/51716
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 693, 40446, 457, 87161, 10372, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 6505, 1155, 11, 2436, 16, 915, 692, 24395, 1669, 23743, 9659, 90, 7980, 68552, 675, 25, 4494, 98932, 68552, 675, 90, 22699, 25, 12268, 11, 3988, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParallel_3(t *testing.T) { testConf.SetGOPATH("") conf := testConf.Config() t.Parallel() for _, test := range tests { if err := test.Check(conf); err != nil { t.Error(err) } } }
explode_data.jsonl/41691
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 85 }
[ 2830, 3393, 16547, 62, 18, 1155, 353, 8840, 836, 8, 341, 18185, 15578, 4202, 98733, 4827, 31764, 67850, 1669, 1273, 15578, 10753, 741, 3244, 41288, 7957, 741, 2023, 8358, 1273, 1669, 2088, 7032, 341, 197, 743, 1848, 1669, 1273, 10600, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestGetContainerTypePodSandbox(t *testing.T) { annotations := map[string]string{ vcAnnotations.ContainerTypeKey: string(vc.PodSandbox), } testGetContainerTypeSuccessful(t, annotations, vc.PodSandbox) }
explode_data.jsonl/44037
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 1949, 4502, 929, 23527, 50, 31536, 1155, 353, 8840, 836, 8, 341, 197, 39626, 1669, 2415, 14032, 30953, 515, 197, 5195, 66, 21418, 33672, 929, 1592, 25, 914, 80698, 88823, 50, 31536, 1326, 197, 630, 18185, 1949, 4502, 929, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestTradeActions_AggregationInvalidOffset(t *testing.T) { ht := StartHTTPTest(t, "base") defer ht.Finish() dbQ := &Q{ht.HorizonSession()} ass1, ass2, err := PopulateTestTrades(dbQ, 0, 100, hour, 1) ht.Require.NoError(err) q := make(url.Values) setAssetQuery(&q, "base_", ass1) setAssetQuery(&q, "counter_", ass2) q.Add("order", "asc") testCases := []struct { offset int64 resolution int64 startTime int64 endTime int64 }{ {offset: minute, resolution: hour}, // Test invalid offset value that's not hour aligned {offset: 25 * hour, resolution: week}, // Test invalid offset value that's greater than 24 hours {offset: 3 * hour, resolution: hour}, // Test invalid offset value that's greater than the resolution {offset: 3 * hour, startTime: 28 * hour, endTime: 26 * hour, resolution: day}, // Test invalid end time that's less than the start time {offset: 3 * hour, startTime: 6 * hour, endTime: 26 * hour, resolution: day}, // Test invalid end time that's less than the offset-adjusted start time {offset: 1 * hour, startTime: 5 * hour, endTime: 3 * hour, resolution: day}, // Test invalid end time that's less than the offset-adjusted start time {offset: 3 * hour, endTime: 1 * hour, resolution: day}, // Test invalid end time that's less than the offset {startTime: 3 * minute, endTime: 1 * minute, resolution: minute}, // Test invalid end time that's less than the start time (no offset) } for _, tc := range testCases { t.Run("Testing invalid offset parameters", func(t *testing.T) { q.Add("offset", strconv.FormatInt(tc.offset, 10)) q.Add("resolution", strconv.FormatInt(tc.resolution, 10)) q.Add("start_time", strconv.FormatInt(tc.startTime, 10)) if tc.endTime != 0 { q.Add("end_time", strconv.FormatInt(tc.endTime, 10)) } w := ht.GetWithParams(aggregationPath, q) ht.Assert.Equal(400, w.Code) }) } }
explode_data.jsonl/6929
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 817 }
[ 2830, 3393, 39173, 12948, 1566, 70, 34442, 7928, 6446, 1155, 353, 8840, 836, 8, 341, 197, 426, 1669, 5145, 9230, 2271, 1155, 11, 330, 3152, 1138, 16867, 34323, 991, 18176, 741, 20939, 48, 1669, 609, 48, 90, 426, 3839, 269, 16973, 5283...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIntSum_AggregationTemporality(t *testing.T) { ms := NewIntSum() assert.EqualValues(t, AggregationTemporalityUnspecified, ms.AggregationTemporality()) testValAggregationTemporality := AggregationTemporalityCumulative ms.SetAggregationTemporality(testValAggregationTemporality) assert.EqualValues(t, testValAggregationTemporality, ms.AggregationTemporality()) }
explode_data.jsonl/32694
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 117 }
[ 2830, 3393, 1072, 9190, 1566, 70, 34442, 12151, 269, 2719, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 1072, 9190, 741, 6948, 12808, 6227, 1155, 11, 4598, 34442, 12151, 269, 2719, 1806, 53434, 11, 9829, 49850, 34442, 12151, 269, 27...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildLogsError(t *testing.T) { testServer(t, func(c *stdsdk.Client, p *structs.MockProvider) { opts := structs.LogsOptions{Since: options.Duration(2 * time.Minute)} p.On("BuildLogs", "app1", "build1", opts).Return(nil, fmt.Errorf("err1")) r1, err := c.Websocket("/apps/app1/builds/build1/logs", stdsdk.RequestOptions{}) require.NoError(t, err) require.NotNil(t, r1) d1, err := ioutil.ReadAll(r1) require.NoError(t, err) require.Equal(t, []byte("ERROR: err1\n"), d1) }) }
explode_data.jsonl/71424
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 11066, 51053, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 5475, 1155, 11, 2915, 1337, 353, 1834, 51295, 11716, 11, 281, 353, 1235, 82, 24664, 5179, 8, 341, 197, 64734, 1669, 62845, 5247, 82, 3798, 90, 12549, 25, 2606, 33795...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVerifyAndComplete(t *testing.T) { t.Parallel() info := &ChangeFeedInfo{ SinkURI: "blackhole://", Opts: map[string]string{}, StartTs: 417257993615179777, Config: &config.ReplicaConfig{ CaseSensitive: true, EnableOldValue: true, CheckGCSafePoint: true, }, } err := info.VerifyAndComplete() require.Nil(t, err) require.Equal(t, SortUnified, info.Engine) marshalConfig1, err := info.Config.Marshal() require.Nil(t, err) defaultConfig := config.GetDefaultReplicaConfig() marshalConfig2, err := defaultConfig.Marshal() require.Nil(t, err) require.Equal(t, marshalConfig2, marshalConfig1) }
explode_data.jsonl/10730
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 256 }
[ 2830, 3393, 32627, 3036, 12548, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 27043, 1669, 609, 4072, 28916, 1731, 515, 197, 7568, 766, 10301, 25, 330, 11453, 30420, 1110, 756, 197, 197, 43451, 25, 262, 2415, 14032, 30953, 3883...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPermissionService exercises the permission service lifecycle:
// Init must fail without an admin account, succeed once one exists, and the
// service must start, react to a PoV sync event, and stop cleanly.
func TestPermissionService(t *testing.T) {
	// Isolated config/data directory, removed when the test ends.
	dir := filepath.Join(config.QlcTestDataDir(), uuid.New().String())
	cm := config.NewCfgManager(dir)
	_, err := cm.Load()
	if err != nil {
		t.Fatal(err)
	}
	defer func() { _ = os.RemoveAll(dir) }()

	ps := NewPermissionService(cm.ConfigFile)
	cc := context.NewChainContext(cm.ConfigFile)
	l := ledger.NewLedger(cm.ConfigFile)

	// The service needs an RPC service registered on the chain context.
	rs, err := NewRPCService(cm.ConfigFile)
	if err != nil {
		t.Fatal(err)
	}
	err = cc.Register(context.RPCService, rs)
	if err != nil {
		t.Fatal(err)
	}

	// With no admin account in the ledger, Init is expected to fail.
	err = ps.Init()
	if err == nil {
		t.Fatal()
	}

	// Add a valid admin account; Init should now succeed.
	adminAccount := mock.Account()
	admin := new(abi.AdminAccount)
	admin.Account = adminAccount.Address()
	admin.Comment = "t1"
	admin.Valid = true
	addTestAdmin(t, l, admin, 1)
	err = ps.Init()
	if err != nil {
		t.Fatal()
	}

	// Give the admin account ledger metadata holding the chain token.
	am := mock.AccountMeta(adminAccount.Address())
	am.Tokens[0].Type = config.ChainToken()
	//ps.vmCtx.Ledger.AddAccountMeta(am, ps.vmCtx.Ledger.Cache().GetCache())
	l.AddAccountMeta(am, l.Cache().GetCache())
	cc.SetAccounts([]*types.Account{adminAccount})

	// Init must also tolerate a configured whitelist entry.
	wli := &config.WhiteListInfo{
		PeerId:  "xxxxxxx",
		Addr:    "127.0.0.1:9734",
		Comment: "tn1",
	}
	ps.cfg.WhiteList.WhiteListInfos = append(ps.cfg.WhiteList.WhiteListInfos, wli)
	err = ps.Init()
	if err != nil {
		t.Fatal()
	}

	// Bring up the chain context and the service itself.
	err = ps.cc.Init(func() error { return nil })
	if err != nil {
		t.Fatal(err)
	}
	ps.cc.Start()
	err = ps.Start()
	if err != nil {
		t.Fatal(err)
	}
	ps.Status()

	// Signal PoV sync completion and give the service time to process it.
	ps.cc.EventBus().Publish(topic.EventPovSyncState, topic.SyncDone)
	time.Sleep(10 * time.Second)

	err = ps.Stop()
	if err != nil {
		t.Fatal()
	}
	// Allow background goroutines to wind down before the test exits.
	time.Sleep(3 * time.Second)
}
explode_data.jsonl/45765
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 703 }
[ 2830, 3393, 14966, 1860, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 26054, 22363, 8754, 10003, 17257, 83920, 6184, 1507, 16040, 7121, 1005, 703, 2398, 98316, 1669, 2193, 7121, 42467, 2043, 14161, 340, 197, 6878, 1848, 1669, 9961, 13969, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImportedTypes(t *testing.T) { if !testenv.HasSrc() { t.Skip("no source code available") } for _, test := range importedObjectTests { s := strings.Split(test.name, ".") if len(s) != 2 { t.Fatal("invalid test data format") } importPath := s[0] objName := s[1] pkg, err := importer.ImportFrom(importPath, ".", 0) if err != nil { t.Error(err) continue } obj := pkg.Scope().Lookup(objName) if obj == nil { t.Errorf("%s: object not found", test.name) continue } got := types.ObjectString(obj, types.RelativeTo(pkg)) if got != test.want { t.Errorf("%s: got %q; want %q", test.name, got, test.want) } } }
explode_data.jsonl/57735
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 11511, 291, 4173, 1155, 353, 8840, 836, 8, 341, 743, 753, 1944, 3160, 16152, 20360, 368, 341, 197, 3244, 57776, 445, 2152, 2530, 2038, 2500, 1138, 197, 630, 2023, 8358, 1273, 1669, 2088, 24928, 1190, 18200, 341, 197, 1903, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestFavoriteAddCreatedAlwaysGoThrough verifies that adding a favorite with
// Created=true always reaches FavoriteAdd on the service, even when an
// identical favorite was just added (and would otherwise be deduplicated).
func TestFavoriteAddCreatedAlwaysGoThrough(t *testing.T) {
	mockCtrl, config, ctx := favTestInit(t, false)
	f := NewFavorites(config)
	f.InitForTest()
	defer favTestShutdown(t, mockCtrl, config, f)

	// First add: Created=false. Expect a FavoriteList refresh plus one
	// FavoriteAdd call.
	fav1 := favorites.ToAdd{
		Folder: favorites.Folder{
			Name: "test",
			Type: tlf.Public,
		},
		Data:    favorites.Data{},
		Created: false,
	}
	expected1 := keybase1.Folder{
		Name:       "test",
		FolderType: keybase1.FolderType_PUBLIC,
		Created:    false,
	}
	config.mockKbpki.EXPECT().FavoriteList(gomock.Any()).Return(keybase1.FavoritesResult{}, nil)
	config.mockKbpki.EXPECT().FavoriteAdd(gomock.Any(), expected1).Return(nil)
	config.mockClock.EXPECT().Now().Return(time.Unix(0, 0)).Times(2)
	if err := f.Add(ctx, fav1); err != nil {
		t.Fatalf("Couldn't add favorite: %v", err)
	}

	// Second add: same folder, but Created=true. Despite being a duplicate,
	// it must still trigger a FavoriteAdd on the service.
	fav2 := favorites.ToAdd{
		Folder: favorites.Folder{
			Name: "test",
			Type: tlf.Public,
		},
		Data:    favorites.Data{},
		Created: true,
	}
	expected2 := keybase1.Folder{
		Name:       "test",
		FolderType: keybase1.FolderType_PUBLIC,
		Created:    true,
	}
	config.mockKbpki.EXPECT().FavoriteAdd(gomock.Any(), expected2).Return(nil)
	if err := f.Add(ctx, fav2); err != nil {
		t.Fatalf("Couldn't add favorite: %v", err)
	}
}
explode_data.jsonl/14680
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 515 }
[ 2830, 3393, 38839, 2212, 11694, 37095, 10850, 23857, 1155, 353, 8840, 836, 8, 341, 77333, 15001, 11, 2193, 11, 5635, 1669, 9244, 2271, 3803, 1155, 11, 895, 340, 1166, 1669, 1532, 85221, 8754, 340, 1166, 26849, 2461, 2271, 741, 16867, 92...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// Test_Interceptor_BindUnbind counts how often each interceptor hook fires
// over the lifetime of a sender/receiver pair: the per-stream Bind/Unbind
// hooks should fire once (on one side each), while the RTCP reader/writer
// binds and Close fire once per peer (twice in total).
func Test_Interceptor_BindUnbind(t *testing.T) {
	lim := test.TimeOut(time.Second * 10)
	defer lim.Stop()

	report := test.CheckRoutines(t)
	defer report()

	m := &MediaEngine{}
	assert.NoError(t, m.RegisterDefaultCodecs())

	// Atomic hook-invocation counters, incremented by the mock interceptor.
	var (
		cntBindRTCPReader     uint32
		cntBindRTCPWriter     uint32
		cntBindLocalStream    uint32
		cntUnbindLocalStream  uint32
		cntBindRemoteStream   uint32
		cntUnbindRemoteStream uint32
		cntClose              uint32
	)
	mockInterceptor := &mock_interceptor.Interceptor{
		BindRTCPReaderFn: func(reader interceptor.RTCPReader) interceptor.RTCPReader {
			atomic.AddUint32(&cntBindRTCPReader, 1)
			return reader
		},
		BindRTCPWriterFn: func(writer interceptor.RTCPWriter) interceptor.RTCPWriter {
			atomic.AddUint32(&cntBindRTCPWriter, 1)
			return writer
		},
		BindLocalStreamFn: func(i *interceptor.StreamInfo, writer interceptor.RTPWriter) interceptor.RTPWriter {
			atomic.AddUint32(&cntBindLocalStream, 1)
			return writer
		},
		UnbindLocalStreamFn: func(i *interceptor.StreamInfo) {
			atomic.AddUint32(&cntUnbindLocalStream, 1)
		},
		BindRemoteStreamFn: func(i *interceptor.StreamInfo, reader interceptor.RTPReader) interceptor.RTPReader {
			atomic.AddUint32(&cntBindRemoteStream, 1)
			return reader
		},
		UnbindRemoteStreamFn: func(i *interceptor.StreamInfo) {
			atomic.AddUint32(&cntUnbindRemoteStream, 1)
		},
		CloseFn: func() error {
			atomic.AddUint32(&cntClose, 1)
			return nil
		},
	}
	ir := &interceptor.Registry{}
	ir.Add(mockInterceptor)

	// Both peers share the same interceptor registry.
	sender, receiver, err := NewAPI(WithMediaEngine(m), WithInterceptorRegistry(ir)).newPair(Configuration{})
	assert.NoError(t, err)

	track, err := NewTrackLocalStaticSample(RTPCodecCapability{MimeType: "video/vp8"}, "video", "pion")
	assert.NoError(t, err)

	_, err = sender.AddTrack(track)
	assert.NoError(t, err)

	// receiverReady is canceled once the receiver has read an RTP packet.
	receiverReady, receiverReadyFn := context.WithCancel(context.Background())
	receiver.OnTrack(func(track *TrackRemote, _ *RTPReceiver) {
		_, _, readErr := track.ReadRTP()
		assert.NoError(t, readErr)
		receiverReadyFn()
	})

	assert.NoError(t, signalPair(sender, receiver))

	ticker := time.NewTicker(time.Millisecond * 20)
	defer ticker.Stop()
	func() {
		for {
			select {
			case <-receiverReady.Done():
				return
			case <-ticker.C:
				// Send packet to make receiver track actual creates RTPReceiver.
				assert.NoError(t, track.WriteSample(media.Sample{Data: []byte{0xAA}, Duration: time.Second}))
			}
		}
	}()

	closePairNow(t, sender, receiver)

	// Bind/UnbindLocal/RemoteStream should be called from one side.
	if cnt := atomic.LoadUint32(&cntBindLocalStream); cnt != 1 {
		t.Errorf("BindLocalStreamFn is expected to be called once, but called %d times", cnt)
	}
	if cnt := atomic.LoadUint32(&cntUnbindLocalStream); cnt != 1 {
		t.Errorf("UnbindLocalStreamFn is expected to be called once, but called %d times", cnt)
	}
	if cnt := atomic.LoadUint32(&cntBindRemoteStream); cnt != 1 {
		t.Errorf("BindRemoteStreamFn is expected to be called once, but called %d times", cnt)
	}
	if cnt := atomic.LoadUint32(&cntUnbindRemoteStream); cnt != 1 {
		t.Errorf("UnbindRemoteStreamFn is expected to be called once, but called %d times", cnt)
	}
	// BindRTCPWriter/Reader and Close should be called from both side.
	if cnt := atomic.LoadUint32(&cntBindRTCPWriter); cnt != 2 {
		t.Errorf("BindRTCPWriterFn is expected to be called twice, but called %d times", cnt)
	}
	if cnt := atomic.LoadUint32(&cntBindRTCPReader); cnt != 2 {
		t.Errorf("BindRTCPReaderFn is expected to be called twice, but called %d times", cnt)
	}
	if cnt := atomic.LoadUint32(&cntClose); cnt != 2 {
		t.Errorf("CloseFn is expected to be called twice, but called %d times", cnt)
	}
}
explode_data.jsonl/26456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1435 }
[ 2830, 3393, 62, 32786, 1668, 484, 1806, 7666, 1155, 353, 8840, 836, 8, 341, 197, 4659, 1669, 1273, 16299, 2662, 9730, 32435, 353, 220, 16, 15, 340, 16867, 4568, 30213, 2822, 69931, 1669, 1273, 10600, 49, 28628, 1155, 340, 16867, 1895, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIAMToken(t *testing.T) { const iamToken = "this-is-iam-token" creds := NewIAMTokenCredentials(iamToken) iamTokenResp, err := creds.IAMToken(context.Background()) require.NoError(t, err) assert.Equal(t, iamToken, iamTokenResp.GetIamToken()) }
explode_data.jsonl/9079
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 73707, 3323, 1155, 353, 8840, 836, 8, 341, 4777, 97148, 3323, 284, 330, 574, 30430, 12, 4932, 34841, 698, 197, 85734, 1669, 1532, 73707, 3323, 27025, 1956, 309, 3323, 340, 197, 4932, 3323, 36555, 11, 1848, 1669, 73177, 2447,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVolumeClaimTemplates_Configuration(t *testing.T) { sts := performReconciliationAndGetStatefulSet(t, "volume_claim_templates_mdb.yaml") assert.Len(t, sts.Spec.VolumeClaimTemplates, 3) pvcSpec := sts.Spec.VolumeClaimTemplates[2].Spec storage := pvcSpec.Resources.Requests[corev1.ResourceStorage] storageRef := &storage assert.Equal(t, "1Gi", storageRef.String()) assert.Len(t, pvcSpec.AccessModes, 1) assert.Contains(t, pvcSpec.AccessModes, corev1.ReadWriteOnce) }
explode_data.jsonl/80690
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 18902, 45544, 51195, 35412, 2017, 1155, 353, 8840, 836, 8, 341, 18388, 82, 1669, 2736, 693, 98240, 97726, 1397, 1262, 1649, 1155, 11, 330, 25060, 84969, 49526, 717, 1999, 33406, 5130, 6948, 65819, 1155, 11, 51756, 36473, 79106...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestMetaBackend_planBackendMismatchLineage verifies that applying a plan
// whose embedded backend state has a different lineage than the real backend
// state fails with a "lineage" error and leaves local state, backups, and
// backend configuration untouched.
func TestMetaBackend_planBackendMismatchLineage(t *testing.T) {
	// Create a temporary working directory that is empty
	td := tempDir(t)
	copy.CopyDir(testFixturePath("backend-plan-backend-mismatch"), td)
	defer os.RemoveAll(td)
	defer testChdir(t, td)()

	// Get the state for the plan by getting the real state and
	// adding the backend config to it.
	original := testStateRead(t, filepath.Join(
		testFixturePath("backend-plan-backend-empty-config"),
		"local-state.tfstate"))
	backendState := testStateRead(t, filepath.Join(
		testFixturePath("backend-plan-backend-empty-config"),
		DefaultDataDir, DefaultStateFilename))
	planState := original.DeepCopy()

	// Get the real original
	original = testStateRead(t, "local-state.tfstate")

	// Create the plan
	plan := &terraform.Plan{
		Module:  testModule(t, "backend-plan-backend-empty-config"),
		State:   planState,
		Backend: backendState.Backend,
	}

	// Setup the meta
	m := testMetaBackend(t, nil)

	// Get the backend — must fail because the plan's backend state lineage
	// does not match the working directory's backend state lineage.
	_, err := m.Backend(&BackendOpts{Plan: plan})
	if err == nil {
		t.Fatal("should have error")
	}
	if !strings.Contains(err.Error(), "lineage") {
		t.Fatalf("bad: %s", err)
	}

	// Verify our local state didn't change
	actual := testStateRead(t, "local-state.tfstate")
	if !actual.Equal(original) {
		t.Fatalf("bad: %#v", actual)
	}

	// Verify a backup doesn't exist
	if _, err := os.Stat(DefaultStateFilename + DefaultBackupExtension); err == nil {
		t.Fatal("file should not exist")
	}

	// Verify we have no configured backend/legacy
	path := filepath.Join(m.DataDir(), DefaultStateFilename)
	if _, err := os.Stat(path); err == nil {
		t.Fatalf("should not have backend configured")
	}

	// Verify we have no default state
	if _, err := os.Stat(DefaultStateFilename); err == nil {
		t.Fatal("file should not exist")
	}
}
explode_data.jsonl/34349
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 634 }
[ 2830, 3393, 12175, 29699, 26564, 29699, 82572, 2460, 424, 1155, 353, 8840, 836, 8, 341, 197, 322, 4230, 264, 13340, 3238, 6220, 429, 374, 4287, 198, 76373, 1669, 2730, 6184, 1155, 340, 49124, 31770, 6184, 8623, 18930, 1820, 445, 20942, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestStorageDelete(t *testing.T) { // initialize storage storage := Init(driver.NewMemory()) // create fake release rls := ReleaseTestData{ Name: "angry-beaver", Version: 1, }.ToRelease() rls2 := ReleaseTestData{ Name: "angry-beaver", Version: 2, }.ToRelease() assertErrNil(t.Fatal, storage.Create(rls), "StoreRelease") assertErrNil(t.Fatal, storage.Create(rls2), "StoreRelease") // delete the release res, err := storage.Delete(rls.Name, rls.Version) assertErrNil(t.Fatal, err, "DeleteRelease") // verify updated and fetched releases are the same. if !reflect.DeepEqual(rls, res) { t.Fatalf("Expected %q, got %q", rls, res) } hist, err := storage.History(rls.Name) if err != nil { t.Errorf("unexpected error: %s", err) } // We have now deleted one of the two records. if len(hist) != 1 { t.Errorf("expected 1 record for deleted release version, got %d", len(hist)) } if hist[0].Version != 2 { t.Errorf("Expected version to be 2, got %d", hist[0].Version) } }
explode_data.jsonl/35125
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 390 }
[ 2830, 3393, 5793, 6435, 1155, 353, 8840, 836, 8, 341, 197, 322, 9468, 5819, 198, 197, 16172, 1669, 15690, 24032, 7121, 10642, 12367, 197, 322, 1855, 12418, 4879, 198, 197, 2381, 82, 1669, 17381, 83920, 515, 197, 21297, 25, 262, 330, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAESSIVNewKey(t *testing.T) { km, err := registry.GetKeyManager(testutil.AESSIVTypeURL) if err != nil { t.Errorf("cannot obtain AESSIV key manager: %s", err) } m, err := km.NewKey(nil) if err != nil { t.Errorf("km.NewKey(nil) = _, %v; want _, nil", err) } key, _ := m.(*aspb.AesSivKey) if err := validateAESSIVKey(key); err != nil { t.Errorf("validateAESSIVKey(%v) = %v; want nil", key, err) } }
explode_data.jsonl/58685
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 188 }
[ 2830, 3393, 32, 9996, 3090, 3564, 1592, 1155, 353, 8840, 836, 8, 341, 197, 16017, 11, 1848, 1669, 19424, 51723, 2043, 8623, 1314, 875, 9996, 3090, 929, 3144, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 33260, 6851, 362, 999...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAuthInit(t *testing.T) { t.Parallel() ab := authboss.New() router := &mocks.Router{} renderer := &mocks.Renderer{} errHandler := &mocks.ErrorHandler{} ab.Config.Core.Router = router ab.Config.Core.ViewRenderer = renderer ab.Config.Core.ErrorHandler = errHandler a := &Auth{} if err := a.Init(ab); err != nil { t.Fatal(err) } if err := renderer.HasLoadedViews(PageLogin); err != nil { t.Error(err) } if err := router.HasGets("/login"); err != nil { t.Error(err) } if err := router.HasPosts("/login"); err != nil { t.Error(err) } }
explode_data.jsonl/3464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 5087, 3803, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 370, 1669, 4166, 33314, 7121, 2822, 67009, 1669, 609, 16712, 82, 31413, 16094, 83509, 1669, 609, 16712, 82, 27386, 261, 16094, 9859, 3050, 1669, 609, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestNonStreamingRpcRetriesSleep(t *testing.T) { t.Parallel() f := setup(t) defer f.shutDown() f.fake.sleepDelay = time.Second client.RPCTimeouts(map[string]time.Duration{"QueryWriteStatus": 500 * time.Millisecond}).Apply(f.client) got, err := f.client.QueryWriteStatus(f.ctx, &bspb.QueryWriteStatusRequest{}) if got != nil { t.Errorf("client.QueryWriteStatus(ctx, digest) gave result %s, want nil", got) } if err == nil { t.Error("QueryWriteStatus(ctx, {}) = nil; expected Unimplemented error got nil") } else if s, ok := status.FromError(err); ok && s.Code() != codes.Unimplemented { t.Errorf("QueryWriteStatus(ctx, {}) = %v; expected Unimplemented error, got %v", err, s.Code()) } else if !ok { t.Errorf("QueryWriteStatus(ctx, {}) = %v; expected Unimplemented error (status.FromError failed)", err) } }
explode_data.jsonl/5613
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 8121, 76509, 60248, 12020, 4019, 41745, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1166, 1669, 6505, 1155, 340, 16867, 282, 2395, 332, 4454, 741, 1166, 94624, 11118, 20039, 284, 882, 32435, 198, 25291, 2013, 47, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestServerResponseServerHeader(t *testing.T) { serverName := "foobar serv" s := &Server{ Handler: func(ctx *RequestCtx) { name := ctx.Response.Header.Server() if string(name) != serverName { fmt.Fprintf(ctx, "unexpected server name: %q. Expecting %q", name, serverName) } else { ctx.WriteString("OK") } // make sure the server name is sent to the client after ctx.Response.Reset() ctx.NotFound() }, Name: serverName, } ln := fasthttputil.NewInmemoryListener() serverCh := make(chan struct{}) go func() { if err := s.Serve(ln); err != nil { t.Fatalf("unexpected error: %s", err) } close(serverCh) }() clientCh := make(chan struct{}) go func() { c, err := ln.Dial() if err != nil { t.Fatalf("unexpected error: %s", err) } if _, err = c.Write([]byte("GET / HTTP/1.1\r\nHost: aa\r\n\r\n")); err != nil { t.Fatalf("unexpected error: %s", err) } br := bufio.NewReader(c) var resp Response if err = resp.Read(br); err != nil { t.Fatalf("unexpected error: %s", err) } if resp.StatusCode() != StatusNotFound { t.Fatalf("unexpected status code: %d. Expecting %d", resp.StatusCode(), StatusNotFound) } if string(resp.Body()) != "404 Page not found" { t.Fatalf("unexpected body: %q. Expecting %q", resp.Body(), "404 Page not found") } if string(resp.Header.Server()) != serverName { t.Fatalf("unexpected server header: %q. Expecting %q", resp.Header.Server(), serverName) } if err = c.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } close(clientCh) }() select { case <-clientCh: case <-time.After(time.Second): t.Fatalf("timeout") } if err := ln.Close(); err != nil { t.Fatalf("unexpected error: %s", err) } select { case <-serverCh: case <-time.After(time.Second): t.Fatalf("timeout") } }
explode_data.jsonl/73269
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 752 }
[ 2830, 3393, 5475, 2582, 5475, 4047, 1155, 353, 8840, 836, 8, 341, 41057, 675, 1669, 330, 50267, 4853, 1837, 1903, 1669, 609, 5475, 515, 197, 197, 3050, 25, 2915, 7502, 353, 1900, 23684, 8, 341, 298, 11609, 1669, 5635, 12574, 15753, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSugarFieldsInvalidPairs(t *testing.T) { withSugar(t, TraceLevel, nil, func(logger *SugaredLogger, logs *observer.ObservedLogs) { logger.With(42, "foo", []string{"bar"}, "baz").Info("") output := logs.AllUntimed() // Double-check that the actual message was logged. require.Equal(t, 2, len(output), "Unexpected number of entries logged.") require.Equal(t, observer.LoggedEntry{Context: []Field{}}, output[1], "Unexpected non-error log entry.") // Assert that the error message's structured fields serialize properly. require.Equal(t, 1, len(output[0].Context), "Expected one field in error entry context.") enc := zapcore.NewMapObjectEncoder() output[0].Context[0].AddTo(enc) assert.Equal(t, []interface{}{ map[string]interface{}{"position": int64(0), "key": int64(42), "value": "foo"}, map[string]interface{}{"position": int64(2), "key": []interface{}{"bar"}, "value": "baz"}, }, enc.Fields["invalid"], "Unexpected output when logging invalid key-value pairs.") }) }
explode_data.jsonl/5022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 83414, 8941, 7928, 54228, 1155, 353, 8840, 836, 8, 341, 46948, 83414, 1155, 11, 27163, 4449, 11, 2092, 11, 2915, 37833, 353, 50, 768, 1605, 7395, 11, 18422, 353, 30730, 8382, 1279, 2771, 51053, 8, 341, 197, 17060, 26124, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRebuildChannelsError(t *testing.T) { computer := mockComputer{} auth := NewAuthenticator(gTestBucket, &computer) role, err := auth.NewRole("testRole2", ch.SetOf("explicit1")) assert.Equals(t, err, nil) assert.Equals(t, auth.InvalidateChannels(role), nil) computer.err = fmt.Errorf("I'm sorry, Dave.") role2, err := auth.GetRole("testRole2") assert.Equals(t, role2, nil) assert.DeepEquals(t, err, computer.err) }
explode_data.jsonl/31563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 693, 5834, 35925, 1454, 1155, 353, 8840, 836, 8, 341, 32810, 11281, 1669, 7860, 37332, 16094, 78011, 1669, 1532, 5087, 61393, 3268, 2271, 36018, 11, 609, 43111, 340, 197, 5778, 11, 1848, 1669, 4166, 7121, 9030, 445, 1944, 90...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1