text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestNewPolicyRequirementFromJSON(t *testing.T) { // Sample success. Others tested in the individual PolicyRequirement.UnmarshalJSON implementations. validReq := NewPRInsecureAcceptAnything() validJSON, err := json.Marshal(validReq) require.NoError(t, err) req, err := newPolicyRequirementFromJSON(validJSON) require.NoError(t, err) assert.Equal(t, validReq, req) // Invalid for _, invalid := range []interface{}{ // Not an object 1, // Missing type prCommon{}, // Invalid type prCommon{Type: "this is invalid"}, // Valid type but invalid contents prSignedBy{ prCommon: prCommon{Type: prTypeSignedBy}, KeyType: "this is invalid", }, } { testJSON, err := json.Marshal(invalid) require.NoError(t, err) _, err = newPolicyRequirementFromJSON(testJSON) assert.Error(t, err, string(testJSON)) } }
explode_data.jsonl/36497
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 3564, 13825, 75802, 3830, 5370, 1155, 353, 8840, 836, 8, 341, 197, 322, 19143, 2393, 13, 25028, 12510, 304, 279, 3842, 10974, 75802, 38097, 5370, 38337, 624, 56322, 27234, 1669, 1532, 6480, 641, 25132, 16646, 77303, 741, 56322...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRecordSetCreateIntegrationARecord(t *testing.T) { c := client() zs, err := c.ZonesListAll(ListFilter{}) if err != nil { t.Error(err) } rc, err := c.RecordSetCreate(&RecordSet{ Name: "integration-test", ZoneID: zs[0].ID, Type: "A", TTL: 60, Records: []Record{ { Address: "127.0.0.1", }, }, }) if err != nil { t.Error(err) } createdID := rc.RecordSet.ID limit := 10 for i := 0; i < limit; time.Sleep(10 * time.Second) { i++ rg, err := c.RecordSet(zs[0].ID, createdID) if err == nil && rg.ID != createdID { t.Error(fmt.Sprintf("unable to get record set %s", createdID)) } if err == nil && rg.ID == createdID { break } if i == (limit - 1) { fmt.Printf("%d retries reached in polling for record set %s", limit, createdID) t.Error(err) } } }
explode_data.jsonl/12121
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 377 }
[ 2830, 3393, 6471, 1649, 4021, 52464, 32, 6471, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2943, 741, 20832, 82, 11, 1848, 1669, 272, 13476, 3154, 852, 2403, 10278, 5632, 37790, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, 340, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestCompetingCRDOwnersExist(t *testing.T) { testNamespace := "default" tests := []struct { name string csv *v1alpha1.ClusterServiceVersion existingCRDOwners map[string][]string expectedErr error expectedResult bool }{ { name: "NoCompetingOwnersExist", csv: csv("turkey", testNamespace, []string{"feathers"}, nil), existingCRDOwners: nil, expectedErr: nil, expectedResult: false, }, { name: "OnlyCompetingWithSelf", csv: csv("turkey", testNamespace, []string{"feathers"}, nil), existingCRDOwners: map[string][]string{ "feathers": {"turkey"}, }, expectedErr: nil, expectedResult: false, }, { name: "CompetingOwnersExist", csv: csv("turkey", testNamespace, []string{"feathers"}, nil), existingCRDOwners: map[string][]string{ "feathers": {"seagull"}, }, expectedErr: nil, expectedResult: true, }, { name: "CompetingOwnerExistsOnSecondCRD", csv: csv("turkey", testNamespace, []string{"feathers", "beak"}, nil), existingCRDOwners: map[string][]string{ "milk": {"cow"}, "beak": {"squid"}, }, expectedErr: nil, expectedResult: true, }, { name: "MoreThanOneCompetingOwnerExists", csv: csv("turkey", testNamespace, []string{"feathers"}, nil), existingCRDOwners: map[string][]string{ "feathers": {"seagull", "turkey"}, }, expectedErr: olmerrors.NewMultipleExistingCRDOwnersError([]string{"seagull", "turkey"}, "feathers", testNamespace), expectedResult: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { t.Parallel() competing, err := competingCRDOwnersExist(testNamespace, tt.csv, tt.existingCRDOwners) // Assert the error is as expected if tt.expectedErr == nil { require.Nil(t, err) } else { require.Equal(t, tt.expectedErr, err) } require.Equal(t, competing, tt.expectedResult) }) } }
explode_data.jsonl/37282
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 895 }
[ 2830, 3393, 75039, 287, 8973, 5865, 1522, 388, 25613, 1155, 353, 8840, 836, 8, 1476, 18185, 22699, 1669, 330, 2258, 698, 78216, 1669, 3056, 1235, 341, 197, 11609, 1060, 914, 198, 197, 1444, 3492, 2290, 353, 85, 16, 7141, 16, 72883, 18...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSchema(t *testing.T) { tab := []struct { inst interface{} name string opts *SchemaOptions err bool s string }{ // 0 {inst: nil, err: true}, {inst: interface{}(nil), err: true}, {testSchema{}, "", nil, false, testSchemaSFFF}, {testSchema{}, "", &SchemaOptions{}, false, testSchemaSFFF}, {testSchema{}, "", &SchemaOptions{KeepPrefix: true}, false, testSchemaSFFT}, // 5 {testSchema{}, "", &SchemaOptions{NoIfNotExists: true}, false, testSchemaSFTF}, {testSchema{}, "", &SchemaOptions{NoIfNotExists: true, KeepPrefix: true}, false, testSchemaSFTT}, {testSchema{}, "", &SchemaOptions{NoTransaction: true}, false, testSchemaSTFF}, {testSchema{}, "", &SchemaOptions{NoTransaction: true, KeepPrefix: true}, false, testSchemaSTFT}, {testSchema{}, "", &SchemaOptions{NoTransaction: true, NoIfNotExists: true}, false, testSchemaSTTF}, // 10 {testSchema{}, "", &SchemaOptions{NoTransaction: true, NoIfNotExists: true, KeepPrefix: true}, false, testSchemaSTTT}, {testSchema2{}, "", nil, true, ""}, {testSchema3{}, "", nil, false, testSchema3S}, {testSchema4{}, "", nil, false, testSchema4S}, {testSchema5{}, "", nil, true, ""}, // 15 {testSchema6{}, "", &SchemaOptions{NoTransaction: true, NoIfNotExists: true}, false, testSchema6S}, {testSchema7{}, "", &SchemaOptions{NoIfNotExists: true}, false, testSchema7S}, {testSchema8{}, "", nil, false, testSchema8S}, {&testSchema8{}, "", nil, false, testSchema8S}, {&testSchema9{}, "", nil, false, testSchema9S}, } for iTest, test := range tab { l, err := Schema(test.inst, test.name, test.opts) if g, e := err != nil, test.err; g != e { t.Fatal(iTest, g, e, err) } if err != nil { t.Log(iTest, err) continue } s, err := Compile(test.s) if err != nil { panic("internal error 070") } if g, e := l.String(), s.String(); g != e { t.Fatalf("%d\n----\n%s\n----\n%s", iTest, g, e) } } }
explode_data.jsonl/42629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 827 }
[ 2830, 3393, 8632, 1155, 353, 8840, 836, 8, 341, 58149, 1669, 3056, 1235, 341, 197, 88656, 3749, 16094, 197, 11609, 914, 198, 197, 64734, 353, 8632, 3798, 198, 197, 9859, 220, 1807, 198, 197, 1903, 262, 914, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestReconcileServiceInstanceSuccessOnFinalRetry(t *testing.T) { fakeKubeClient, fakeCatalogClient, fakeClusterServiceBrokerClient, testController, sharedInformers := newTestController(t, fakeosb.FakeClientConfiguration{ ProvisionReaction: &fakeosb.ProvisionReaction{ Response: &osb.ProvisionResponse{}, }, }) sharedInformers.ClusterServiceBrokers().Informer().GetStore().Add(getTestClusterServiceBroker()) sharedInformers.ClusterServiceClasses().Informer().GetStore().Add(getTestClusterServiceClass()) sharedInformers.ClusterServicePlans().Informer().GetStore().Add(getTestClusterServicePlan()) instance := getTestServiceInstanceWithClusterRefs() instance.Status.CurrentOperation = v1beta1.ServiceInstanceOperationProvision instance.Status.InProgressProperties = &v1beta1.ServiceInstancePropertiesState{ ClusterServicePlanExternalName: testClusterServicePlanName, ClusterServicePlanExternalID: testClusterServicePlanGUID, } instance.Status.ObservedGeneration = instance.Generation startTime := metav1.NewTime(time.Now().Add(-7 * 24 * time.Hour)) instance.Status.OperationStartTime = &startTime if err := reconcileServiceInstance(t, testController, instance); err != nil { t.Fatalf("This should not fail : %v", err) } brokerActions := fakeClusterServiceBrokerClient.Actions() assertNumberOfBrokerActions(t, brokerActions, 1) assertProvision(t, brokerActions[0], &osb.ProvisionRequest{ AcceptsIncomplete: true, InstanceID: testServiceInstanceGUID, ServiceID: testClusterServiceClassGUID, PlanID: testClusterServicePlanGUID, OrganizationGUID: testClusterID, SpaceGUID: testNamespaceGUID, Context: testContext}) actions := fakeCatalogClient.Actions() assertNumberOfActions(t, actions, 1) updatedServiceInstance := assertUpdateStatus(t, actions[0], instance) assertServiceInstanceOperationSuccess(t, updatedServiceInstance, v1beta1.ServiceInstanceOperationProvision, testClusterServicePlanName, testClusterServicePlanGUID, instance) // verify no kube resources created // One single action comes 
from getting namespace uid kubeActions := fakeKubeClient.Actions() if err := checkKubeClientActions(kubeActions, []kubeClientAction{ {verb: "get", resourceName: "namespaces", checkType: checkGetActionType}, }); err != nil { t.Fatal(err) } events := getRecordedEvents(testController) expectedEvent := normalEventBuilder(successProvisionReason).msg("The instance was provisioned successfully") if err := checkEvents(events, expectedEvent.stringArr()); err != nil { t.Fatal(err) } }
explode_data.jsonl/58169
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 824 }
[ 2830, 3393, 693, 40446, 457, 1860, 2523, 7188, 1925, 19357, 51560, 1155, 353, 8840, 836, 8, 341, 1166, 726, 42, 3760, 2959, 11, 12418, 41606, 2959, 11, 12418, 28678, 1860, 65545, 2959, 11, 1273, 2051, 11, 6094, 37891, 388, 1669, 501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMarshalErrorsWritesTheExpectedPayload(t *testing.T) { var marshalErrorsTableTasts = []struct { Title string In []*ErrorObject Out map[string]interface{} }{ { Title: "TestFieldsAreSerializedAsNeeded", In: []*ErrorObject{{ID: "0", Title: "Test title.", Detail: "Test detail", Status: "400", Code: "E1100"}}, Out: map[string]interface{}{"errors": []interface{}{ map[string]interface{}{"id": "0", "title": "Test title.", "detail": "Test detail", "status": "400", "code": "E1100"}, }}, }, { Title: "TestMetaFieldIsSerializedProperly", In: []*ErrorObject{{Title: "Test title.", Detail: "Test detail", Meta: &map[string]interface{}{"key": "val"}}}, Out: map[string]interface{}{"errors": []interface{}{ map[string]interface{}{"title": "Test title.", "detail": "Test detail", "meta": map[string]interface{}{"key": "val"}}, }}, }, { Title: "TestSourceFieldIsSerializedProperly", In: []*ErrorObject{{Source: &ErrorSource{Pointer: "/data/attributes/email", Parameter: "email"}}}, Out: map[string]interface{}{"errors": []interface{}{ map[string]interface{}{"source": map[string]interface{}{"pointer": "/data/attributes/email", "parameter": "email"}}, }}, }, } for _, testRow := range marshalErrorsTableTasts { t.Run(testRow.Title, func(t *testing.T) { buffer, output := bytes.NewBuffer(nil), map[string]interface{}{} var writer io.Writer = buffer _ = MarshalErrors(writer, testRow.In) json.Unmarshal(buffer.Bytes(), &output) if !reflect.DeepEqual(output, testRow.Out) { t.Fatalf("Expected: \n%#v \nto equal: \n%#v", output, testRow.Out) } }) } }
explode_data.jsonl/36926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 655 }
[ 2830, 3393, 55438, 13877, 93638, 785, 18896, 29683, 1155, 353, 8840, 836, 8, 341, 2405, 60771, 13877, 2556, 51, 11757, 284, 3056, 1235, 341, 197, 92233, 914, 198, 197, 70167, 262, 29838, 1454, 1190, 198, 197, 197, 2662, 256, 2415, 14032...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBTreeMultipleSearch(t *testing.T) { tree := newRangeTree() tree.update(&metapb.Range{StartKey: []byte("a"), EndKey: []byte("e")}) tree.update(&metapb.Range{StartKey: []byte("e"), EndKey: []byte("k")}) tree.update(&metapb.Range{StartKey: []byte("k"), EndKey: []byte("t")}) tree.update(&metapb.Range{StartKey: []byte("t"), EndKey: []byte("w")}) tree.update(&metapb.Range{StartKey: []byte("w"), EndKey: []byte("z")}) rs := tree.multipleSearch([]byte("a"), 10) if len(rs) != 5 { t.Errorf("test failed %v", rs) return } r := rs[0] if bytes.Compare([]byte("a"), r.StartKey) != 0 || bytes.Compare([]byte("e"), r.EndKey) != 0 { t.Errorf("test failed") return } }
explode_data.jsonl/25352
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 280 }
[ 2830, 3393, 33, 6533, 32089, 5890, 1155, 353, 8840, 836, 8, 341, 51968, 1669, 501, 6046, 6533, 741, 51968, 5317, 2099, 4059, 391, 65, 24783, 90, 3479, 1592, 25, 3056, 3782, 445, 64, 3975, 3972, 1592, 25, 3056, 3782, 445, 68, 899, 35...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestFmtInterface(t *testing.T) { var i1 interface{} i1 = "abc" s := Sprintf("%s", i1) if s != "abc" { t.Errorf(`Sprintf("%%s", empty("abc")) = %q want %q`, s, "abc") } }
explode_data.jsonl/46254
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 87 }
[ 2830, 3393, 93322, 5051, 1155, 353, 8840, 836, 8, 341, 2405, 600, 16, 3749, 16094, 8230, 16, 284, 330, 13683, 698, 1903, 1669, 328, 2517, 4430, 82, 497, 600, 16, 340, 743, 274, 961, 330, 13683, 1, 341, 197, 3244, 13080, 5809, 50, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_Number_Range(t *testing.T) { testNumberRange(t, "Number", []testNumberMinMax{ {"1", Number(0), 0, 0, false}, // test zero {"2", Number(0), 1, 2, false}, // not required {"3", Number(0).Required(), 1, 2, true}, // required {"4", Number(1), 2, 4, true}, {"5", Number(2), 2, 4, false}, {"6", Number(3), 2, 4, false}, {"7", Number(4), 2, 4, false}, {"7", Number(5), 2, 4, true}, }) testNumberRange(t, "NumF32", []testNumberMinMax{ {"1", NumF32(0), 0, 0, false}, // test zero {"2", NumF32(0), 1, 2, false}, // not required {"3", NumF32(0).Required(), 1, 2, true}, // required {"4", NumF32(1), 2, 4, true}, {"5", NumF32(2), 2, 4, false}, {"6", NumF32(3), 2, 4, false}, {"7", NumF32(4), 2, 4, false}, {"7", NumF32(5), 2, 4, true}, }) testNumberRange(t, "NumF64", []testNumberMinMax{ {"1", NumF64(0), 0, 0, false}, // test zero {"2", NumF64(0), 1, 2, false}, // not required {"3", NumF64(0).Required(), 1, 2, true}, // required {"4", NumF64(1), 2, 4, true}, {"5", NumF64(2), 2, 4, false}, {"6", NumF64(3), 2, 4, false}, {"7", NumF64(4), 2, 4, false}, {"7", NumF64(5), 2, 4, true}, }) testNumberRange(t, "NumI32", []testNumberMinMax{ {"1", NumI32(0), 0, 0, false}, // test zero {"2", NumI32(0), 1, 2, false}, // not required {"3", NumI32(0).Required(), 1, 2, true}, // required {"4", NumI32(1), 2, 4, true}, {"5", NumI32(2), 2, 4, false}, {"6", NumI32(3), 2, 4, false}, {"7", NumI32(4), 2, 4, false}, {"7", NumI32(5), 2, 4, true}, }) testNumberRange(t, "NumI64", []testNumberMinMax{ {"1", NumI64(0), 0, 0, false}, // test zero {"2", NumI64(0), 1, 2, false}, // not required {"3", NumI64(0).Required(), 1, 2, true}, // required {"4", NumI64(1), 2, 4, true}, {"5", NumI64(2), 2, 4, false}, {"6", NumI64(3), 2, 4, false}, {"7", NumI64(4), 2, 4, false}, {"7", NumI64(5), 2, 4, true}, }) }
explode_data.jsonl/57518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 978 }
[ 2830, 3393, 51799, 2568, 844, 1155, 353, 8840, 836, 8, 341, 18185, 2833, 6046, 1155, 11, 330, 2833, 497, 3056, 1944, 2833, 92304, 515, 197, 197, 4913, 16, 497, 5624, 7, 15, 701, 220, 15, 11, 220, 15, 11, 895, 2137, 1843, 442, 1273...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenerateRedirectURL_NoRecipesInstalled(t *testing.T) { t.Parallel() g := NewPlatformLinkGenerator() // We unset the API key in the unit test so we don't make // an HTTP request to the New Relic short URL service and // so we can test the referrer param being added for the fallback // installation strategy. g.apiKey = "" installStatus := InstallStatus{} expectedEncodedQueryParamSubstring := "eyJuZXJkbGV0SWQiOiJucjEtaW5zdGFsbC1uZXdyZWxpYy5pbnN0YWxsYXRpb24tcGxhbiIsInJlZmVycmVyIjoibmV3cmVsaWMtY2xpIn0=" result := g.GenerateRedirectURL(installStatus) require.Contains(t, result, expectedEncodedQueryParamSubstring) }
explode_data.jsonl/15078
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 236 }
[ 2830, 3393, 31115, 17725, 3144, 36989, 69853, 60800, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3174, 1669, 1532, 17296, 3939, 12561, 741, 197, 322, 1205, 18000, 279, 5333, 1376, 304, 279, 4982, 1273, 773, 582, 1513, 944, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadManifest(t *testing.T) { cxt := context.NewTestContext(t) cxt.AddTestFile("testdata/simple.porter.yaml", config.Name) m, err := LoadManifestFrom(cxt.Context, config.Name) require.NoError(t, err, "could not load manifest") require.NotNil(t, m, "manifest was nil") assert.Equal(t, []MixinDeclaration{{Name: "exec"}}, m.Mixins, "expected manifest to declare the exec mixin") require.Len(t, m.Install, 1, "expected 1 install step") installStep := m.Install[0] description, _ := installStep.GetDescription() assert.NotNil(t, description, "expected the install description to be populated") mixin := installStep.GetMixinName() assert.Equal(t, "exec", mixin, "incorrect install step mixin used") require.Len(t, m.CustomActions, 1, "expected manifest to declare 1 custom action") require.Contains(t, m.CustomActions, "status", "expected manifest to declare a status action") statusStep := m.CustomActions["status"][0] description, _ = statusStep.GetDescription() assert.Equal(t, "Get World Status", description, "unexpected status step description") mixin = statusStep.GetMixinName() assert.Equal(t, "exec", mixin, "unexpected status step mixin") }
explode_data.jsonl/37701
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 382 }
[ 2830, 3393, 5879, 38495, 1155, 353, 8840, 836, 8, 341, 1444, 2252, 1669, 2266, 7121, 2271, 1972, 1155, 692, 1444, 2252, 1904, 2271, 1703, 445, 92425, 67195, 14598, 261, 33406, 497, 2193, 2967, 692, 2109, 11, 1848, 1669, 8893, 38495, 383...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEachArray(t *testing.T) { tmpl, err := ParseFile("testdir/test_each.slim") if err != nil { t.Fatal(err) } var buf bytes.Buffer err = tmpl.Execute(&buf, Values{ "foo": []string{"foo", "bar", "baz"}, }) if err != nil { t.Fatal(err) } expect := readFile(t, "testdir/test_each.html") got := buf.String() if expect != got { t.Fatalf("expected %v but %v", expect, got) } }
explode_data.jsonl/80428
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 175 }
[ 2830, 3393, 4854, 1857, 1155, 353, 8840, 836, 8, 341, 3244, 54010, 11, 1848, 1669, 14775, 1703, 445, 1944, 3741, 12697, 32046, 74257, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 2405, 6607, 5820, 22622, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestReplyAudio(t *testing.T) { message := mockMessage() go message.ReplyAudio("server.go") reply := <-message.Replies if reply.Data == "" { t.Error("Reply should contain audio url") } }
explode_data.jsonl/25063
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 20841, 14755, 1155, 353, 8840, 836, 8, 341, 24753, 1669, 7860, 2052, 741, 30680, 1943, 2817, 2541, 14755, 445, 4030, 18002, 1138, 86149, 1669, 9119, 1994, 2817, 7202, 198, 743, 9851, 3336, 621, 1591, 341, 197, 3244, 6141, 44...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSliceFloat32(t *testing.T) { a := SliceTestObject{} b := SliceTestObject{} assert.Equal(t, a, b) assert.Equal(t, a.Equals(b), true, fmt.Sprintf("\an:\n%s\nb:\n%s\n", spew.Sdump(a), spew.Sdump(b))) // single deletion diff at index 0 a.VFloat32 = append(a.VFloat32, 3.34) // sa = {VA}, sb = nil testSliceFloat32DiffAndApply(t, a, b, 1) // single modification diff at index 0 b.VFloat32 = append(b.VFloat32, 5.42) // sa = {VA}, sb = {VB} testSliceFloat32DiffAndApply(t, a, b, 1) // single insertion diff at index 0 a = SliceTestObject{} // sa = nil, sb = {VB} testSliceFloat32DiffAndApply(t, a, b, 1) // single deletion diff at index > 0 a.VFloat32 = append(a.VFloat32, 5.42) a.VFloat32 = append(a.VFloat32, 3.34) // sa = {VB, VA}, sb = {VB} testSliceFloat32DiffAndApply(t, a, b, 1) // single modification diff at index > 0 b.VFloat32 = append(b.VFloat32, 5.42) // sa = {VB, VA}, sb = {VB, VB} testSliceFloat32DiffAndApply(t, a, b, 1) // single insertion diff at index > 0 a.VFloat32 = a.VFloat32[:len(a.VFloat32)-1] // sa = {VB}, sb = {VB, VB} testSliceFloat32DiffAndApply(t, a, b, 1) // multiple deletion diff a.VFloat32 = append(a.VFloat32, 3.34) a.VFloat32 = append(a.VFloat32, 3.34) a.VFloat32 = append(a.VFloat32, 3.34) a.VFloat32 = append(a.VFloat32, 3.34) b = SliceTestObject{} b.VFloat32 = append(b.VFloat32, 5.42) // sa = {VB, VA, VA, VA, VA}, sb = {VB} testSliceFloat32DiffAndApply(t, a, b, 4) // multiple modification diff b.VFloat32[0] = 3.34 b.VFloat32 = append(b.VFloat32, 5.42) b.VFloat32 = append(b.VFloat32, 5.42) b.VFloat32 = append(b.VFloat32, 5.42) b.VFloat32 = append(b.VFloat32, 5.42) // sa = {VB, VA, VA, VA, VA}, sb = {VA, VB, VB, VB, VB} testSliceFloat32DiffAndApply(t, a, b, 5) // multiple insertion diff a.VFloat32[0] = 3.34 a.VFloat32 = a.VFloat32[:1] // sa = {VA}, sb = {VA, VB, VB, VB, VB} testSliceFloat32DiffAndApply(t, a, b, 4) // multiple modifications and insertions diff a.VFloat32[0] = 5.42 a.VFloat32 = append(a.VFloat32, 3.34) // sa = {VA, VB}, sb = 
{VA, VB, VB, VB, VB} testSliceFloat32DiffAndApply(t, a, b, 5) }
explode_data.jsonl/45910
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1069 }
[ 2830, 3393, 33236, 5442, 18, 17, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 56476, 2271, 1190, 16094, 2233, 1669, 56476, 2271, 1190, 16094, 262, 2060, 12808, 1155, 11, 264, 11, 293, 340, 262, 2060, 12808, 1155, 11, 264, 16207, 1883, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIfEvtImplementsIEvt(t *testing.T) { // Given cfg := config.NewConfig() id := model.NewGreenhouseID(cfg.GreenhouseId()) traceId := testing2.TEST_TRACE_ID temp := 30.0 hum := 22.0 evt := NewEvt(id, traceId, temp, hum) // When b := verifyIEvt(evt) // Then assert.True(t, b) }
explode_data.jsonl/57227
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 2679, 89120, 1427, 4674, 5371, 9708, 1155, 353, 8840, 836, 8, 341, 197, 322, 16246, 198, 50286, 1669, 2193, 7121, 2648, 741, 15710, 1669, 1614, 7121, 19576, 7675, 915, 28272, 64010, 7675, 764, 2398, 65058, 764, 1669, 7497, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBtoI(t *testing.T) { testCases := []struct { name string argument bool want int }{ {"True", true, 1}, {"False", false, 0}, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { got := BtoI(tc.argument) if got != tc.want { t.Errorf("BtoI(%t) = %d, wanted %d", tc.argument, got, tc.want) } }) } }
explode_data.jsonl/12595
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 33, 983, 40, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 197, 14479, 1807, 198, 197, 50780, 257, 526, 198, 197, 59403, 197, 197, 4913, 2514, 497, 830, 11, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAnnotatingExecuteShards(t *testing.T) { keyspace, shards := setUpSandboxWithTwoShards("TestAnnotatingExecuteShards") _, err := rpcVTGate.ExecuteShards( context.Background(), "INSERT INTO table () VALUES();", nil, keyspace, []string{"20-40"}, topodatapb.TabletType_MASTER, nil, false, nil) if err != nil { t.Fatalf("want nil, got %v", err) } verifyQueryAnnotatedAsUnfriendly(t, shards[1]) }
explode_data.jsonl/7848
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 2082, 1921, 1095, 17174, 2016, 2347, 1155, 353, 8840, 836, 8, 341, 23634, 8746, 11, 74110, 1669, 18620, 50, 31536, 2354, 11613, 2016, 2347, 445, 2271, 2082, 1921, 1095, 17174, 2016, 2347, 1138, 197, 6878, 1848, 1669, 35596, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestConvertPyTorchJobToUnstructured(t *testing.T) { testName := "test-job" testUID := types.UID("test-UID") job := &pyv1.PyTorchJob{ TypeMeta: metav1.TypeMeta{ Kind: pyv1.Kind, }, ObjectMeta: metav1.ObjectMeta{ Name: testName, UID: testUID, }, } _, err := testutil.ConvertPyTorchJobToUnstructured(job) if err != nil { t.Errorf("Expected error to be nil while got %v", err) } }
explode_data.jsonl/77551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 12012, 13828, 51, 21584, 12245, 1249, 1806, 51143, 1155, 353, 8840, 836, 8, 341, 18185, 675, 1669, 330, 1944, 69948, 698, 18185, 6463, 1669, 4494, 5255, 915, 445, 1944, 12, 6463, 1138, 68577, 1669, 609, 3288, 85, 16, 1069, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIntegration_Scheduler(t *testing.T) { t.Parallel() rpcClient, gethClient, _, assertMocksCalled := cltest.NewEthMocksWithStartupAssertions(t) defer assertMocksCalled() app, cleanup := cltest.NewApplication(t, eth.NewClientWith(rpcClient, gethClient), ) defer cleanup() app.Start() j := cltest.FixtureCreateJobViaWeb(t, app, "fixtures/web/scheduler_job.json") cltest.WaitForRunsAtLeast(t, j, app.Store, 1) initr := j.Initiators[0] assert.Equal(t, models.InitiatorCron, initr.Type) assert.Equal(t, "CRON_TZ=UTC * * * * * *", string(initr.Schedule), "Wrong cron schedule saved") }
explode_data.jsonl/75888
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 52464, 1098, 15222, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 7000, 3992, 2959, 11, 633, 71, 2959, 11, 8358, 2060, 72577, 20960, 1669, 1185, 1944, 7121, 65390, 11571, 16056, 39076, 90206, 1155, 340, 16867, 2060, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRedis_BitOpAnd(t *testing.T) { runOnRedis(t, func(client *Redis) { err := client.Set("key1", "0") assert.Nil(t, err) err = client.Set("key2", "1") assert.Nil(t, err) _, err = NewRedis(client.Addr, "").BitOpAnd("destKey", "key1", "key2") assert.NotNil(t, err) val, err := client.BitOpAnd("destKey", "key1", "key2") assert.Nil(t, err) assert.Equal(t, int64(1), val) valStr, err := client.Get("destKey") assert.Nil(t, err) //destKey binary 110000 ascii 0 assert.Equal(t, "0", valStr) }) }
explode_data.jsonl/39174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 250 }
[ 2830, 3393, 48137, 1668, 275, 7125, 3036, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 9859, 1669, 2943, 4202, 445, 792, 16, 497, 330, 15, 1138, 197, 6948, 59678, 1155, 11, 1848, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestExplicitRouteSuffixGetApplied(t *testing.T) { explicitRoutingSuffix := "acme.com" viper.Set(config.RoutingSuffix.Name, explicitRoutingSuffix) defer viper.Reset() setDefaultRoutingPrefix(testConfig.Ip) if viper.Get(config.RoutingSuffix.Name) != explicitRoutingSuffix { t.Fatalf("Expected argument '%s'. Received '%s'", explicitRoutingSuffix, viper.Get(config.RoutingSuffix.Name)) } }
explode_data.jsonl/12421
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 98923, 4899, 40177, 1949, 75856, 1155, 353, 8840, 836, 8, 341, 94257, 24701, 40177, 1669, 330, 580, 2660, 905, 1837, 5195, 12858, 4202, 8754, 2013, 10909, 40177, 2967, 11, 11464, 24701, 40177, 340, 16867, 95132, 36660, 2822, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPrincipalIsValid(t *testing.T) { testCases := []struct { principal Principal expectedResult bool }{ {NewPrincipal("*"), true}, {NewPrincipal("arn:aws:iam::AccountNumber:root"), true}, {NewPrincipal(), false}, } for i, testCase := range testCases { result := testCase.principal.IsValid() if result != testCase.expectedResult { t.Fatalf("case %v: expected: %v, got: %v\n", i+1, testCase.expectedResult, result) } } }
explode_data.jsonl/39963
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 31771, 55470, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 25653, 15702, 414, 36309, 198, 197, 42400, 2077, 1807, 198, 197, 59403, 197, 197, 90, 3564, 31771, 29592, 3975, 830, 1583, 197, 197, 90, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestMaybeMatchScript verifies that a shutdown script is accepted when it
// matches the upfront script — or when no upfront script was set — and is
// rejected with errUpfrontShutdownScriptMismatch otherwise.
func TestMaybeMatchScript(t *testing.T) {
	// Two distinct random delivery addresses to model match/mismatch cases.
	addr1 := randDeliveryAddress(t)
	addr2 := randDeliveryAddress(t)
	tests := []struct {
		name           string
		shutdownScript lnwire.DeliveryAddress
		upfrontScript  lnwire.DeliveryAddress
		expectedErr    error
	}{
		{
			name:           "no upfront shutdown set, script ok",
			shutdownScript: addr1,
			upfrontScript:  []byte{},
			expectedErr:    nil,
		},
		{
			name:           "upfront shutdown set, script ok",
			shutdownScript: addr1,
			upfrontScript:  addr1,
			expectedErr:    nil,
		},
		{
			name:           "upfront shutdown set, script not ok",
			shutdownScript: addr1,
			upfrontScript:  addr2,
			expectedErr:    errUpfrontShutdownScriptMismatch,
		},
		{
			name:           "nil shutdown and empty upfront",
			shutdownScript: nil,
			upfrontScript:  []byte{},
			expectedErr:    nil,
		},
	}
	for _, test := range tests {
		test := test // capture range variable for the subtest closure
		t.Run(test.name, func(t *testing.T) {
			// First argument is a callback supplied to maybeMatchScript; a
			// no-op here since only the comparison outcome is under test
			// (presumably invoked on success — confirm in implementation).
			err := maybeMatchScript(
				func() error { return nil },
				test.upfrontScript, test.shutdownScript,
			)
			if err != test.expectedErr {
				t.Fatalf("Error: %v, expected error: %v", err, test.expectedErr)
			}
		})
	}
}
explode_data.jsonl/28134
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 518 }
[ 2830, 3393, 21390, 8331, 5910, 1155, 353, 8840, 836, 8, 341, 53183, 16, 1669, 10382, 38121, 4286, 1155, 340, 53183, 17, 1669, 10382, 38121, 4286, 1155, 692, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 36196, 18452, 59...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestScrapeTableStat checks the table-statistics scraper against a stubbed
// MySQL connection: it must first confirm the userstat plugin is enabled,
// then emit three metrics (rows read / changed / changed-x-indexes) for each
// table row returned, in row order.
func TestScrapeTableStat(t *testing.T) {
	db, mock, err := sqlmock.New()
	if err != nil {
		t.Fatalf("error opening a stub database connection: %s", err)
	}
	defer db.Close()
	// The scraper's first query checks that userstat collection is ON.
	mock.ExpectQuery(sanitizeQuery(userstatCheckQuery)).WillReturnRows(sqlmock.NewRows([]string{"Variable_name", "Value"}).
		AddRow("userstat", "ON"))
	// Canned result set for the table statistics query itself.
	columns := []string{"TABLE_SCHEMA", "TABLE_NAME", "ROWS_READ", "ROWS_CHANGED", "ROWS_CHANGED_X_INDEXES"}
	rows := sqlmock.NewRows(columns).
		AddRow("mysql", "db", 238, 0, 8).
		AddRow("mysql", "proxies_priv", 99, 1, 0).
		AddRow("mysql", "user", 1064, 2, 5)
	mock.ExpectQuery(sanitizeQuery(tableStatQuery)).WillReturnRows(rows)
	// Run the scrape on a goroutine so metrics can be consumed from ch as
	// they are produced; the channel is closed when scraping finishes.
	ch := make(chan prometheus.Metric)
	go func() {
		if err = (ScrapeTableStat{}).Scrape(context.Background(), db, ch, log.NewNopLogger()); err != nil {
			t.Errorf("error calling function on test: %s", err)
		}
		close(ch)
	}()
	// Three metrics per table, emitted in the order of the stubbed rows.
	expected := []MetricResult{
		{labels: labelMap{"schema": "mysql", "table": "db"}, value: 238},
		{labels: labelMap{"schema": "mysql", "table": "db"}, value: 0},
		{labels: labelMap{"schema": "mysql", "table": "db"}, value: 8},
		{labels: labelMap{"schema": "mysql", "table": "proxies_priv"}, value: 99},
		{labels: labelMap{"schema": "mysql", "table": "proxies_priv"}, value: 1},
		{labels: labelMap{"schema": "mysql", "table": "proxies_priv"}, value: 0},
		{labels: labelMap{"schema": "mysql", "table": "user"}, value: 1064},
		{labels: labelMap{"schema": "mysql", "table": "user"}, value: 2},
		{labels: labelMap{"schema": "mysql", "table": "user"}, value: 5},
	}
	convey.Convey("Metrics comparison", t, func() {
		for _, expect := range expected {
			got := readMetric(<-ch)
			convey.So(expect, convey.ShouldResemble, got)
		}
	})
	// Ensure all SQL queries were executed
	if err := mock.ExpectationsWereMet(); err != nil {
		t.Errorf("there were unfulfilled exceptions: %s", err)
	}
}
explode_data.jsonl/66558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 747 }
[ 2830, 3393, 3326, 19842, 2556, 15878, 1155, 353, 8840, 836, 8, 341, 20939, 11, 7860, 11, 1848, 1669, 5704, 16712, 7121, 741, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 841, 8568, 264, 13633, 4625, 3633, 25, 1018, 82, 497, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReset(t *testing.T) { ind := mock.MovieIndexer{} AddIndexer(ind) Reset() if len(indexersCollection) != 0 { t.Error("Expected indexer collection to be empty after reset") } }
explode_data.jsonl/81817
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 14828, 1155, 353, 8840, 836, 8, 341, 197, 484, 1669, 7860, 63044, 1552, 261, 16094, 37972, 1552, 261, 23884, 340, 197, 14828, 2822, 743, 2422, 7195, 388, 6482, 8, 961, 220, 15, 341, 197, 3244, 6141, 445, 18896, 87216, 4426...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
// TestCachedChartsChartFromRepo exercises the single-chart lookup: a known
// repo/chart pair succeeds, while an unknown repo or an unknown chart name
// must return an error.
func TestCachedChartsChartFromRepo(t *testing.T) {
	// TODO: validate chart data
	_, err := chartsImplementation.ChartFromRepo(testutil.RepoName, testutil.ChartName)
	assert.NoErr(t, err)
	// Unknown repository name must fail.
	_, err = chartsImplementation.ChartFromRepo(testutil.BogusRepo, testutil.ChartName)
	assert.ExistsErr(t, err, "sent bogus repo name to Charts.ChartFromRepo()")
	// Known repository but unknown chart name must also fail.
	_, err = chartsImplementation.ChartFromRepo(testutil.RepoName, testutil.BogusRepo)
	assert.ExistsErr(t, err, "sent bogus chart name to Charts.ChartFromRepo()")
}
explode_data.jsonl/37966
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 70293, 64878, 14488, 3830, 25243, 1155, 353, 8840, 836, 8, 341, 197, 322, 5343, 25, 9593, 9487, 821, 198, 197, 6878, 1848, 1669, 26131, 36850, 42667, 3830, 25243, 8623, 1314, 2817, 5368, 675, 11, 1273, 1314, 42667, 675, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDockerComposeWithWaitStrategy_NoExposedPorts(t *testing.T) { path := "./testresources/docker-compose-no-exposed-ports.yml" identifier := strings.ToLower(uuid.New().String()) compose := NewLocalDockerCompose([]string{path}, identifier, WithLogger(TestLogger(t))) destroyFn := func() { err := compose.Down() checkIfError(t, err) } defer destroyFn() err := compose. WithCommand([]string{"up", "-d"}). WithExposedService("nginx_1", 9080, wait.ForLog("Configuration complete; ready for start up")). Invoke() checkIfError(t, err) assert.Equal(t, 1, len(compose.Services)) assert.Contains(t, compose.Services, "nginx") }
explode_data.jsonl/43631
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 35, 13659, 70492, 2354, 14190, 19816, 36989, 82941, 68273, 1155, 353, 8840, 836, 8, 341, 26781, 1669, 5924, 1944, 12745, 61764, 65070, 28366, 10187, 3865, 12, 3394, 33936, 1837, 197, 15909, 1669, 9069, 29983, 41458, 7121, 1005, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTraceExporter_InvalidName(t *testing.T) { te, err := NewTraceExporter(nil, newPushTraceData(0, nil)) require.Nil(t, te) require.Equal(t, errNilConfig, err) }
explode_data.jsonl/1566
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 6550, 88025, 62, 7928, 675, 1155, 353, 8840, 836, 8, 341, 197, 665, 11, 1848, 1669, 1532, 6550, 88025, 27907, 11, 501, 16644, 6550, 1043, 7, 15, 11, 2092, 1171, 17957, 59678, 1155, 11, 1013, 340, 17957, 12808, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestUpdateBarrierTs checks that a table pipeline's barrier ts follows the
// changefeed's global resolved ts but is capped by the schema storage's
// resolved ts: with global resolved ts at 20 the barrier stays at 10 until
// the schema storage advances to 15.
func TestUpdateBarrierTs(t *testing.T) {
	ctx := cdcContext.NewBackendContext4Test(true)
	p, tester := initProcessor4Test(ctx, t)
	// Seed the changefeed status: checkpoint 5, resolved 10.
	p.changefeed.PatchStatus(func(status *model.ChangeFeedStatus) (*model.ChangeFeedStatus, bool, error) {
		status.CheckpointTs = 5
		status.ResolvedTs = 10
		return status, true, nil
	})
	// Register table 1 starting at ts 5 on this capture.
	p.changefeed.PatchTaskStatus(p.captureInfo.ID, func(status *model.TaskStatus) (*model.TaskStatus, bool, error) {
		status.AddTable(1, &model.TableReplicaInfo{StartTs: 5}, 5)
		return status, true, nil
	})
	p.schemaStorage.(*mockSchemaStorage).resolvedTs = 10
	// init tick, add table OperDispatched.
	_, err := p.Tick(ctx, p.changefeed)
	require.Nil(t, err)
	tester.MustApplyPatches()
	// tick again, add table OperProcessed.
	_, err = p.Tick(ctx, p.changefeed)
	require.Nil(t, err)
	tester.MustApplyPatches()
	// Global resolved ts has advanced while schema storage stalls.
	p.changefeed.PatchStatus(func(status *model.ChangeFeedStatus) (*model.ChangeFeedStatus, bool, error) {
		status.ResolvedTs = 20
		return status, true, nil
	})
	_, err = p.Tick(ctx, p.changefeed)
	require.Nil(t, err)
	tester.MustApplyPatches()
	tb := p.tables[model.TableID(1)].(*mockTablePipeline)
	// Barrier is held back at the schema storage's resolved ts (10), not 20.
	require.Equal(t, tb.barrierTs, uint64(10))
	// Schema storage has advanced too.
	p.schemaStorage.(*mockSchemaStorage).resolvedTs = 15
	_, err = p.Tick(ctx, p.changefeed)
	require.Nil(t, err)
	tester.MustApplyPatches()
	tb = p.tables[model.TableID(1)].(*mockTablePipeline)
	// The barrier may now advance, but only up to the new schema ts (15).
	require.Equal(t, tb.barrierTs, uint64(15))
}
explode_data.jsonl/81946
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 580 }
[ 2830, 3393, 4289, 33038, 52793, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 272, 7628, 1972, 7121, 29699, 1972, 19, 2271, 3715, 340, 3223, 11, 37111, 1669, 2930, 22946, 19, 2271, 7502, 11, 259, 340, 3223, 5329, 524, 823, 12051, 1069, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNonBlockingClose(t *testing.T) { c := context.New(t, defaultMTU) defer c.Cleanup() c.CreateConnected(context.TestInitialSequenceNumber, 30000, -1 /* epRcvBuf */) ep := c.EP c.EP = nil // Close the endpoint and measure how long it takes. t0 := time.Now() ep.Close() if diff := time.Now().Sub(t0); diff > 3*time.Second { t.Fatalf("Took too long to close: %s", diff) } }
explode_data.jsonl/75928
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 8121, 48266, 7925, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 2266, 7121, 1155, 11, 1638, 8505, 52, 340, 16867, 272, 727, 60639, 2822, 1444, 7251, 21146, 5378, 8787, 6341, 14076, 2833, 11, 220, 18, 15, 15, 15, 15, 11, 481...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestBackpressureBuffer_parallel_write_limits_on_bypass(t *testing.T) { t.SkipNow() for repeat := 1; repeat > 0; repeat-- { for parWriters := 1; parWriters <= 20; parWriters++ { for useWorker := 0; useWorker <= 1; useWorker++ { t.Run(fmt.Sprintf("bypass parWriters=%v useWorker=%v", parWriters, useWorker != 0), func(t *testing.T) { testBackpressureBufferLimit(t, parWriters, false, useWorker != 0) }) } } } }
explode_data.jsonl/29809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 184 }
[ 2830, 3393, 3707, 45974, 4095, 60625, 9165, 31820, 4470, 880, 49911, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 7039, 2822, 2023, 13153, 1669, 220, 16, 26, 13153, 861, 220, 15, 26, 13153, 313, 341, 197, 2023, 1346, 54, 31829, 1669, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestOneBoolReader(t *testing.T) { // setup var err error var result bool = false target := NewOneBoolReader() writer := NewOneBoolWriter() buffer := make([]byte, 1) slice := gobits.NewSlice(buffer, 0, 0, 8) segment := gobits.NewSegment(slice) // when writer.Write(segment, true) err = target.Read(segment, &result) // then assert.Nil(t, err) assert.Equal(t, true, result) }
explode_data.jsonl/44186
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 153 }
[ 2830, 3393, 3966, 11233, 5062, 1155, 353, 8840, 836, 8, 341, 197, 322, 6505, 198, 2405, 1848, 1465, 198, 2405, 1102, 1807, 284, 895, 198, 28861, 1669, 1532, 3966, 11233, 5062, 741, 38959, 1669, 1532, 3966, 11233, 6492, 741, 31122, 1669,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetAggregatedResultSuccess(t *testing.T) { syncher := setupTest(len(totalSuccessTestConsts)+1, totalSuccessTestConsts) go addResultToSyncher(syncher,result{"ipvlan", nil, nil, "eth2"}) err := syncher.GetAggregatedResult() if err != nil { t.Errorf("Results could not be successfully aggregated against our expectation, because: %v", err) } }
explode_data.jsonl/69736
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 120 }
[ 2830, 3393, 1949, 9042, 93040, 2077, 7188, 1155, 353, 8840, 836, 8, 341, 220, 6782, 9034, 1669, 6505, 2271, 6901, 22842, 7188, 2271, 19167, 82, 7257, 16, 11, 2790, 7188, 2271, 19167, 82, 340, 220, 728, 912, 2077, 1249, 37134, 9034, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestSeedPhraseKeyRing checks key recovery from a mnemonic: a key is
// created, deleted, then re-created under a new name from its seed phrase,
// and the recovered key must have the same public key and address.
func TestSeedPhraseKeyRing(t *testing.T) {
	dir := t.TempDir()
	kb, err := New("keybasename", "test", dir, nil)
	require.NoError(t, err)
	algo := hd.Secp256k1
	n1, n2 := "lost-key", "found-again"
	// make sure key works with initial password
	info, mnemonic, err := kb.NewMnemonic(n1, English, sdk.FullFundraiserPath, algo)
	require.Nil(t, err, "%+v", err)
	require.Equal(t, n1, info.GetName())
	require.NotEmpty(t, mnemonic)
	// now, let us delete this key
	err = kb.Delete(n1)
	require.Nil(t, err, "%+v", err)
	// The deleted key must no longer be resolvable.
	_, err = kb.Key(n1)
	require.NotNil(t, err)
	// let us re-create it from the mnemonic-phrase, deriving the same
	// fundraiser HD path (account 0, index 0) used at creation.
	params := *hd.NewFundraiserParams(0, sdk.CoinType, 0)
	hdPath := params.String()
	newInfo, err := kb.NewAccount(n2, mnemonic, DefaultBIP39Passphrase, hdPath, hd.Secp256k1)
	require.NoError(t, err)
	require.Equal(t, n2, newInfo.GetName())
	// Same seed + same path => identical public key and address.
	require.Equal(t, info.GetPubKey().Address(), newInfo.GetPubKey().Address())
	require.Equal(t, info.GetPubKey(), newInfo.GetPubKey())
}
explode_data.jsonl/73439
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 41471, 46806, 1592, 43466, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 259, 65009, 6184, 2822, 16463, 65, 11, 1848, 1669, 1532, 445, 792, 42953, 497, 330, 1944, 497, 5419, 11, 2092, 340, 17957, 35699, 1155, 11, 1848, 692, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestReplicateRepositoryTransactional verifies that repository replication
// participates in reference transactions: the initial snapshot-based
// creation must cast exactly one vote, and the subsequent incremental fetch
// must cast two.
func TestReplicateRepositoryTransactional(t *testing.T) {
	// Two storages: "default" holds the source, "replica" the target.
	cfgBuilder := testcfg.NewGitalyCfgBuilder(testcfg.WithStorages("default", "replica"))
	cfg := cfgBuilder.Build(t)
	testhelper.BuildGitalyHooks(t, cfg)
	testhelper.BuildGitalySSH(t, cfg)
	serverSocketPath := runRepositoryServerWithConfig(t, cfg, nil, testserver.WithDisablePraefect())
	cfg.SocketPath = serverSocketPath
	sourceRepo, sourceRepoPath := gittest.CloneRepo(t, cfg, cfg.Storages[0])
	// The target is the same repository relocated onto the second storage.
	targetRepo := proto.Clone(sourceRepo).(*gitalypb.Repository)
	targetRepo.StorageName = cfg.Storages[1].Name
	// Count transactional votes; every vote is answered with COMMIT.
	votes := 0
	txServer := testTransactionServer{
		vote: func(request *gitalypb.VoteTransactionRequest) (*gitalypb.VoteTransactionResponse, error) {
			votes++
			return &gitalypb.VoteTransactionResponse{
				State: gitalypb.VoteTransactionResponse_COMMIT,
			}, nil
		},
	}
	ctx, cancel := testhelper.Context()
	defer cancel()
	// Mark the context as running inside transaction 1 on the primary node.
	ctx, err := txinfo.InjectTransaction(ctx, 1, "primary", true)
	require.NoError(t, err)
	ctx = helper.IncomingToOutgoing(ctx)
	ctx = testhelper.MergeOutgoingMetadata(ctx, testhelper.GitalyServersMetadataFromCfg(t, cfg))
	// Serve the vote endpoint back to Gitaly over the backchannel.
	client := newMuxedRepositoryClient(t, ctx, cfg, serverSocketPath, backchannel.NewClientHandshaker(
		testhelper.DiscardTestEntry(t),
		func() backchannel.Server {
			srv := grpc.NewServer()
			gitalypb.RegisterRefTransactionServer(srv, &txServer)
			return srv
		},
	))
	// The first invocation creates the repository via a snapshot given that it doesn't yet
	// exist.
	_, err = client.ReplicateRepository(ctx, &gitalypb.ReplicateRepositoryRequest{
		Repository: targetRepo,
		Source:     sourceRepo,
	})
	require.NoError(t, err)
	require.Equal(t, 1, votes)
	// We're now changing a reference in the source repository such that we can observe changes
	// in the target repo.
	gittest.Exec(t, cfg, "-C", sourceRepoPath, "update-ref", "refs/heads/master", "refs/heads/master~")
	votes = 0
	// And the second invocation uses FetchInternalRemote.
	_, err = client.ReplicateRepository(ctx, &gitalypb.ReplicateRepositoryRequest{
		Repository: targetRepo,
		Source:     sourceRepo,
	})
	require.NoError(t, err)
	require.Equal(t, 2, votes)
}
explode_data.jsonl/64811
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 784 }
[ 2830, 3393, 18327, 48795, 4624, 31375, 1155, 353, 8840, 836, 8, 341, 50286, 3297, 1669, 1273, 14072, 7121, 38, 2174, 88, 42467, 3297, 8623, 14072, 26124, 623, 269, 1134, 445, 2258, 497, 330, 9995, 15317, 5455, 50286, 1669, 13286, 3297, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUndefineDomain(t *testing.T) { dom, conn := buildTestDomain() defer func() { dom.Free() if res, _ := conn.Close(); res != 0 { t.Errorf("Close() == %d, expected 0", res) } }() name, err := dom.GetName() if err != nil { t.Error(err) return } if err := dom.Undefine(); err != nil { t.Error(err) return } if _, err := conn.LookupDomainByName(name); err == nil { t.Fatal("Shouldn't have been able to find domain") return } }
explode_data.jsonl/64816
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 52, 5037, 482, 13636, 1155, 353, 8840, 836, 8, 341, 2698, 316, 11, 4534, 1669, 1936, 2271, 13636, 741, 16867, 2915, 368, 341, 197, 2698, 316, 52229, 741, 197, 743, 592, 11, 716, 1669, 4534, 10421, 2129, 592, 961, 220, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTaskRunConversionBadType(t *testing.T) { good, bad := &TaskRun{}, &Task{} if err := good.ConvertTo(context.Background(), bad); err == nil { t.Errorf("ConvertTo() = %#v, wanted error", bad) } if err := good.ConvertFrom(context.Background(), bad); err == nil { t.Errorf("ConvertFrom() = %#v, wanted error", good) } }
explode_data.jsonl/30558
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 6262, 6727, 48237, 17082, 929, 1155, 353, 8840, 836, 8, 341, 3174, 1386, 11, 3873, 1669, 609, 6262, 6727, 22655, 609, 6262, 31483, 743, 1848, 1669, 1661, 36179, 1249, 5378, 19047, 1507, 3873, 1215, 1848, 621, 2092, 341, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestGenesisWithInvalidFee ensures Initializer.FromGenesis rejects malformed
// msgfee genesis entries: a zero fee amount, an empty ticker, a missing
// message path, and a declaration without any fee at all.
func TestGenesisWithInvalidFee(t *testing.T) {
	cases := map[string]string{
		"zero fee":  `[{"msg_path": "foo/bar", "fee": {"whole": 0, "fractional": 0, "ticker": "DOGE"}}]`,
		"no ticker": `[{"msg_path": "foo/bar", "fee": {"whole": 1, "fractional": 0, "ticker": ""}}]`,
		"no path":   `[{"fee": {"whole": 1, "fractional": 1, "ticker": "DOGE"}}]`,
		"no fee":    `[{"msg_path": "foo/bar"}]`,
	}
	for testName, content := range cases {
		t.Run(testName, func(t *testing.T) {
			// Embed the malformed fee list into a minimal genesis document.
			genesis := `{"msgfee": ` + content + `}`
			var opts weave.Options
			if err := json.Unmarshal([]byte(genesis), &opts); err != nil {
				t.Fatalf("cannot unmarshal genesis: %s", err)
			}
			db := store.MemStore()
			migration.MustInitPkg(db, "msgfee")
			var ini Initializer
			// Initialization must fail for every malformed entry.
			if err := ini.FromGenesis(opts, weave.GenesisParams{}, db); err == nil {
				t.Fatal("no error")
			}
		})
	}
}
explode_data.jsonl/46512
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 395 }
[ 2830, 3393, 84652, 2354, 7928, 41941, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 2415, 14032, 30953, 515, 197, 197, 1, 14154, 11060, 788, 220, 77644, 4913, 3236, 2638, 788, 330, 7975, 49513, 497, 330, 30017, 788, 5212, 66633, 788, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBindTags(t *testing.T) { bindTagsArgs := &BindTagsArgs{ ChangeTags: []model.TagModel{ { TagKey: "BBCTestKey", TagValue: "BBCTestValue", }, }, } err := BBC_CLIENT.BindTags(BBC_TestBbcId, bindTagsArgs) ExpectEqual(t.Errorf, err, nil) }
explode_data.jsonl/4051
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 121 }
[ 2830, 3393, 9950, 15930, 1155, 353, 8840, 836, 8, 341, 2233, 484, 15930, 4117, 1669, 609, 9950, 15930, 4117, 515, 197, 197, 4072, 15930, 25, 3056, 2528, 23676, 1712, 515, 298, 197, 515, 571, 197, 5668, 1592, 25, 256, 330, 10098, 76434...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTaskUpdateKnownStatusChecksSteadyStateWhenSetToResourceProvisioned
// verifies that a task only advances to RUNNING once every container has
// reached its own steady state — including a container whose steady state is
// RESOURCES_PROVISIONED rather than the default RUNNING.
func TestTaskUpdateKnownStatusChecksSteadyStateWhenSetToResourceProvisioned(t *testing.T) {
	resourcesProvisioned := apicontainerstatus.ContainerResourcesProvisioned
	testTask := &Task{
		KnownStatusUnsafe: apitaskstatus.TaskStatusNone,
		Containers: []*apicontainer.Container{
			{
				KnownStatusUnsafe: apicontainerstatus.ContainerCreated,
				Essential:         true,
			},
			{
				KnownStatusUnsafe: apicontainerstatus.ContainerRunning,
				Essential:         true,
			},
			{
				KnownStatusUnsafe: apicontainerstatus.ContainerRunning,
				Essential:         true,
				// Steady state above RUNNING: this container is not done
				// until its resources are provisioned.
				SteadyStateStatusUnsafe: &resourcesProvisioned,
			},
		},
	}
	// One of the containers is in CREATED state, expect task to be updated
	// to apitaskstatus.TaskCreated
	newStatus := testTask.updateTaskKnownStatus()
	assert.Equal(t, apitaskstatus.TaskCreated, newStatus, "Incorrect status returned: %s", newStatus.String())
	assert.Equal(t, apitaskstatus.TaskCreated, testTask.GetKnownStatus())
	// All of the containers are in RUNNING state, but one of the containers
	// has its steady state set to RESOURCES_PROVISIONED, so do NOT expect the
	// task to advance to apitaskstatus.TaskRunning yet: updateTaskKnownStatus
	// reports no change (TaskStatusNone) and known status stays TaskCreated.
	testTask.Containers[0].SetKnownStatus(apicontainerstatus.ContainerRunning)
	newStatus = testTask.updateTaskKnownStatus()
	assert.Equal(t, apitaskstatus.TaskStatusNone, newStatus, "Incorrect status returned: %s", newStatus.String())
	assert.Equal(t, apitaskstatus.TaskCreated, testTask.GetKnownStatus())
	// All of the containers have reached their steady states, expect the task
	// to be updated to `apitaskstatus.TaskRunning`
	testTask.Containers[2].SetKnownStatus(apicontainerstatus.ContainerResourcesProvisioned)
	newStatus = testTask.updateTaskKnownStatus()
	assert.Equal(t, apitaskstatus.TaskRunning, newStatus, "Incorrect status returned: %s", newStatus.String())
	assert.Equal(t, apitaskstatus.TaskRunning, testTask.GetKnownStatus())
}
explode_data.jsonl/37218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 642 }
[ 2830, 3393, 6262, 4289, 48206, 2522, 49820, 623, 3149, 88, 1397, 4498, 1649, 1249, 4783, 1336, 13013, 291, 1155, 353, 8840, 836, 8, 341, 10202, 2360, 1336, 13013, 291, 1669, 1443, 51160, 1743, 2829, 33672, 11277, 1336, 13013, 291, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestArrayShift shifts two new values into the front of a fixed 3-element
// array (each shift drops the last element) and checks the final contents.
//
// Fixes over the previous version: test output goes through t.Logf instead of
// log.Printf (so it is attached to the test and shown only on failure or -v);
// the hand-rolled copy-via-temporary-slice shift is replaced with the builtin
// copy, which is documented to handle overlapping src/dst; and the arrays are
// compared with ==, which is defined for comparable array types.
func TestArrayShift(t *testing.T) {
	a := [3]string{"first", "second", "third"}

	for _, next := range []string{"fourth", "fifth"} {
		// Shift right by one (the last element falls off), then prepend.
		// copy is overlap-safe, so no temporary slice is needed.
		copy(a[1:], a[:len(a)-1])
		a[0] = next
	}

	t.Logf("final : %v", a)

	want := [3]string{"fifth", "fourth", "first"}
	if a != want {
		t.Errorf("array shift not equal: got %v, want %v", a, want)
		return
	}
	t.Log("Array shift implemented correctly")
}
explode_data.jsonl/13382
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 218 }
[ 2830, 3393, 1857, 24841, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 353, 931, 2561, 18, 30953, 692, 11323, 58, 15, 60, 284, 330, 3896, 698, 11323, 58, 16, 60, 284, 330, 5569, 698, 11323, 58, 17, 60, 284, 330, 31727, 1837, 2233, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestProbe(t *testing.T) { statsCh := make(chan metrics.StatMessage) server := newTestServer(statsCh) defer server.Shutdown(0) go server.listenAndServe() req, err := http.NewRequest(http.MethodGet, fmt.Sprintf("%s/%s", server.listenAddr(), network.ProbePath), nil) if err != nil { t.Fatal("Error creating request:", err) } req.Header.Set("User-Agent", "kube-probe/1.15.i.wish") resp, err := http.DefaultClient.Do(req) if err != nil { t.Fatal("Error roundtripping:", err) } defer resp.Body.Close() if got, want := resp.StatusCode, http.StatusOK; got != want { t.Errorf("StatusCode: %v, want: %v", got, want) } }
explode_data.jsonl/64749
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 253 }
[ 2830, 3393, 81426, 1155, 353, 8840, 836, 8, 341, 79659, 1143, 1669, 1281, 35190, 16734, 53419, 2052, 340, 41057, 1669, 501, 2271, 5475, 50714, 1143, 692, 16867, 3538, 10849, 18452, 7, 15, 340, 30680, 3538, 22628, 96059, 741, 24395, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestKeyMerger_MergeKeys feeds several comma-separated tag-key lists into a
// shared KeyMerger and checks the merged result is the deduplicated, ordered
// union; the last case confirms Clear fully resets state between cases.
func TestKeyMerger_MergeKeys(t *testing.T) {
	tests := []struct {
		name string
		keys [][][]byte
		exp  string
	}{
		{
			name: "mixed",
			keys: [][][]byte{
				bytes.Split([]byte("tag0,tag1,tag2"), commaB),
				bytes.Split([]byte("tag0,tag1,tag2"), commaB),
				bytes.Split([]byte("tag0"), commaB),
				bytes.Split([]byte("tag0,tag3"), commaB),
			},
			exp: "tag0,tag1,tag2,tag3",
		},
		{
			name: "mixed 2",
			keys: [][][]byte{
				bytes.Split([]byte("tag0"), commaB),
				bytes.Split([]byte("tag0,tag3"), commaB),
				bytes.Split([]byte("tag0,tag1,tag2"), commaB),
				bytes.Split([]byte("tag0,tag1,tag2"), commaB),
			},
			exp: "tag0,tag1,tag2,tag3",
		},
		{
			name: "all different",
			keys: [][][]byte{
				bytes.Split([]byte("tag0"), commaB),
				bytes.Split([]byte("tag3"), commaB),
				bytes.Split([]byte("tag1"), commaB),
				bytes.Split([]byte("tag2"), commaB),
			},
			exp: "tag0,tag1,tag2,tag3",
		},
		{
			name: "new tags,verify clear",
			keys: [][][]byte{
				bytes.Split([]byte("tag9"), commaB),
				bytes.Split([]byte("tag8"), commaB),
			},
			exp: "tag8,tag9",
		},
	}
	// One merger shared across all cases; Clear below isolates them.
	var km KeyMerger
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			km.Clear()
			for _, keys := range tt.keys {
				km.MergeKeys(keys)
			}
			if got := km.String(); !cmp.Equal(got, tt.exp) {
				t.Errorf("unexpected keys -got/+exp\n%s", cmp.Diff(got, tt.exp))
			}
		})
	}
}
explode_data.jsonl/17302
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 699 }
[ 2830, 3393, 1592, 26716, 1389, 1245, 10080, 8850, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 80112, 3056, 16613, 3782, 198, 197, 48558, 220, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestOrderedWeightedEdgesSlice(t *testing.T) { for _, test := range orderedWeightedEdgesTests { for i := 0; i < 2; i++ { it := iterator.NewOrderedWeightedEdges(test.edges) got := it.WeightedEdgeSlice() want := test.edges if !reflect.DeepEqual(got, want) { t.Errorf("unexpected iterator output for round %d: got:%#v want:%#v", i, got, want) } it.Reset() } } }
explode_data.jsonl/67500
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 54384, 8295, 291, 41122, 33236, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 11457, 8295, 291, 41122, 18200, 341, 197, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 17, 26, 600, 1027, 341, 298, 23374, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestPhaseIAM would run the lifecycle-phase harness up to the security
// phase, but is currently skipped until validation failures can be tolerated.
func TestPhaseIAM(t *testing.T) {
	// t.Skip stops this test here; the call below is never reached until
	// the skip is removed.
	t.Skip("unable to test w/o allowing failed validation")
	runTestPhase(t, "lifecyclephases.example.com", "lifecycle_phases", "v1alpha2", true, 1, cloudup.PhaseSecurity)
}
explode_data.jsonl/17509
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 30733, 73707, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 45928, 311, 1273, 289, 20271, 10693, 4641, 10519, 1138, 56742, 2271, 30733, 1155, 11, 330, 75, 19517, 759, 2264, 7724, 905, 497, 330, 75, 19517, 9782, 2264, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestAfterRetryRefreshCreds(t *testing.T) { os.Clearenv() credProvider := &mockCredsProvider{} svc := awstesting.NewClient(&aws.Config{ Credentials: credentials.NewCredentials(credProvider), MaxRetries: aws.Int(1), }) svc.Handlers.Clear() svc.Handlers.ValidateResponse.PushBack(func(r *request.Request) { r.Error = awserr.New("UnknownError", "", nil) r.HTTPResponse = &http.Response{StatusCode: 400, Body: ioutil.NopCloser(bytes.NewBuffer([]byte{}))} }) svc.Handlers.UnmarshalError.PushBack(func(r *request.Request) { r.Error = awserr.New("ExpiredTokenException", "", nil) }) svc.Handlers.AfterRetry.PushBackNamed(corehandlers.AfterRetryHandler) assert.True(t, svc.Config.Credentials.IsExpired(), "Expect to start out expired") assert.False(t, credProvider.retrieveCalled) req := svc.NewRequest(&request.Operation{Name: "Operation"}, nil, nil) req.Send() assert.True(t, svc.Config.Credentials.IsExpired()) assert.False(t, credProvider.retrieveCalled) _, err := svc.Config.Credentials.Get() assert.NoError(t, err) assert.True(t, credProvider.retrieveCalled) }
explode_data.jsonl/57763
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 427 }
[ 2830, 3393, 6025, 51560, 14567, 34, 53369, 1155, 353, 8840, 836, 8, 341, 25078, 727, 273, 9151, 85, 741, 197, 10844, 5179, 1669, 609, 16712, 34, 53369, 5179, 31483, 1903, 7362, 1669, 1360, 267, 59855, 7121, 2959, 2099, 8635, 10753, 515,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSAPMClientTokenUsageAndErrorMarshalling(t *testing.T) { tests := []struct { name string accessTokenPassthrough bool translateError bool sendError bool }{ { name: "no error without passthrough", accessTokenPassthrough: false, translateError: false, sendError: false, }, { name: "no error with passthrough", accessTokenPassthrough: true, translateError: false, sendError: false, }, { name: "translate error", accessTokenPassthrough: true, translateError: true, sendError: false, }, { name: "sendError", accessTokenPassthrough: true, translateError: false, sendError: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { tracesReceived := false server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { expectedToken := "ClientAccessToken" if tt.accessTokenPassthrough { expectedToken = "TraceAccessToken" } assert.Contains(t, r.Header.Get("x-sf-token"), expectedToken) status := 200 if tt.sendError { status = 400 } w.WriteHeader(status) tracesReceived = true })) defer func() { if !tt.translateError { assert.True(t, tracesReceived, "Test server never received traces.") } else { assert.False(t, tracesReceived, "Test server received traces when none expected.") } }() defer server.Close() cfg := &Config{ Endpoint: server.URL, AccessToken: "ClientAccessToken", AccessTokenPassthroughConfig: splunk.AccessTokenPassthroughConfig{ AccessTokenPassthrough: tt.accessTokenPassthrough, }, } params := component.ExporterCreateParams{Logger: zap.NewNop()} se, err := newSAPMExporter(cfg, params) assert.Nil(t, err) assert.NotNil(t, se, "failed to create trace exporter") trace := buildTestTrace(!tt.translateError) err = se.pushTraceData(context.Background(), trace) if tt.sendError || tt.translateError { require.Error(t, err) } else { require.NoError(t, err) } }) } }
explode_data.jsonl/72155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1058 }
[ 2830, 3393, 50, 2537, 44, 2959, 3323, 14783, 3036, 1454, 79712, 16740, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 4293, 914, 198, 197, 197, 41167, 70911, 86901, 1807, 198, 197, 197, 14045, 1454, 260, 1807, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHandleADRParamSetupAns(t *testing.T) { for _, tc := range []struct { Name string Device, Expected *ttnpb.EndDevice Events events.Builders Error error }{ { Name: "no request", Device: &ttnpb.EndDevice{ MacState: &ttnpb.MACState{}, }, Expected: &ttnpb.EndDevice{ MacState: &ttnpb.MACState{}, }, Events: events.Builders{ EvtReceiveADRParamSetupAnswer, }, Error: ErrRequestNotFound, }, { Name: "limit 32768, delay 1024", Device: &ttnpb.EndDevice{ MacState: &ttnpb.MACState{ PendingRequests: []*ttnpb.MACCommand{ (&ttnpb.MACCommand_ADRParamSetupReq{ AdrAckLimitExponent: ttnpb.ADR_ACK_LIMIT_32768, AdrAckDelayExponent: ttnpb.ADR_ACK_DELAY_1024, }).MACCommand(), }, }, }, Expected: &ttnpb.EndDevice{ MacState: &ttnpb.MACState{ CurrentParameters: ttnpb.MACParameters{ AdrAckLimitExponent: &ttnpb.ADRAckLimitExponentValue{Value: ttnpb.ADR_ACK_LIMIT_32768}, AdrAckDelayExponent: &ttnpb.ADRAckDelayExponentValue{Value: ttnpb.ADR_ACK_DELAY_1024}, }, PendingRequests: []*ttnpb.MACCommand{}, }, }, Events: events.Builders{ EvtReceiveADRParamSetupAnswer, }, }, } { tc := tc test.RunSubtest(t, test.SubtestConfig{ Name: tc.Name, Parallel: true, Func: func(ctx context.Context, t *testing.T, a *assertions.Assertion) { dev := CopyEndDevice(tc.Device) evs, err := HandleADRParamSetupAns(ctx, dev) if tc.Error != nil && !a.So(err, should.EqualErrorOrDefinition, tc.Error) || tc.Error == nil && !a.So(err, should.BeNil) { t.FailNow() } a.So(dev, should.Resemble, tc.Expected) a.So(evs, should.ResembleEventBuilders, tc.Events) }, }) } }
explode_data.jsonl/38428
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 878 }
[ 2830, 3393, 6999, 96473, 2001, 21821, 69599, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 21297, 1797, 914, 198, 197, 197, 6985, 11, 31021, 353, 83, 1517, 16650, 18569, 6985, 198, 197, 197, 7900, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func Test_GetCSINodes(t *testing.T) { type checkFn func(*testing.T, *v1.CSINodeList, error) type connectFn func(*k8s.API) error type configFn func() (*rest.Config, error) check := func(fns ...checkFn) []checkFn { return fns } hasNoError := func(t *testing.T, nodes *v1.CSINodeList, err error) { if err != nil { t.Fatalf("expected no error") } } checkExpectedOutput := func(expectedOutput *v1.CSINodeList) func(t *testing.T, nodes *v1.CSINodeList, err error) { return func(t *testing.T, nodes *v1.CSINodeList, err error) { assert.Equal(t, expectedOutput, nodes) } } hasError := func(t *testing.T, nodes *v1.CSINodeList, err error) { if err == nil { t.Fatalf("expected error") } } tests := map[string]func(t *testing.T) (connectFn, configFn, []checkFn){ "success": func(*testing.T) (connectFn, configFn, []checkFn) { nodes := &v1.CSINodeList{ Items: []v1.CSINode{ { ObjectMeta: metav1.ObjectMeta{ Name: "csi-node-1", }, Spec: v1.CSINodeSpec{ Drivers: []v1.CSINodeDriver{ { Name: "csi-vxflexos.dellemc.com", NodeID: "node-1", }, }, }, }, }, } connect := func(api *k8s.API) error { api.Client = fake.NewSimpleClientset(nodes) return nil } return connect, nil, check(hasNoError, checkExpectedOutput(nodes)) }, "error connecting": func(*testing.T) (connectFn, configFn, []checkFn) { connect := func(api *k8s.API) error { return errors.New("error") } return connect, nil, check(hasError) }, "error getting a valid config": func(*testing.T) (connectFn, configFn, []checkFn) { inClusterConfig := func() (*rest.Config, error) { return nil, errors.New("error") } return nil, inClusterConfig, check(hasError) }, } for name, tc := range tests { t.Run(name, func(t *testing.T) { connectFn, inClusterConfig, checkFns := tc(t) k8sclient := k8s.API{} if connectFn != nil { oldConnectFn := k8s.ConnectFn defer func() { k8s.ConnectFn = oldConnectFn }() k8s.ConnectFn = connectFn } if inClusterConfig != nil { oldInClusterConfig := k8s.InClusterConfigFn defer func() { k8s.InClusterConfigFn = oldInClusterConfig }() 
k8s.InClusterConfigFn = inClusterConfig } nodes, err := k8sclient.GetCSINodes() for _, checkFn := range checkFns { checkFn(t, nodes, err) } }) } }
explode_data.jsonl/29787
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1108 }
[ 2830, 3393, 13614, 6412, 687, 2539, 1155, 353, 8840, 836, 8, 341, 13158, 1779, 24911, 2915, 4071, 8840, 836, 11, 353, 85, 16, 727, 50, 687, 534, 852, 11, 1465, 340, 13158, 4564, 24911, 2915, 4071, 74, 23, 82, 24922, 8, 1465, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIndexToID(t *testing.T) { assert.Equal(t, "000000000000000000000042", IndexToID(42)) }
explode_data.jsonl/69
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 35 }
[ 2830, 3393, 1552, 1249, 915, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 1155, 11, 330, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 15, 19, 17, 497, 8008, 1249, 915, 7, 19, 17, 1171, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestMem(t *testing.T) { globalSessions, _ := NewManager("memory", `{"cookieName":"gosessionid","gclifetime":10}`) go globalSessions.GC() r, _ := http.NewRequest("GET", "/", nil) w := httptest.NewRecorder() sess := globalSessions.SessionStart(w, r) defer sess.SessionRelease(w) err := sess.Set("username", "astaxie") if err != nil { t.Fatal("set error,", err) } if username := sess.Get("username"); username != "astaxie" { t.Fatal("get username error") } if cookiestr := w.Header().Get("Set-Cookie"); cookiestr == "" { t.Fatal("setcookie error") } else { parts := strings.Split(strings.TrimSpace(cookiestr), ";") for k, v := range parts { nameval := strings.Split(v, "=") if k == 0 && nameval[0] != "gosessionid" { t.Fatal("error") } } } }
explode_data.jsonl/51553
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 322 }
[ 2830, 3393, 18816, 1155, 353, 8840, 836, 8, 341, 18842, 59062, 11, 716, 1669, 1532, 2043, 445, 17269, 497, 1565, 4913, 16236, 675, 3252, 34073, 1338, 307, 2198, 70, 564, 28515, 788, 16, 15, 27085, 30680, 3644, 59062, 1224, 34, 741, 70...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestValidatePublishFlags(t *testing.T) { assert := assert.New(t) r := PacketReader{protocol: 4} tests := []struct { flags PublishFlags valid bool }{ {0x0, true}, {0x1, true}, {0x2, true}, {0x3, true}, {0x4, true}, {0x5, true}, {0x6, false}, {0x7, false}, {0x8, true}, {0x9, true}, {0xa, true}, {0xb, true}, {0xc, true}, {0xd, true}, {0xe, false}, {0xf, false}, } for _, test := range tests { err := r.validatePublishFlags(test.flags) if test.valid { assert.NoError(err) } else { assert.Error(err) } } }
explode_data.jsonl/59724
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 17926, 50145, 9195, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 7000, 1669, 28889, 5062, 90, 17014, 25, 220, 19, 630, 78216, 1669, 3056, 1235, 341, 197, 59516, 23499, 9195, 198, 197, 56322, 1807, 198, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_VoteListings(t *testing.T) { goldenVoteListings := getResponse(voteListings).(Listings) type want struct { wantErr bool containsErr string voteListings Listings } cases := []struct { name string inputHanler http.Handler want }{ { "handles RPC error", gtGoldenHTTPMock(voteListingsHandlerMock(readResponse(rpcerrors), blankHandler)), want{ true, "failed to get listings", Listings{}, }, }, { "failed to unmarshal", gtGoldenHTTPMock(voteListingsHandlerMock([]byte(`junk`), blankHandler)), want{ true, "failed to unmarshal listings", Listings{}, }, }, { "is successful", gtGoldenHTTPMock(voteListingsHandlerMock(readResponse(voteListings), blankHandler)), want{ false, "", goldenVoteListings, }, }, } for _, tt := range cases { t.Run(tt.name, func(t *testing.T) { server := httptest.NewServer(tt.inputHanler) defer server.Close() gt, err := New(server.URL) assert.Nil(t, err) voteListings, err := gt.VoteListings("BLzGD63HA4RP8Fh5xEtvdQSMKa2WzJMZjQPNVUc4Rqy8Lh5BEY1") checkErr(t, tt.wantErr, tt.containsErr, err) assert.Equal(t, tt.want.voteListings, voteListings) }) } }
explode_data.jsonl/48376
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 567 }
[ 2830, 3393, 2334, 1272, 852, 819, 1155, 353, 8840, 836, 8, 341, 3174, 813, 268, 41412, 852, 819, 1669, 633, 2582, 3747, 1272, 852, 819, 68615, 852, 819, 692, 13158, 1366, 2036, 341, 197, 50780, 7747, 414, 1807, 198, 197, 197, 13372, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDMap_Atomic_Decr(t *testing.T) { cluster := testcluster.New(NewService) s := cluster.AddMember(nil).(*Service) defer cluster.Shutdown() var wg sync.WaitGroup var start chan struct{} key := "decr" ctx := context.Background() decr := func(dm *DMap) { <-start defer wg.Done() _, err := dm.Decr(ctx, key, 1) if err != nil { s.log.V(2).Printf("[ERROR] Failed to call Decr: %v", err) return } } dm, err := s.NewDMap("atomic_test") require.NoError(t, err) start = make(chan struct{}) for i := 0; i < 100; i++ { wg.Add(1) go decr(dm) } close(start) wg.Wait() res, err := dm.Get(context.Background(), key) require.NoError(t, err) var value int err = resp.Scan(res.Value(), &value) require.NoError(t, err) require.Equal(t, -100, value) }
explode_data.jsonl/64502
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 347 }
[ 2830, 3393, 35, 2227, 55581, 3075, 78668, 81, 1155, 353, 8840, 836, 8, 341, 197, 18855, 1669, 1273, 18855, 7121, 35063, 1860, 340, 1903, 1669, 10652, 1904, 9366, 27907, 568, 4071, 1860, 340, 16867, 10652, 10849, 18452, 2822, 2405, 63581, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRollDPoS_Metrics(t *testing.T) { t.Parallel() ctrl := gomock.NewController(t) defer ctrl.Finish() candidates := make([]string, 5) for i := 0; i < len(candidates); i++ { candidates[i] = testAddrs[i].RawAddress } blockchain := mock_blockchain.NewMockBlockchain(ctrl) blockchain.EXPECT().TipHeight().Return(uint64(8)).Times(2) blockchain.EXPECT().CandidatesByHeight(gomock.Any()).Return([]*state.Candidate{ {Address: candidates[0]}, {Address: candidates[1]}, {Address: candidates[2]}, {Address: candidates[3]}, {Address: candidates[4]}, }, nil).AnyTimes() r, err := NewRollDPoSBuilder(). SetConfig(config.RollDPoS{NumDelegates: 4}). SetAddr(newTestAddr()). SetBlockchain(blockchain). SetActPool(mock_actpool.NewMockActPool(ctrl)). SetP2P(mock_network.NewMockOverlay(ctrl)). Build() require.NoError(t, err) require.NotNil(t, r) m, err := r.Metrics() require.NoError(t, err) assert.Equal(t, uint64(3), m.LatestEpoch) crypto.SortCandidates(candidates, m.LatestEpoch, r.ctx.epoch.seed) assert.Equal(t, candidates[:4], m.LatestDelegates) assert.Equal(t, candidates[1], m.LatestBlockProducer) assert.Equal(t, candidates, m.Candidates) }
explode_data.jsonl/48877
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 32355, 10298, 72743, 1245, 13468, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 1444, 26222, 1669, 1281, 10556, 917, 11, 220, 20, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFilepathREST_Delete_NoFinalizers(t *testing.T) { f := newRESTFixture(t) defer f.tearDown() obj := &v1alpha1.Manifest{ ObjectMeta: metav1.ObjectMeta{ Name: "test-obj", }, } f.mustCreate(obj) w := f.watch("test-obj") defer w.Stop() // watch always immediately emits ADDED events for pre-existing objects, // so just ignore first event <-w.ResultChan() ctx, cancel := f.ctx() defer cancel() deletedObj, deletedImmediately, err := f.deleter().Delete(ctx, "test-obj", nil, nil) require.NoError(t, err) objMeta := f.mustMeta(deletedObj) assert.Equal(t, "test-obj", objMeta.GetName()) assert.Zero(t, objMeta.GetDeletionTimestamp()) assert.Nil(t, objMeta.GetDeletionGracePeriodSeconds()) assert.True(t, deletedImmediately) e := <-w.ResultChan() assert.Equal(t, watch.Deleted, e.Type) f.mustNotExist("test-obj") }
explode_data.jsonl/64540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 1703, 2343, 38307, 57418, 36989, 19357, 12230, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 38307, 18930, 1155, 340, 16867, 282, 31853, 59342, 2822, 22671, 1669, 609, 85, 16, 7141, 16, 72272, 515, 197, 23816, 12175, 25, 77...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMedianAvg(t *testing.T) { testsuit.TestSuit{ { Desc: "normal", Args: []float64{1.0, 3.0, 8.0, 2.0, 4.0}, WantResults: 3.0, }, { Desc: "null", Args: []float64{}, WantErr: "numbers is empty", }, }.Range(t, func(c *testsuit.TestCase) (interface{}, error) { numbers := c.Args.([]float64) return MedianAvg(numbers) }) }
explode_data.jsonl/14924
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 195 }
[ 2830, 3393, 79514, 39447, 1155, 353, 8840, 836, 8, 341, 18185, 72040, 8787, 62898, 515, 197, 197, 515, 298, 10957, 3300, 25, 286, 330, 8252, 756, 298, 197, 4117, 25, 286, 3056, 3649, 21, 19, 90, 16, 13, 15, 11, 220, 18, 13, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGreaterEqualUInt64(t *testing.T) { t.Parallel() match, err := path.GreaterEqual(&testType1{ UInt64: 5678, }, "uint64", "5677") require.Nil(t, err) require.True(t, match) match, err = path.GreaterEqual(&testType1{ UInt64: 5678, }, "uint64", "5678") require.Nil(t, err) require.True(t, match) match, err = path.GreaterEqual(&testType1{ UInt64: 5678, }, "uint64", "5679") require.Nil(t, err) require.False(t, match) }
explode_data.jsonl/78467
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 41366, 2993, 18777, 21, 19, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 47706, 11, 1848, 1669, 1815, 1224, 28362, 2993, 2099, 1944, 929, 16, 515, 197, 87190, 21, 19, 25, 220, 20, 21, 22, 23, 345, 197, 2137, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEthKeysPresenter_RenderTable(t *testing.T) { t.Parallel() var ( address = "0x5431F5F973781809D18643b87B44921b11355d81" ethBalance = assets.NewEth(1) linkBalance = assets.NewLinkFromJuels(2) isFunding = true createdAt = time.Now() updatedAt = time.Now().Add(time.Second) maxGasPriceWei = utils.NewBigI(12345) bundleID = cltest.DefaultOCRKeyBundleID buffer = bytes.NewBufferString("") r = cmd.RendererTable{Writer: buffer} ) p := cmd.EthKeyPresenter{ ETHKeyResource: presenters.ETHKeyResource{ JAID: presenters.NewJAID(bundleID), Address: address, EthBalance: ethBalance, LinkBalance: linkBalance, IsFunding: isFunding, CreatedAt: createdAt, UpdatedAt: updatedAt, MaxGasPriceWei: *maxGasPriceWei, }, } // Render a single resource require.NoError(t, p.RenderTable(r)) output := buffer.String() assert.Contains(t, output, address) assert.Contains(t, output, ethBalance.String()) assert.Contains(t, output, linkBalance.String()) assert.Contains(t, output, strconv.FormatBool(isFunding)) assert.Contains(t, output, createdAt.String()) assert.Contains(t, output, updatedAt.String()) assert.Contains(t, output, maxGasPriceWei.String()) // Render many resources buffer.Reset() ps := cmd.EthKeyPresenters{p} require.NoError(t, ps.RenderTable(r)) output = buffer.String() assert.Contains(t, output, address) assert.Contains(t, output, ethBalance.String()) assert.Contains(t, output, linkBalance.String()) assert.Contains(t, output, strconv.FormatBool(isFunding)) assert.Contains(t, output, createdAt.String()) assert.Contains(t, output, updatedAt.String()) assert.Contains(t, output, maxGasPriceWei.String()) }
explode_data.jsonl/79042
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 730 }
[ 2830, 3393, 65390, 8850, 33849, 42102, 2556, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 2405, 2399, 197, 63202, 286, 284, 330, 15, 87, 20, 19, 18, 16, 37, 20, 37, 24, 22, 18, 22, 23, 16, 23, 15, 24, 35, 16, 23, 21,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDelete(t *testing.T) { storage, _, server := newStorage(t) defer server.Terminate(t) defer storage.Store.DestroyFunc() test := registrytest.New(t, storage.Store).AllowCreateOnUpdate() test.TestDelete(validService()) }
explode_data.jsonl/69211
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 79 }
[ 2830, 3393, 6435, 1155, 353, 8840, 836, 8, 341, 197, 16172, 11, 8358, 3538, 1669, 501, 5793, 1155, 340, 16867, 3538, 836, 261, 34016, 1155, 340, 16867, 5819, 38047, 57011, 9626, 741, 18185, 1669, 19424, 1944, 7121, 1155, 11, 5819, 38047...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestConfigDiff(t *testing.T) { testCases := []struct { s, t *serverConfig diff string }{ // 1 {&serverConfig{}, nil, "Given configuration is empty"}, // 2 { &serverConfig{Credential: auth.Credentials{"u1", "p1"}}, &serverConfig{Credential: auth.Credentials{"u1", "p2"}}, "Credential configuration differs", }, // 3 {&serverConfig{Region: "us-east-1"}, &serverConfig{Region: "us-west-1"}, "Region configuration differs"}, // 4 {&serverConfig{Browser: false}, &serverConfig{Browser: true}, "Browser configuration differs"}, // 5 {&serverConfig{Domain: "domain1"}, &serverConfig{Domain: "domain2"}, "Domain configuration differs"}, // 6 { &serverConfig{StorageClass: storageClassConfig{storageClass{"1", 8}, storageClass{"2", 6}}}, &serverConfig{StorageClass: storageClassConfig{storageClass{"1", 8}, storageClass{"2", 4}}}, "StorageClass configuration differs", }, // 7 { &serverConfig{Notify: notifier{AMQP: map[string]target.AMQPArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{AMQP: map[string]target.AMQPArgs{"1": {Enable: false}}}}, "AMQP Notification configuration differs", }, // 8 { &serverConfig{Notify: notifier{NATS: map[string]target.NATSArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{NATS: map[string]target.NATSArgs{"1": {Enable: false}}}}, "NATS Notification configuration differs", }, // 9 { &serverConfig{Notify: notifier{Elasticsearch: map[string]target.ElasticsearchArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{Elasticsearch: map[string]target.ElasticsearchArgs{"1": {Enable: false}}}}, "ElasticSearch Notification configuration differs", }, // 10 { &serverConfig{Notify: notifier{Redis: map[string]target.RedisArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{Redis: map[string]target.RedisArgs{"1": {Enable: false}}}}, "Redis Notification configuration differs", }, // 11 { &serverConfig{Notify: notifier{PostgreSQL: map[string]target.PostgreSQLArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{PostgreSQL: 
map[string]target.PostgreSQLArgs{"1": {Enable: false}}}}, "PostgreSQL Notification configuration differs", }, // 12 { &serverConfig{Notify: notifier{Kafka: map[string]target.KafkaArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{Kafka: map[string]target.KafkaArgs{"1": {Enable: false}}}}, "Kafka Notification configuration differs", }, // 13 { &serverConfig{Notify: notifier{Webhook: map[string]target.WebhookArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{Webhook: map[string]target.WebhookArgs{"1": {Enable: false}}}}, "Webhook Notification configuration differs", }, // 14 { &serverConfig{Notify: notifier{MySQL: map[string]target.MySQLArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{MySQL: map[string]target.MySQLArgs{"1": {Enable: false}}}}, "MySQL Notification configuration differs", }, // 15 { &serverConfig{Notify: notifier{MQTT: map[string]target.MQTTArgs{"1": {Enable: true}}}}, &serverConfig{Notify: notifier{MQTT: map[string]target.MQTTArgs{"1": {Enable: false}}}}, "MQTT Notification configuration differs", }, } for i, testCase := range testCases { got := testCase.s.ConfigDiff(testCase.t) if got != testCase.diff { t.Errorf("Test %d: got %s expected %s", i+1, got, testCase.diff) } } }
explode_data.jsonl/21515
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1269 }
[ 2830, 3393, 2648, 21751, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 1903, 11, 259, 353, 4030, 2648, 198, 197, 80564, 914, 198, 197, 59403, 197, 197, 322, 220, 16, 198, 197, 197, 90, 5, 4030, 2648, 22655,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_detectContentTypeSecretAndEventKey(t *testing.T) { t.Log("All required headers - should handle") { header := http.Header{ "X-Event-Key": {"repo:refs_changed"}, "Content-Type": {"application/json"}, "X-Hub-Signature": {"secret"}, } contentType, secret, eventKey, err := detectContentTypeSecretAndEventKey(header) require.NoError(t, err) require.Equal(t, "application/json", contentType) require.Equal(t, "repo:refs_changed", eventKey) require.Equal(t, "secret", secret) } t.Log("No signature header - should handle") { header := http.Header{ "X-Event-Key": {"repo:refs_changed"}, "Content-Type": {"application/json"}, } contentType, secret, eventKey, err := detectContentTypeSecretAndEventKey(header) require.NoError(t, err) require.Equal(t, "application/json", contentType) require.Equal(t, "repo:refs_changed", eventKey) require.Equal(t, "", secret) } t.Log("Missing X-Event-Key header") { header := http.Header{ "Content-Type": {"application/json"}, } contentType, secret, eventKey, err := detectContentTypeSecretAndEventKey(header) require.EqualError(t, err, "No X-Event-Key Header found") require.Equal(t, "", contentType) require.Equal(t, "", eventKey) require.Equal(t, "", secret) } t.Log("Missing Content-Type header") { header := http.Header{ "X-Event-Key": {"repo:refs_changed"}, } contentType, secret, eventKey, err := detectContentTypeSecretAndEventKey(header) require.EqualError(t, err, "No Content-Type Header found") require.Equal(t, "", contentType) require.Equal(t, "", eventKey) require.Equal(t, "", secret) } t.Log("Bitbucket Server UTF8 charset Content-Type header") { header := http.Header{ "Content-Type": {"application/json; charset=utf-8"}, "X-Event-Key": {"repo:refs_changed"}, } contentType, secret, eventKey, err := detectContentTypeSecretAndEventKey(header) require.NoError(t, err) require.Equal(t, "application/json; charset=utf-8", contentType) require.Equal(t, "repo:refs_changed", eventKey) require.Equal(t, "", secret) } }
explode_data.jsonl/62886
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 807 }
[ 2830, 3393, 56457, 29504, 19773, 3036, 1556, 1592, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 445, 2403, 2567, 7102, 481, 1265, 3705, 1138, 197, 515, 197, 20883, 1669, 1758, 15753, 515, 298, 197, 1, 55, 12, 1556, 94321, 788, 257, 5212,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTranscodingProgress(t *testing.T) { var inputPath = "/tmp/ffmpeg/avi" var outputPath = "/tmp/ffmpeg/out/avi.mp4" trans := new(transcoder.Transcoder) err := trans.Initialize(inputPath, outputPath) assert.Nil(t, err) done := trans.Run(true) for val := range trans.Output() { if &val != nil { break } } err = <-done assert.Nil(t, err) }
explode_data.jsonl/57667
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 3167, 48367, 9496, 1155, 353, 8840, 836, 8, 1476, 2405, 1946, 1820, 284, 3521, 5173, 14, 72422, 14, 6190, 698, 2405, 95017, 284, 3521, 5173, 14, 72422, 48316, 14, 6190, 16870, 19, 1837, 72453, 1669, 501, 33089, 40170, 11815,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRPC_QuerySupportedChains(t *testing.T) { codec.UpgradeHeight = 7000 _, _, cleanup := NewInMemoryTendermintNode(t, oneValTwoNodeGenesisState()) _, stopCli, evtChan := subscribeTo(t, tmTypes.EventNewBlock) <-evtChan // Wait for block var params = HeightParams{ Height: 0, } q := newQueryRequest("supportedchains", newBody(params)) rec := httptest.NewRecorder() SupportedChains(rec, q, httprouter.Params{}) resp := getResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) assert.True(t, strings.Contains(resp, dummyChainsHash)) <-evtChan // Wait for block params = HeightParams{ Height: 2, } q = newQueryRequest("supportedchains", newBody(params)) rec = httptest.NewRecorder() SupportedChains(rec, q, httprouter.Params{}) resp = getResponse(rec) assert.NotNil(t, resp) assert.NotEmpty(t, resp) assert.True(t, strings.Contains(resp, dummyChainsHash)) cleanup() stopCli() }
explode_data.jsonl/44719
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 345 }
[ 2830, 3393, 29528, 48042, 34636, 1143, 1735, 1155, 353, 8840, 836, 8, 341, 43343, 66, 13, 43861, 3640, 284, 220, 22, 15, 15, 15, 198, 197, 6878, 8358, 21290, 1669, 1532, 641, 10642, 51, 1659, 67791, 1955, 1155, 11, 825, 2208, 11613, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFieldDateValue(t *testing.T) { f := newField("Name", "D", 8, 0) f.Offset = 3 recordBuf := []byte(" 20200923 ") d := time.Date(2020, 9, 23, 0, 0, 0, 0, time.UTC) v := f.dateValue(recordBuf) require.Equal(t, d, v) }
explode_data.jsonl/79451
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 1877, 1916, 1130, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 1877, 445, 675, 497, 330, 35, 497, 220, 23, 11, 220, 15, 340, 1166, 61958, 284, 220, 18, 198, 71952, 15064, 1669, 3056, 3782, 445, 256, 220, 17, 15, 17, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnpackGetBindingRequest(t *testing.T) { instanceID := "i1234" bindingID := "b1234" unpackReq, err := unpackGetBindingRequest( createFakeGetBindingRequest(instanceID, bindingID), map[string]string{ "instance_id": instanceID, "binding_id": bindingID, }, ) if err != nil { t.Fatalf("Unpacking get binding request: %v", err) } if unpackReq.InstanceID != instanceID { t.Fatalf("InstanceID was unpacked unsuccessfully. Expecting %s got %s", instanceID, unpackReq.InstanceID) } if unpackReq.BindingID != bindingID { t.Fatalf("BindingID was unpacked unsuccessfully. Expecting %s got %s", bindingID, unpackReq.BindingID) } }
explode_data.jsonl/79739
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 245 }
[ 2830, 3393, 1806, 4748, 1949, 15059, 1900, 1155, 353, 8840, 836, 8, 341, 56256, 915, 1669, 330, 72, 16, 17, 18, 19, 698, 2233, 3961, 915, 1669, 330, 65, 16, 17, 18, 19, 1837, 20479, 4748, 27234, 11, 1848, 1669, 31065, 1949, 15059, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func Test_StoreAll(t *testing.T) { store, mock, close_ := store(t) defer close_() tests := []testQuery{ { "SELECT * FROM variables", []query.Option{}, sqlmock.NewRows(variableCols), []driver.Value{}, []database.Model{}, }, { "SELECT * FROM variables WHERE (namespace_id IN (SELECT id FROM namespaces WHERE (root_id IN (SELECT namespace_id FROM namespace_collaborators WHERE (user_id = $1) UNION SELECT id FROM namespaces WHERE (user_id = $2)))) OR user_id = $3)", []query.Option{}, sqlmock.NewRows(variableCols), []driver.Value{1, 1, 1}, []database.Model{userModel}, }, { "SELECT * FROM variables WHERE (namespace_id = $1)", []query.Option{}, sqlmock.NewRows(variableCols), []driver.Value{1}, []database.Model{namespaceModel}, }, } for i, test := range tests { mock.ExpectQuery(regexp.QuoteMeta(test.query)).WithArgs(test.args...).WillReturnRows(test.rows) store.Bind(test.models...) if _, err := store.All(test.opts...); err != nil { t.Errorf("tests[%d] - %s\n", i, errors.Cause(err)) } store.User = nil store.Namespace = nil } }
explode_data.jsonl/29313
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 453 }
[ 2830, 3393, 92684, 2403, 1155, 353, 8840, 836, 8, 341, 57279, 11, 7860, 11, 3265, 62, 1669, 3553, 1155, 340, 16867, 3265, 62, 2822, 78216, 1669, 3056, 1944, 2859, 515, 197, 197, 515, 298, 197, 1, 4858, 353, 4295, 7332, 756, 298, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestWorkloadLedger(t *testing.T) { templates, err := testutils.LoadTemplates("test-data-alltemplates.yaml") if err != nil || len(templates) < 1 { t.Errorf("cannot load test templates! %v", err) return } ld := NewJSONLedger("workload") summaries := ld.Summarize(templates) expected := []Summary{ Summary{ID: "generic"}, Summary{ID: "highperformance"}, } checkSummaries(t, summaries, expected) }
explode_data.jsonl/45683
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 6776, 1078, 60850, 1389, 1155, 353, 8840, 836, 8, 341, 197, 15463, 11, 1848, 1669, 1273, 6031, 13969, 51195, 445, 1944, 13945, 22346, 15463, 33406, 1138, 743, 1848, 961, 2092, 1369, 2422, 7, 15463, 8, 366, 220, 16, 341, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreatetHelp(t *testing.T) { test := setupTest(t, newCreateCommand) test.client.Help() require.Equal(t, `Usage of federation create: -bundleEndpointProfile string Endpoint profile type (either "https_web" or "https_spiffe") -bundleEndpointURL string URL of the SPIFFE bundle endpoint that provides the trust bundle (must use the HTTPS protocol) -data string Path to a file containing federation relationships in JSON format (optional). If set to '-', read the JSON from stdin. -endpointSpiffeID string SPIFFE ID of the SPIFFE bundle endpoint server. Only used for 'spiffe' profile.`+common.AddrUsage+ ` -trustDomain string Name of the trust domain to federate with (e.g., example.org) -trustDomainBundleFormat string The format of the bundle data (optional). Either "pem" or "spiffe". (default "pem") -trustDomainBundlePath string Path to the trust domain bundle data (optional). `, test.stderr.String()) }
explode_data.jsonl/17474
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 309 }
[ 2830, 3393, 30744, 295, 12689, 1155, 353, 8840, 836, 8, 341, 18185, 1669, 6505, 2271, 1155, 11, 501, 4021, 4062, 340, 18185, 6581, 70882, 2822, 17957, 12808, 1155, 11, 1565, 14783, 315, 79275, 1855, 510, 220, 481, 34518, 27380, 8526, 91...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeviceTypeFailure(t *testing.T) { var ociSpec specs.Spec invalidDeviceType := "f" ociSpec.Linux = &specs.Linux{} ociSpec.Linux.Devices = []specs.LinuxDevice{ { Path: "/dev/vfio", Type: invalidDeviceType, }, } _, err := containerDeviceInfos(ociSpec) assert.NotNil(t, err, "This test should fail as device type [%s] is invalid ", invalidDeviceType) }
explode_data.jsonl/44048
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 144 }
[ 2830, 3393, 6985, 929, 17507, 1155, 353, 8840, 836, 8, 341, 2405, 93975, 8327, 32247, 36473, 271, 197, 11808, 6985, 929, 1669, 330, 69, 698, 197, 2119, 8327, 1214, 19559, 284, 609, 94531, 1214, 19559, 16094, 197, 2119, 8327, 1214, 19559...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDPT_251600(t *testing.T) { var buf []byte var dst DPT_251600 sources := []DPT_251600{ {Red: 255, Green: 96, Blue: 0, White: 18, RedValid: true, GreenValid: true, BlueValid: true, WhiteValid: true}, {Red: 255, Green: 96, Blue: 0, White: 18, RedValid: false, GreenValid: false, BlueValid: false, WhiteValid: false}, {Red: 255, Green: 96, Blue: 0, White: 18, RedValid: false, GreenValid: true, BlueValid: true, WhiteValid: true}, {Red: 255, Green: 96, Blue: 0, White: 18, RedValid: true, GreenValid: false, BlueValid: true, WhiteValid: true}, {Red: 255, Green: 96, Blue: 0, White: 18, RedValid: true, GreenValid: true, BlueValid: false, WhiteValid: true}, {Red: 255, Green: 96, Blue: 0, White: 18, RedValid: true, GreenValid: true, BlueValid: true, WhiteValid: false}, } for _, src := range sources { buf = src.Pack() _ = dst.Unpack(buf) if !reflect.DeepEqual(src, dst) { fmt.Printf("%+v\n", src) fmt.Printf("%+v\n", dst) t.Errorf("Value \"%s\" after pack/unpack for DPT_251600 differs. Original value was \"%v\"!", dst, src) } } }
explode_data.jsonl/15939
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 416 }
[ 2830, 3393, 35, 2828, 62, 17, 20, 16, 21, 15, 15, 1155, 353, 8840, 836, 8, 341, 2405, 6607, 3056, 3782, 198, 2405, 10648, 422, 2828, 62, 17, 20, 16, 21, 15, 15, 198, 1903, 2360, 1669, 3056, 35, 2828, 62, 17, 20, 16, 21, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetOrgUsersByOrgID(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) orgUsers, err := GetOrgUsersByOrgID(&FindOrgMembersOpts{ ListOptions: ListOptions{}, OrgID: 3, PublicOnly: false, }) assert.NoError(t, err) if assert.Len(t, orgUsers, 3) { assert.Equal(t, OrgUser{ ID: orgUsers[0].ID, OrgID: 3, UID: 2, IsPublic: true}, *orgUsers[0]) assert.Equal(t, OrgUser{ ID: orgUsers[1].ID, OrgID: 3, UID: 4, IsPublic: false}, *orgUsers[1]) } orgUsers, err = GetOrgUsersByOrgID(&FindOrgMembersOpts{ ListOptions: ListOptions{}, OrgID: NonexistentID, PublicOnly: false, }) assert.NoError(t, err) assert.Len(t, orgUsers, 0) }
explode_data.jsonl/71068
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 345 }
[ 2830, 3393, 1949, 42437, 7137, 1359, 42437, 915, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 12367, 87625, 7137, 11, 1848, 1669, 2126, 42437, 7137, 1359, 42437, 915, 2099, 9885, 42437, 24371, 43451, 515, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestInclusionAndConsistencyProofs(t *testing.T) { tree, err := Open("ahtree_test", DefaultOptions().WithSynced(false)) require.NoError(t, err) defer os.RemoveAll("ahtree_test") N := 1024 for i := 1; i <= N; i++ { _, r, err := tree.Append([]byte{byte(i)}) require.NoError(t, err) iproof, err := tree.InclusionProof(uint64(i), uint64(i)) require.NoError(t, err) h := sha256.Sum256([]byte{LeafPrefix, byte(i)}) verifies := VerifyInclusion(iproof, uint64(i), uint64(i), h, r) require.True(t, verifies) } _, err = tree.InclusionProof(2, 1) require.Equal(t, ErrIllegalArguments, err) _, err = tree.ConsistencyProof(2, 1) require.Equal(t, ErrIllegalArguments, err) for i := 1; i <= N; i++ { for j := i; j <= N; j++ { iproof, err := tree.InclusionProof(uint64(i), uint64(j)) require.NoError(t, err) jroot, err := tree.RootAt(uint64(j)) require.NoError(t, err) h := sha256.Sum256([]byte{LeafPrefix, byte(i)}) verifies := VerifyInclusion(iproof, uint64(i), uint64(j), h, jroot) require.True(t, verifies) cproof, err := tree.ConsistencyProof(uint64(i), uint64(j)) require.NoError(t, err) iroot, err := tree.RootAt(uint64(i)) require.NoError(t, err) verifies = VerifyConsistency(cproof, uint64(i), uint64(j), iroot, jroot) require.True(t, verifies) } } for i := 1; i <= N; i++ { iproof, err := tree.InclusionProof(uint64(i), uint64(N)) require.NoError(t, err) h := sha256.Sum256([]byte{LeafPrefix, byte(i)}) root, err := tree.RootAt(uint64(i)) require.NoError(t, err) verifies := VerifyLastInclusion(iproof, uint64(i), h, root) if i < N { require.False(t, verifies) } else { require.True(t, verifies) } } err = tree.Close() require.NoError(t, err) }
explode_data.jsonl/49663
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 778 }
[ 2830, 3393, 641, 8957, 3036, 15220, 47094, 31076, 82, 1155, 353, 8840, 836, 8, 341, 51968, 11, 1848, 1669, 5264, 445, 64, 426, 765, 4452, 497, 7899, 3798, 1005, 2354, 12154, 291, 3576, 1171, 17957, 35699, 1155, 11, 1848, 340, 16867, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidateGraph_Success(t *testing.T) { tests := []struct { name string tasks []PipelineTask }{{ name: "valid dependency graph with multiple tasks", tasks: []PipelineTask{{ Name: "foo", TaskRef: &TaskRef{Name: "foo-task"}, }, { Name: "bar", TaskRef: &TaskRef{Name: "bar-task"}, }, { Name: "foo1", TaskRef: &TaskRef{Name: "foo-task"}, RunAfter: []string{"foo"}, }, { Name: "bar1", TaskRef: &TaskRef{Name: "bar-task"}, RunAfter: []string{"bar"}, }, { Name: "foo-bar", TaskRef: &TaskRef{Name: "bar-task"}, RunAfter: []string{"foo1", "bar1"}, }}, }} for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { err := validateGraph(tt.tasks) if err != nil { t.Errorf("Pipeline.validateGraph() returned error: %v", err) } }) } }
explode_data.jsonl/26531
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 337 }
[ 2830, 3393, 17926, 11212, 87161, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 220, 914, 198, 197, 3244, 4604, 3056, 34656, 6262, 198, 197, 15170, 515, 197, 11609, 25, 330, 1891, 24036, 4771, 448, 5248, 9079, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTokenFromQueryString(t *testing.T) { // the middleware to test authMiddleware, _ := New(&GinJWTMiddleware{ Realm: "test zone", Key: key, Timeout: time.Hour, Authenticator: defaultAuthenticator, Unauthorized: func(c *gin.Context, code int, message string) { c.String(code, message) }, TokenLookup: "query:token", }) handler := ginHandler(authMiddleware) r := gofight.New() userToken, _, _ := authMiddleware.TokenGenerator(MapClaims{ "identity": "admin", }) r.GET("/auth/refresh_token"). SetHeader(gofight.H{ "Authorization": "Bearer " + userToken, }). Run(handler, func(r gofight.HTTPResponse, rq gofight.HTTPRequest) { assert.Equal(t, http.StatusUnauthorized, r.Code) }) r.GET("/auth/refresh_token?token="+userToken). SetHeader(gofight.H{ "Authorization": "Bearer " + userToken, }). Run(handler, func(r gofight.HTTPResponse, rq gofight.HTTPRequest) { assert.Equal(t, http.StatusOK, r.Code) }) }
explode_data.jsonl/64447
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 417 }
[ 2830, 3393, 3323, 3830, 67001, 1155, 353, 8840, 836, 8, 341, 197, 322, 279, 29679, 311, 1273, 198, 78011, 24684, 11, 716, 1669, 1532, 2099, 38, 258, 55172, 24684, 515, 197, 197, 64290, 25, 260, 330, 1944, 10143, 756, 197, 55242, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadPartitionContents(t *testing.T) { table := GetValidTable() maxPart := len(table.Partitions) request := maxPart - 1 var b bytes.Buffer writer := bufio.NewWriter(&b) size := 100 b2 := make([]byte, size, size) rand.Read(b2) f := &testhelper.FileImpl{ Reader: func(b []byte, offset int64) (int, error) { copy(b, b2) return size, io.EOF }, } read, err := table.Partitions[request].ReadContents(f, writer) if read != int64(size) { t.Errorf("Returned %d bytes read instead of %d", read, size) } if err != nil { t.Errorf("Error was not nil") } writer.Flush() if bytes.Compare(b.Bytes(), b2) != 0 { t.Errorf("Mismatched bytes data") t.Log(b.Bytes()) t.Log(b2) } }
explode_data.jsonl/61030
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 4418, 49978, 14803, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 2126, 4088, 2556, 741, 22543, 5800, 1669, 2422, 15761, 52250, 5930, 340, 23555, 1669, 1932, 5800, 481, 220, 16, 198, 2405, 293, 5820, 22622, 198, 38959, 1669, 96...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHgGetName(t *testing.T) { repo, client := helpers.CreateHgRepo(t, "hg-repo") defer helpers.CleanupHgRepo(t, client) assert.Equal(t, "hg-repo", repo.GetName()) }
explode_data.jsonl/57183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 39, 70, 49403, 1155, 353, 8840, 836, 8, 341, 17200, 5368, 11, 2943, 1669, 30187, 7251, 39, 70, 25243, 1155, 11, 330, 66602, 5504, 5368, 1138, 16867, 30187, 727, 60639, 39, 70, 25243, 1155, 11, 2943, 692, 6948, 12808, 1155,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIsFuzzyMatch(t *testing.T) { isFuzzyMatchTestcase(t, `1`, `2`, false) isFuzzyMatchTestcase(t, `"hello"`, `"hi there"`, false) isFuzzyMatchTestcase(t, `true`, `false`, false) isFuzzyMatchTestcase(t, `1`, `1`, true) isFuzzyMatchTestcase(t, `true`, `true`, true) isFuzzyMatchTestcase(t, `[]`, `true`, false) isFuzzyMatchTestcase(t, `[1]`, `[1,2]`, false) isFuzzyMatchTestcase(t, `[1]`, `[true]`, false) isFuzzyMatchTestcase(t, `{}`, `{}`, true) isFuzzyMatchTestcase(t, `{"zip":1}`, `{}`, false) isFuzzyMatchTestcase(t, `{"zip":1}`, `{"zip":1,"zap":2}`, false) isFuzzyMatchTestcase(t, `{"zip":true}`, `{"zip":false}`, false) isFuzzyMatchTestcase(t, `{"zip":true}`, `{"zap":true}`, false) isFuzzyMatchTestcase(t, `{"zip":["a"]}`, `{"zip":["a"]}`, true) isFuzzyMatchTestcase(t, `null`, `null`, true) isFuzzyMatchTestcase(t, `null`, `1`, false) isFuzzyMatchTestcase(t, `{"a":1,"b":2}`, `{"b":2,"a":1}`, true) isFuzzyMatchTestcase(t, `123`, `"?? description"`, true) isFuzzyMatchTestcase(t, `"123"`, `"?? description"`, true) isFuzzyMatchTestcase(t, `"\/[abc]\/"`, `"a"`, true) isFuzzyMatchTestcase(t, `"a"`, `"\/[abc]\/"`, true) }
explode_data.jsonl/57168
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 539 }
[ 2830, 3393, 3872, 37, 34758, 8331, 1155, 353, 8840, 836, 8, 341, 19907, 37, 34758, 8331, 2271, 5638, 1155, 11, 1565, 16, 7808, 1565, 17, 7808, 895, 340, 19907, 37, 34758, 8331, 2271, 5638, 1155, 11, 53305, 14990, 1, 7808, 53305, 6023,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewObjectPool(t *testing.T) { cfg := testcfg.Build(t) locator := config.NewLocator(cfg) _, err := NewObjectPool(cfg, locator, nil, nil, cfg.Storages[0].Name, gittest.NewObjectPoolName(t)) require.NoError(t, err) _, err = NewObjectPool(cfg, locator, nil, nil, "mepmep", gittest.NewObjectPoolName(t)) require.Error(t, err, "creating pool in storage that does not exist should fail") }
explode_data.jsonl/44263
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 3564, 1190, 10551, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 1273, 14072, 25212, 1155, 692, 197, 68033, 1669, 2193, 7121, 33831, 28272, 692, 197, 6878, 1848, 1669, 1532, 1190, 10551, 28272, 11, 47117, 11, 2092, 11, 2092, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGitGetter_setupGitEnv_sshKey(t *testing.T) { if runtime.GOOS == "windows" { t.Skipf("skipping on windows since the test requires sh") return } cmd := exec.Command("/bin/sh", "-c", "echo $GIT_SSH_COMMAND") setupGitEnv(cmd, "/tmp/foo.pem") out, err := cmd.Output() if err != nil { t.Fatal(err) } actual := strings.TrimSpace(string(out)) if actual != "ssh -i /tmp/foo.pem" { t.Fatalf("unexpected GIT_SSH_COMMAND: %q", actual) } }
explode_data.jsonl/39701
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 46562, 31485, 21363, 46562, 14359, 82805, 1592, 1155, 353, 8840, 836, 8, 341, 743, 15592, 97574, 3126, 621, 330, 27077, 1, 341, 197, 3244, 57776, 69, 445, 4886, 5654, 389, 11030, 2474, 279, 1273, 7460, 557, 1138, 197, 853, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestValidateMACAddress(t *testing.T) { if _, err := ValidateMACAddress(`92:d0:c6:0a:29:33`); err != nil { t.Fatalf("ValidateMACAddress(`92:d0:c6:0a:29:33`) got %s", err) } if _, err := ValidateMACAddress(`92:d0:c6:0a:33`); err == nil { t.Fatalf("ValidateMACAddress(`92:d0:c6:0a:33`) succeeded; expected failure on invalid MAC") } if _, err := ValidateMACAddress(`random invalid string`); err == nil { t.Fatalf("ValidateMACAddress(`random invalid string`) succeeded; expected failure on invalid MAC") } }
explode_data.jsonl/26386
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 210 }
[ 2830, 3393, 17926, 25788, 4286, 1155, 353, 8840, 836, 8, 341, 743, 8358, 1848, 1669, 23282, 25788, 4286, 5809, 24, 17, 40422, 15, 48031, 21, 25, 15, 64, 25, 17, 24, 25, 18, 18, 63, 1215, 1848, 961, 2092, 341, 197, 3244, 30762, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestRequest_SkillID(t *testing.T) { tests := []struct { name string request *alice.Request want string }{ { name: "", request: getReq(0), want: "e03f8d5b-35ef-4d57-9450-b721ca17a6c3", }, { name: "", request: getReq(1), want: "e03f8d5b-35ef-4d57-9450-b721ca17a6c3", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { req := tt.request if got := req.SkillID(); got != tt.want { t.Errorf("Request.SkillID() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/18233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 1900, 1098, 10851, 915, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 23555, 353, 63195, 9659, 198, 197, 50780, 262, 914, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 262, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTopicRackHelpers(t *testing.T) { testBrokers := []BrokerInfo{ { ID: 1, Rack: "rack1", }, { ID: 2, Rack: "rack2", }, { ID: 3, Rack: "rack1", }, { ID: 4, Rack: "rack2", }, { ID: 5, Rack: "rack3", }, } testTopic := TopicInfo{ Config: map[string]string{ "key": "value", "retention.ms": "36000000", }, Partitions: []PartitionInfo{ { Topic: "topic1", ID: 0, Leader: 1, Replicas: []int{1, 2, 5}, ISR: []int{1, 2}, }, { Topic: "topic1", ID: 1, Leader: 2, Replicas: []int{2, 4}, ISR: []int{1, 2}, }, { Topic: "topic1", ID: 2, Leader: 3, Replicas: []int{3, 5}, ISR: []int{1, 2}, }, }, } assert.Equal(t, 10*time.Hour, testTopic.Retention()) assert.Equal(t, 3, testTopic.MaxReplication()) assert.Equal(t, 2, testTopic.MaxISR()) assert.Equal(t, 3, MaxReplication([]TopicInfo{testTopic})) assert.True(t, HasLeaders([]TopicInfo{testTopic})) assert.Equal(t, []int{0, 1, 2}, testTopic.PartitionIDs()) brokerRacks := BrokerRacks(testBrokers) minRacks, maxRacks, err := testTopic.RackCounts(brokerRacks) assert.NoError(t, err) assert.Equal(t, minRacks, 1) assert.Equal(t, maxRacks, 3) numRacks, err := testTopic.Partitions[0].NumRacks(brokerRacks) assert.NoError(t, err) assert.Equal(t, 3, numRacks) racks, err := testTopic.Partitions[0].Racks(brokerRacks) assert.NoError(t, err) assert.Equal(t, []string{"rack1", "rack2", "rack3"}, racks) }
explode_data.jsonl/70029
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 824 }
[ 2830, 3393, 26406, 49, 473, 28430, 1155, 353, 8840, 836, 8, 341, 18185, 26272, 26177, 1669, 3056, 65545, 1731, 515, 197, 197, 515, 298, 29580, 25, 256, 220, 16, 345, 298, 11143, 473, 25, 330, 71685, 16, 756, 197, 197, 1583, 197, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAccDatabricksDBFSFile_CreateViaSource(t *testing.T) { content := acctest.RandString(10) source := qa.TestCreateTempFile(t, content) defer os.Remove(source) b64, err := GetLocalFileB64(source) assert.NoError(t, err, err) md5, err := GetMD5(b64) assert.NoError(t, err, err) content2 := acctest.RandString(10) source2 := qa.TestCreateTempFile(t, content2) defer os.Remove(source2) source2B64, err := GetLocalFileB64(source2) assert.NoError(t, err, err) source2Md5, err := GetMD5(source2B64) assert.NoError(t, err, err) acceptance.AccTest(t, resource.TestCase{ CheckDestroy: testDBFSFileResourceDestroy, Steps: []resource.TestStep{ { Config: qa.EnvironmentTemplate(t, ` resource "databricks_dbfs_file" "file_1" { source = "{var.SOURCE}" content_b64_md5 = md5(filebase64("{var.SOURCE}")) path = "/tmp/tf-test/file-source-{var.RANDOM}" overwrite = "false" mkdirs = "true" validate_remote_file = "true" }`, map[string]string{ "SOURCE": source, }), Check: resource.ComposeTestCheckFunc( // query the API to retrieve the tokenInfo object testCheckDBFSFileResourceExists("databricks_dbfs_file.file_1", b64, t), resource.TestCheckResourceAttr("databricks_dbfs_file.file_1", "content_b64_md5", md5), ), Destroy: false, }, { Config: qa.EnvironmentTemplate(t, ` resource "databricks_dbfs_file" "file_1" { source = "{var.SOURCE}" content_b64_md5 = md5(filebase64("{var.SOURCE}")) path = "/tmp/tf-test/file-source-{var.RANDOM}" overwrite = "false" mkdirs = "true" validate_remote_file = "true" }`, map[string]string{ "SOURCE": source2, }), Check: resource.ComposeTestCheckFunc( // query the API to retrieve the tokenInfo object testCheckDBFSFileResourceExists("databricks_dbfs_file.file_1", source2B64, t), resource.TestCheckResourceAttr("databricks_dbfs_file.file_1", "content_b64_md5", source2Md5), ), Destroy: false, }, }, }) }
explode_data.jsonl/58533
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 886 }
[ 2830, 3393, 14603, 35, 2096, 77789, 3506, 8485, 1703, 34325, 54428, 3608, 1155, 353, 8840, 836, 8, 341, 27751, 1669, 1613, 67880, 2013, 437, 703, 7, 16, 15, 340, 47418, 1669, 88496, 8787, 4021, 12151, 1703, 1155, 11, 2213, 340, 16867, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRateLimitingQueue(t *testing.T) { limiter := NewItemExponentialFailureRateLimiter(1*time.Millisecond, 1*time.Second) queue := NewRateLimitingQueue(limiter).(*rateLimitingType) fakeClock := clock.NewFakeClock(time.Now()) delayingQueue := &delayingType{ Interface: New(), clock: fakeClock, heartbeat: fakeClock.NewTicker(maxWait), stopCh: make(chan struct{}), waitingForAddCh: make(chan *waitFor, 1000), metrics: newRetryMetrics(""), } queue.DelayingInterface = delayingQueue queue.AddRateLimited("one") waitEntry := <-delayingQueue.waitingForAddCh if e, a := 1*time.Millisecond, waitEntry.readyAt.Sub(fakeClock.Now()); e != a { t.Errorf("expected %v, got %v", e, a) } queue.AddRateLimited("one") waitEntry = <-delayingQueue.waitingForAddCh if e, a := 2*time.Millisecond, waitEntry.readyAt.Sub(fakeClock.Now()); e != a { t.Errorf("expected %v, got %v", e, a) } if e, a := 2, queue.NumRequeues("one"); e != a { t.Errorf("expected %v, got %v", e, a) } queue.AddRateLimited("two") waitEntry = <-delayingQueue.waitingForAddCh if e, a := 1*time.Millisecond, waitEntry.readyAt.Sub(fakeClock.Now()); e != a { t.Errorf("expected %v, got %v", e, a) } queue.AddRateLimited("two") waitEntry = <-delayingQueue.waitingForAddCh if e, a := 2*time.Millisecond, waitEntry.readyAt.Sub(fakeClock.Now()); e != a { t.Errorf("expected %v, got %v", e, a) } queue.Forget("one") if e, a := 0, queue.NumRequeues("one"); e != a { t.Errorf("expected %v, got %v", e, a) } queue.AddRateLimited("one") waitEntry = <-delayingQueue.waitingForAddCh if e, a := 1*time.Millisecond, waitEntry.readyAt.Sub(fakeClock.Now()); e != a { t.Errorf("expected %v, got %v", e, a) } }
explode_data.jsonl/37567
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 716 }
[ 2830, 3393, 11564, 16527, 287, 7554, 1155, 353, 8840, 836, 8, 341, 197, 4659, 2015, 1669, 1532, 1234, 840, 59825, 17507, 11564, 43, 17700, 7, 16, 77053, 71482, 11, 220, 16, 77053, 32435, 340, 46993, 1669, 1532, 11564, 16527, 287, 7554, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestPrintf(t *testing.T) { t.Run("enabled output", func(t *testing.T) { pterm.Output = true for _, randomString := range internal.RandomStrings { out := captureStdout(func(w io.Writer) { pterm.Printf(randomString) }) testza.AssertEqual(t, randomString, out) } out := captureStdout(func(w io.Writer) { pterm.Printf("Hello, %s!", "World") }) testza.AssertEqual(t, "Hello, World!", out) }) t.Run("disabled output", func(t *testing.T) { pterm.Output = false for _, randomString := range internal.RandomStrings { out := captureStdout(func(w io.Writer) { pterm.Printf(randomString) }) testza.AssertEqual(t, "", out) } out := captureStdout(func(w io.Writer) { pterm.Printf("Hello, %s!", "World") }) testza.AssertEqual(t, "", out) pterm.Output = true }) }
explode_data.jsonl/49135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 42736, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 15868, 2550, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 60796, 4195, 34246, 284, 830, 198, 197, 2023, 8358, 4194, 703, 1669, 2088, 5306, 26709, 20859, 341, 298, 139...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTransmuxer_Join(t *testing.T) { run, dir := setupTest(t) defer os.RemoveAll(dir) cmd := ` # run segmenter and sanity check frame counts . Hardcode for now. ffmpeg -loglevel warning -i "$1"/../transcoder/test.ts -c:a copy -c:v copy -f hls test.m3u8 ffprobe -loglevel warning -select_streams v -count_frames -show_streams test0.ts | grep nb_read_frames=120 ffprobe -loglevel warning -select_streams v -count_frames -show_streams test1.ts | grep nb_read_frames=120 ffprobe -loglevel warning -select_streams v -count_frames -show_streams test2.ts | grep nb_read_frames=120 ffprobe -loglevel warning -select_streams v -count_frames -show_streams test3.ts | grep nb_read_frames=120 ` run(cmd) tc := NewTranscoder() out := []TranscodeOptions{ { Oname: fmt.Sprintf("%s/out.mp4", dir), VideoEncoder: ComponentOptions{ Name: "copy", }, AudioEncoder: ComponentOptions{ Name: "copy", }, Profile: VideoProfile{Format: FormatNone}, Muxer: ComponentOptions{ Name: "mp4", Opts: map[string]string{"movflags": "frag_keyframe+negative_cts_offsets+omit_tfhd_offset+disable_chpl+default_base_moof"}, }, }, } for i := 0; i < 4; i++ { in := &TranscodeOptionsIn{ Fname: fmt.Sprintf("%s/test%d.ts", dir, i), Transmuxing: true, } res, err := tc.Transcode(in, out) if err != nil { t.Fatal(err) } if res.Decoded.Frames != 120 { t.Error(in.Fname, " Mismatched frame count: expected 120 got ", res.Decoded.Frames) } } tc.StopTranscoder() cmd = ` ffprobe -loglevel warning -select_streams v -count_frames -show_streams out.mp4 | grep nb_read_frames=480 ` run(cmd) }
explode_data.jsonl/14963
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 704 }
[ 2830, 3393, 3167, 75066, 261, 10598, 1961, 1155, 353, 8840, 836, 8, 341, 56742, 11, 5419, 1669, 6505, 2271, 1155, 340, 16867, 2643, 84427, 14161, 340, 25920, 1669, 22074, 262, 671, 1598, 10238, 261, 323, 46842, 1779, 4034, 14579, 659, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestHypervisorDefaultsVhostUserStorePath(t *testing.T) { assert := assert.New(t) h := hypervisor{} vhostUserStorePath := h.vhostUserStorePath() assert.Equal(vhostUserStorePath, defaultVhostUserStorePath, "default vhost-user store path wrong") testVhostUserStorePath := "/test/vhost/user/store/path" h = hypervisor{ VhostUserStorePath: testVhostUserStorePath, } vhostUserStorePath = h.vhostUserStorePath() assert.Equal(vhostUserStorePath, testVhostUserStorePath, "custom vhost-user store path wrong") }
explode_data.jsonl/11738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 39, 1082, 31396, 16273, 53, 3790, 1474, 6093, 1820, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 9598, 1669, 9751, 31396, 16094, 5195, 3790, 1474, 6093, 1820, 1669, 305, 3133, 3790, 1474, 6093, 1820, 741,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetSignatureVerificationResult(t *testing.T) { // Commit with signature and verification requested { service := newServiceWithSignature("../..") src := argoappv1.ApplicationSource{Path: "manifests/base"} q := apiclient.ManifestRequest{Repo: &argoappv1.Repository{}, ApplicationSource: &src, VerifySignature: true} res, err := service.GenerateManifest(context.Background(), &q) assert.NoError(t, err) assert.Equal(t, testSignature, res.VerifyResult) } // Commit with signature and verification not requested { service := newServiceWithSignature("../..") src := argoappv1.ApplicationSource{Path: "manifests/base"} q := apiclient.ManifestRequest{Repo: &argoappv1.Repository{}, ApplicationSource: &src} res, err := service.GenerateManifest(context.Background(), &q) assert.NoError(t, err) assert.Empty(t, res.VerifyResult) } // Commit without signature and verification requested { service := newService("../..") src := argoappv1.ApplicationSource{Path: "manifests/base"} q := apiclient.ManifestRequest{Repo: &argoappv1.Repository{}, ApplicationSource: &src, VerifySignature: true} res, err := service.GenerateManifest(context.Background(), &q) assert.NoError(t, err) assert.Empty(t, res.VerifyResult) } // Commit without signature and verification not requested { service := newService("../..") src := argoappv1.ApplicationSource{Path: "manifests/base"} q := apiclient.ManifestRequest{Repo: &argoappv1.Repository{}, ApplicationSource: &src, VerifySignature: true} res, err := service.GenerateManifest(context.Background(), &q) assert.NoError(t, err) assert.Empty(t, res.VerifyResult) } }
explode_data.jsonl/5694
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 569 }
[ 2830, 3393, 1949, 25088, 62339, 2077, 1155, 353, 8840, 836, 8, 341, 197, 322, 9205, 448, 11957, 323, 22901, 11223, 198, 197, 515, 197, 52934, 1669, 501, 1860, 2354, 25088, 17409, 496, 5130, 197, 41144, 1669, 1392, 78, 676, 85, 16, 175...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInvalidPositionalSlice(t *testing.T) { var args struct { Foo []int `arg:"positional"` } err := parse("1 2 xyz", &args) require.Error(t, err) }
explode_data.jsonl/13022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 64 }
[ 2830, 3393, 7928, 3812, 278, 33236, 1155, 353, 8840, 836, 8, 341, 2405, 2827, 2036, 341, 197, 12727, 2624, 3056, 396, 1565, 858, 2974, 966, 3005, 8805, 197, 532, 9859, 1669, 4715, 445, 16, 220, 17, 40511, 497, 609, 2116, 340, 17957, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNewMappingRuleSnapshotNoStoragePoliciesAndDropPolicy(t *testing.T) { proto := &rulepb.MappingRuleSnapshot{} _, err := newMappingRuleSnapshotFromProto(proto, testTagsFilterOptions()) require.Equal(t, errNoStoragePoliciesAndDropPolicyInMappingRuleSnapshot, err) }
explode_data.jsonl/64566
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 3564, 6807, 11337, 15009, 2753, 5793, 47, 42038, 3036, 19871, 13825, 1155, 353, 8840, 836, 8, 341, 197, 15110, 1669, 609, 12937, 16650, 76455, 11337, 15009, 16094, 197, 6878, 1848, 1669, 501, 6807, 11337, 15009, 3830, 31549, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRecorder_ResponseStatus_ErrorsIfFinalEventNotAResponse(t *testing.T) { _, err := NewTestRecorder(). AddHttpRequest(HttpRequest{}). ResponseStatus() assert.Equal(t, "final event should be a response type", err.Error()) }
explode_data.jsonl/76380
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 47023, 65873, 2522, 93623, 1087, 2679, 19357, 1556, 2623, 934, 288, 1025, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 1532, 2271, 47023, 25829, 197, 37972, 26362, 12286, 1900, 6257, 4292, 197, 69604, 2522, 2822, 6948,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func Test_noCancelCtx_Value(t *testing.T) { type fields struct { parent context.Context } type args struct { key interface{} } tests := []struct { name string fields fields args args want interface{} }{ { name: "pass", fields: fields{ parent: context.Background(), }, args: args{ key: "key", }, want: nil, }, { name: "pass with context canceled", fields: fields{ parent: canceledContext(), }, args: args{ key: "key", }, want: nil, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { ctx := ContextWithoutCancel(tt.fields.parent) assert.Equalf(t, tt.want, ctx.Value(tt.args.key), "Value(%v)", tt.args.key) }) } }
explode_data.jsonl/19438
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 6536, 9269, 23684, 27867, 1155, 353, 8840, 836, 8, 341, 13158, 5043, 2036, 341, 197, 24804, 2266, 9328, 198, 197, 532, 13158, 2827, 2036, 341, 197, 23634, 3749, 16094, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 256,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestShouldNotParseWithMalformedHash(t *testing.T) { hashExtraField := "$argon2id$v=19$m=65536,t=3,p=2$abc$BpLnfgDsc2WD8F2q$^^vzA4myCqZZ36bUGsDY//8mKUYNZZaR0t4MFFSs+iM" hashMissingSaltAndParams := "$argon2id$v=1$2t9X8nNCN2n3/kFYJ3xWNBg5k/rO782Qr7JJoJIK7G4" hashMissingSalt := "$argon2id$v=1$m=65536,t=3,p=2$2t9X8nNCN2n3/kFYJ3xWNBg5k/rO782Qr7JJoJIK7G4" passwordHash, err := ParseHash(hashExtraField) assert.EqualError(t, err, fmt.Sprintf("Hash key is not the last parameter, the hash is likely malformed (%s)", hashExtraField)) assert.Nil(t, passwordHash) passwordHash, err = ParseHash(hashMissingSaltAndParams) assert.EqualError(t, err, fmt.Sprintf("Hash key is not the last parameter, the hash is likely malformed (%s)", hashMissingSaltAndParams)) assert.Nil(t, passwordHash) passwordHash, err = ParseHash(hashMissingSalt) assert.EqualError(t, err, fmt.Sprintf("Hash key is not the last parameter, the hash is likely malformed (%s)", hashMissingSalt)) assert.Nil(t, passwordHash) }
explode_data.jsonl/40180
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 420 }
[ 2830, 3393, 14996, 2623, 14463, 2354, 29600, 10155, 6370, 1155, 353, 8840, 836, 8, 341, 50333, 11612, 1877, 1669, 5201, 70821, 17, 307, 65020, 28, 16, 24, 53516, 28, 21, 20, 20, 18, 21, 13960, 28, 18, 7237, 28, 17, 3, 13683, 3, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormatter_FormatValidationEvent(t *testing.T) { testCases := map[string]struct { previewStrategy common.DryRunStrategy event event.ValidationEvent statusCollector list.Collector expected string expectedError error }{ "zero objects, return error": { previewStrategy: common.DryRunNone, event: event.ValidationEvent{ Identifiers: object.ObjMetadataSet{}, Error: errors.New("unexpected"), }, expectedError: errors.New("invalid validation event: no identifiers: unexpected"), }, "one object, missing namespace": { previewStrategy: common.DryRunNone, event: event.ValidationEvent{ Identifiers: object.ObjMetadataSet{ { GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "foo", Name: "bar", }, }, Error: validation.NewError( field.Required(field.NewPath("metadata", "namespace"), "namespace is required"), object.ObjMetadata{ GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "foo", Name: "bar", }, ), }, expected: "Invalid object (deployment.apps/bar): metadata.namespace: Required value: namespace is required", }, "two objects, cyclic dependency": { previewStrategy: common.DryRunNone, event: event.ValidationEvent{ Identifiers: object.ObjMetadataSet{ { GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "default", Name: "bar", }, { GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "default", Name: "foo", }, }, Error: validation.NewError( graph.CyclicDependencyError{ Edges: []graph.Edge{ { From: object.ObjMetadata{ GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "default", Name: "bar", }, To: object.ObjMetadata{ GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "default", Name: "foo", }, }, { From: object.ObjMetadata{ GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "default", Name: "foo", }, To: object.ObjMetadata{ GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, 
Namespace: "default", Name: "bar", }, }, }, }, object.ObjMetadata{ GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "default", Name: "bar", }, object.ObjMetadata{ GroupKind: schema.GroupKind{ Group: "apps", Kind: "Deployment", }, Namespace: "default", Name: "foo", }, ), }, expected: `Invalid objects (deployment.apps/bar, deployment.apps/foo): cyclic dependency: - apps/namespaces/default/Deployment/bar -> apps/namespaces/default/Deployment/foo - apps/namespaces/default/Deployment/foo -> apps/namespaces/default/Deployment/bar`, }, } for tn, tc := range testCases { t.Run(tn, func(t *testing.T) { ioStreams, _, out, _ := genericclioptions.NewTestIOStreams() //nolint:dogsled formatter := NewFormatter(ioStreams, tc.previewStrategy) err := formatter.FormatValidationEvent(tc.event) if tc.expectedError != nil { assert.EqualError(t, err, tc.expectedError.Error()) } else { assert.NoError(t, err) } assert.Equal(t, tc.expected, strings.TrimSpace(out.String())) }) } }
explode_data.jsonl/58261
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1882 }
[ 2830, 3393, 14183, 72999, 13799, 1556, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 197, 27171, 19816, 4185, 909, 884, 6727, 19816, 198, 197, 28302, 1843, 1538, 42816, 1556, 198, 197, 23847, 53694, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDockerConfigDefault(t *testing.T) { p := newTestPlugin(t) require.NotNil(t, p.docker) require.Equal(t, dockerclient.DefaultDockerHost, p.docker.(*dockerclient.Client).DaemonHost()) require.Equal(t, "1.41", p.docker.(*dockerclient.Client).ClientVersion()) require.Equal(t, &defaultContainerIDFinder{}, p.containerIDFinder) }
explode_data.jsonl/28002
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 35, 13659, 2648, 3675, 1155, 353, 8840, 836, 8, 341, 3223, 1669, 501, 2271, 11546, 1155, 692, 17957, 93882, 1155, 11, 281, 91131, 340, 17957, 12808, 1155, 11, 26588, 2972, 13275, 35, 13659, 9296, 11, 281, 91131, 41399, 28648...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHandlerWrongType(t *testing.T) { c, logs := newTestClientWithRulesAndFilters(t, ExtractionRules{}, Filters{}) assert.Equal(t, logs.Len(), 0) c.handlePodAdd(1) c.handlePodDelete(1) c.handlePodUpdate(1, 2) assert.Equal(t, logs.Len(), 3) for _, l := range logs.All() { assert.Equal(t, l.Message, "object received was not of type api_v1.Pod") } }
explode_data.jsonl/56852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 3050, 29185, 929, 1155, 353, 8840, 836, 8, 341, 1444, 11, 18422, 1669, 501, 2271, 2959, 2354, 26008, 3036, 28351, 1155, 11, 94506, 26008, 22655, 45012, 37790, 6948, 12808, 1155, 11, 18422, 65819, 1507, 220, 15, 340, 1444, 10...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFileHeaderRoundTrip(t *testing.T) { fh := &FileHeader{ Name: "foo.txt", UncompressedSize: 987654321, ModifiedTime: 1234, ModifiedDate: 5678, } testHeaderRoundTrip(fh, fh.UncompressedSize, uint64(fh.UncompressedSize), t) }
explode_data.jsonl/18859
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 1703, 4047, 27497, 56352, 1155, 353, 8840, 836, 8, 341, 1166, 71, 1669, 609, 1703, 4047, 515, 197, 21297, 25, 1797, 330, 7975, 3909, 756, 197, 197, 1806, 45703, 1695, 25, 220, 24, 23, 22, 21, 20, 19, 18, 17, 16, 345, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDetectImagesWithNewImage(t *testing.T) { // Just one image initially. manager, fakeRuntime, _ := newRealImageGCManager(ImageGCPolicy{}) fakeRuntime.ImageList = []container.Image{ makeImage(0, 1024), makeImage(1, 2048), } fakeRuntime.AllPodList = []*containertest.FakePod{ {Pod: &container.Pod{ Containers: []*container.Container{ makeContainer(1), }, }}, } err := manager.detectImages(zero) assert := assert.New(t) require.NoError(t, err) assert.Equal(manager.imageRecordsLen(), 2) // Add a new image. fakeRuntime.ImageList = []container.Image{ makeImage(0, 1024), makeImage(1, 1024), makeImage(2, 1024), } detectedTime := zero.Add(time.Second) startTime := time.Now().Add(-time.Millisecond) err = manager.detectImages(detectedTime) require.NoError(t, err) assert.Equal(manager.imageRecordsLen(), 3) noContainer, ok := manager.getImageRecord(imageID(0)) require.True(t, ok) assert.Equal(zero, noContainer.firstDetected) assert.Equal(zero, noContainer.lastUsed) withContainer, ok := manager.getImageRecord(imageID(1)) require.True(t, ok) assert.Equal(zero, withContainer.firstDetected) assert.True(withContainer.lastUsed.After(startTime)) newContainer, ok := manager.getImageRecord(imageID(2)) require.True(t, ok) assert.Equal(detectedTime, newContainer.firstDetected) assert.Equal(zero, noContainer.lastUsed) }
explode_data.jsonl/48104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 504 }
[ 2830, 3393, 57193, 14228, 2354, 3564, 1906, 1155, 353, 8840, 836, 8, 341, 197, 322, 4599, 825, 2168, 15102, 624, 92272, 11, 12418, 15123, 11, 716, 1669, 501, 12768, 1906, 22863, 2043, 30122, 38, 7123, 8018, 37790, 1166, 726, 15123, 7528...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClhGenerateSocket(t *testing.T) { assert := assert.New(t) // Ensure the type is fully constructed hypervisor, err := NewHypervisor("clh") assert.NoError(err) clh, ok := hypervisor.(*cloudHypervisor) assert.True(ok) clh.config = HypervisorConfig{ VMStorePath: "/foo", RunStorePath: "/bar", } clh.addVSock(1, "path") s, err := clh.GenerateSocket("c") assert.NoError(err) assert.NotNil(s) hvsock, ok := s.(types.HybridVSock) assert.True(ok) assert.NotEmpty(hvsock.UdsPath) // Path must be absolute assert.True(strings.HasPrefix(hvsock.UdsPath, "/"), "failed: socket path: %s", hvsock.UdsPath) assert.NotZero(hvsock.Port) }
explode_data.jsonl/68506
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 5066, 71, 31115, 10286, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 197, 322, 29279, 279, 943, 374, 7225, 20346, 198, 9598, 1082, 31396, 11, 1848, 1669, 1532, 39, 1082, 31396, 445, 564, 71, 1138, 6948,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1