text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestUnexportedExecuteQueryWithClientAndLogger(t *testing.T) { defer func() { client = nil logger = nil }() dialer := &mockDialer{} client, _ = grammes.Dial(dialer) logger = logging.NewBasicLogger() execute := func(string, map[string]string, map[string]string) ([]byte, error) { return nil, nil } client.GraphManager = manager.NewGraphManager(dialer, logging.NewBasicLogger(), execute) Convey("Given a host and query", t, func() { host := "testhost" query := "testquery" Convey("When executeQuery is called with a client and logger established", func() { _, err := executeQuery(host, query) Convey("Then no error should be returned", func() { So(err, ShouldBeNil) }) }) }) }
explode_data.jsonl/9669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 1806, 1533, 291, 17174, 2859, 2354, 2959, 3036, 7395, 1155, 353, 8840, 836, 8, 341, 16867, 2915, 368, 341, 197, 25291, 284, 2092, 198, 197, 17060, 284, 2092, 198, 197, 69826, 2698, 530, 261, 1669, 609, 16712, 35, 530, 261,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAppendTo_DuplicatedNames(t *testing.T) { arrayVal := [3]string{"1", "2", "3"} sliceVal := []string{"1", "2", "3"} testCases := []struct { field string data *testStruct want interface{} }{ {"array[]", &testStruct{Array: arrayVal}, sliceVal}, {"array_ptr[]", &testStruct{ArrayPtr: &arrayVal}, sliceVal}, {"slice[]", &testStruct{Slice: sliceVal}, sliceVal}, {"slice_ptr[]", &testStruct{SlicePtr: &sliceVal}, sliceVal}, // Tests slice nested inside of map nested inside of another map { "map[foo][bar][]", &testStruct{Map: map[string]interface{}{ "foo": map[string]interface{}{"bar": sliceVal}, }}, sliceVal, }, } for _, tc := range testCases { t.Run(tc.field, func(t *testing.T) { form := &Values{} AppendTo(form, tc.data) values := form.ToValues() //fmt.Printf("values = %+v", values) // This is the only difference between this test case and the one // above: we used square brackets to grab a []string instead of // just a single value. assert.Equal(t, tc.want, values[tc.field]) }) } }
explode_data.jsonl/30321
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 422 }
[ 2830, 3393, 23877, 1249, 1557, 98984, 7980, 1155, 353, 8840, 836, 8, 341, 11923, 2208, 1669, 508, 18, 30953, 4913, 16, 497, 330, 17, 497, 330, 18, 16707, 1903, 4754, 2208, 1669, 3056, 917, 4913, 16, 497, 330, 17, 497, 330, 18, 63159...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFormatterColorHex(test *testing.T) { formatted, err := formatter.Format(`{color "0xF3AC67"}funky{normal}`) assert.NoError(test, err) assert.Equal(test, "\x1b[38;2;243;172;103mfunky\x1b[0m", formatted) }
explode_data.jsonl/39793
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 94 }
[ 2830, 3393, 14183, 1636, 20335, 8623, 353, 8840, 836, 8, 341, 37410, 12127, 11, 1848, 1669, 24814, 9978, 5809, 90, 3423, 330, 15, 9770, 18, 1706, 21, 22, 9207, 69, 69205, 90, 8252, 5541, 692, 6948, 35699, 8623, 11, 1848, 340, 6948, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUIDPresentSubject(t *testing.T) { inputPath := "subjectUID.pem" expected := lint.Error out := test.TestLint("e_cert_contains_unique_identifier", inputPath) if out.Status != expected { t.Errorf("%s: expected %s, got %s", inputPath, expected, out.Status) } }
explode_data.jsonl/41433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 6463, 21195, 13019, 1155, 353, 8840, 836, 8, 341, 22427, 1820, 1669, 330, 11501, 6463, 49373, 698, 42400, 1669, 57920, 6141, 198, 13967, 1669, 1273, 8787, 47556, 445, 68, 37097, 63598, 21218, 33176, 497, 1946, 1820, 340, 743, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStateProposerSelection0(t *testing.T) { cs1, vss := randState(4) height, round := cs1.Height, cs1.Round newRoundCh := subscribe(cs1.eventBus, types.EventQueryNewRound) proposalCh := subscribe(cs1.eventBus, types.EventQueryCompleteProposal) startTestRound(cs1, height, round) // Wait for new round so proposer is set. ensureNewRound(newRoundCh, height, round) // Commit a block and ensure proposer for the next height is correct. prop := cs1.GetRoundState().Validators.GetProposer() pv, err := cs1.privValidator.GetPubKey() require.NoError(t, err) address := pv.Address() if !bytes.Equal(prop.Address, address) { t.Fatalf("expected proposer to be validator %d. Got %X", 0, prop.Address) } // Wait for complete proposal. ensureNewProposal(proposalCh, height, round) rs := cs1.GetRoundState() signAddVotes(cs1, types.PrecommitType, rs.ProposalBlock.Hash(), rs.ProposalBlockParts.Header(), vss[1:]...) // Wait for new round so next validator is set. ensureNewRound(newRoundCh, height+1, 0) prop = cs1.GetRoundState().Validators.GetProposer() pv1, err := vss[1].GetPubKey() require.NoError(t, err) addr := pv1.Address() if !bytes.Equal(prop.Address, addr) { panic(fmt.Sprintf("expected proposer to be validator %d. Got %X", 1, prop.Address)) } }
explode_data.jsonl/81640
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 461 }
[ 2830, 3393, 1397, 2008, 23438, 11177, 15, 1155, 353, 8840, 836, 8, 341, 71899, 16, 11, 348, 778, 1669, 10382, 1397, 7, 19, 340, 30500, 11, 4778, 1669, 10532, 16, 17743, 11, 10532, 16, 37646, 271, 8638, 27497, 1143, 1669, 17963, 41153,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestTagsForBranchFailureWrongBranch(t *testing.T) { testRepo := newTestRepo(t) defer testRepo.cleanup(t) result, err := testRepo.sut.TagsForBranch("wrong-branch") require.NotNil(t, err) require.Nil(t, result) }
explode_data.jsonl/14012
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 93 }
[ 2830, 3393, 15930, 2461, 18197, 17507, 29185, 18197, 1155, 353, 8840, 836, 8, 341, 18185, 25243, 1669, 501, 2271, 25243, 1155, 340, 16867, 1273, 25243, 87689, 1155, 692, 9559, 11, 1848, 1669, 1273, 25243, 514, 332, 73522, 2461, 18197, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSysctlOverrideHost(t *testing.T) { c := &container.Container{ Config: &containertypes.Config{}, HostConfig: &containertypes.HostConfig{ NetworkMode: "host", Sysctls: map[string]string{}, UsernsMode: "host", }, } d := setupFakeDaemon(t, c) defer cleanupFakeContainer(c) // Ensure that the implicit sysctl is not set s, err := d.createSpec(c) assert.NilError(t, err) assert.Equal(t, s.Linux.Sysctl["net.ipv4.ip_unprivileged_port_start"], "") assert.Equal(t, s.Linux.Sysctl["net.ipv4.ping_group_range"], "") // Set an explicit sysctl. c.HostConfig.Sysctls["net.ipv4.ip_unprivileged_port_start"] = "1024" s, err = d.createSpec(c) assert.NilError(t, err) assert.Equal(t, s.Linux.Sysctl["net.ipv4.ip_unprivileged_port_start"], c.HostConfig.Sysctls["net.ipv4.ip_unprivileged_port_start"]) }
explode_data.jsonl/51545
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 32792, 12373, 2177, 9296, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 609, 3586, 33672, 515, 197, 66156, 25, 609, 52095, 529, 1804, 10753, 38837, 197, 197, 9296, 2648, 25, 609, 52095, 529, 1804, 29840, 2648, 515, 298, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDuplicateUpdates(t *testing.T) { ctx := context.Background() initMetrics.Do(func() { createMetrics(monitoring.InertMetricFactory{}) }) ks, err := keyset.NewHandle(signature.ECDSAP256KeyTemplate()) if err != nil { t.Fatalf("keyset.NewHandle(): %v", err) } signer, err := signature.NewSigner(ks) if err != nil { t.Fatalf("signature.NewSigner(): %v", err) } authorizedKeys, err := ks.Public() if err != nil { t.Fatalf("Failed to setup tink keyset: %v", err) } index := []byte("index") userID := "userID" log0 := []mutator.LogMessage{} mapRev := int64(0) for i, data := range []string{"data1", "data2"} { m := entry.NewMutation(index, directoryID, userID) if err := m.SetCommitment([]byte(data)); err != nil { t.Fatalf("SetCommitment(): %v", err) } if err := m.ReplaceAuthorizedKeys(authorizedKeys); err != nil { t.Fatalf("ReplaceAuthorizedKeys(): %v", err) } update, err := m.SerializeAndSign([]tink.Signer{signer}) if err != nil { t.Fatalf("SerializeAndSign(): %v", err) } log0 = append(log0, mutator.LogMessage{ ID: int64(i), Mutation: update.Mutation, ExtraData: update.Committed}, ) } s := Server{ logs: fakeLogs{0: log0}, batcher: &fakeBatcher{ highestRev: mapRev, batches: map[int64]*spb.MapMetadata{ 1: {Sources: []*spb.MapMetadata_SourceSlice{{LogId: 0, HighestExclusive: 2}}}, }, }, trillian: &fakeTrillianFactory{ tmap: &fakeMap{ MapClient: MapClient{&tclient.MapClient{Conn: &fakeMapConn{}}}, latestMapRoot: &types.MapRootV1{Revision: uint64(mapRev)}, }, }, } _, err = s.ApplyRevision(ctx, &spb.ApplyRevisionRequest{ DirectoryId: directoryID, Revision: 1, }) if got, want := status.Convert(err).Message(), status.Convert(errSuccess).Message(); !strings.Contains(got, want) { t.Fatalf("ApplyRevision(): %v, want\n%v", got, want) } }
explode_data.jsonl/64850
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 791 }
[ 2830, 3393, 53979, 37091, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 28248, 27328, 33596, 18552, 368, 314, 1855, 27328, 1255, 30314, 287, 5337, 529, 54310, 4153, 28875, 2751, 197, 2787, 11, 1848, 1669, 1376, 746, 7121, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrintPodDisruptionBudget(t *testing.T) { minAvailable := intstr.FromInt(22) maxUnavailable := intstr.FromInt(11) tests := []struct { pdb policy.PodDisruptionBudget expected []metav1.TableRow }{ // Min Available set, no Max Available. { pdb: policy.PodDisruptionBudget{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns1", Name: "pdb1", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Spec: policy.PodDisruptionBudgetSpec{ MinAvailable: &minAvailable, }, Status: policy.PodDisruptionBudgetStatus{ DisruptionsAllowed: 5, }, }, // Columns: Name, Min Available, Max Available, Allowed Disruptions, Age expected: []metav1.TableRow{{Cells: []interface{}{"pdb1", "22", "N/A", int64(5), "0s"}}}, }, // Max Available set, no Min Available. { pdb: policy.PodDisruptionBudget{ ObjectMeta: metav1.ObjectMeta{ Namespace: "ns2", Name: "pdb2", CreationTimestamp: metav1.Time{Time: time.Now().Add(1.9e9)}, }, Spec: policy.PodDisruptionBudgetSpec{ MaxUnavailable: &maxUnavailable, }, Status: policy.PodDisruptionBudgetStatus{ DisruptionsAllowed: 5, }, }, // Columns: Name, Min Available, Max Available, Allowed Disruptions, Age expected: []metav1.TableRow{{Cells: []interface{}{"pdb2", "N/A", "11", int64(5), "0s"}}}, }} for i, test := range tests { rows, err := printPodDisruptionBudget(&test.pdb, printers.GenerateOptions{}) if err != nil { t.Fatal(err) } for i := range rows { rows[i].Object.Object = nil } if !reflect.DeepEqual(test.expected, rows) { t.Errorf("%d mismatch: %s", i, diff.ObjectReflectDiff(test.expected, rows)) } } }
explode_data.jsonl/21614
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 784 }
[ 2830, 3393, 8994, 23527, 4839, 14123, 62901, 1155, 353, 8840, 836, 8, 341, 25320, 16485, 1669, 526, 495, 11439, 1072, 7, 17, 17, 340, 22543, 92928, 1669, 526, 495, 11439, 1072, 7, 16, 16, 340, 78216, 1669, 3056, 1235, 341, 197, 3223, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGap_errorResponse(t *testing.T) { var private protos.ProtocolData dns := newDns(testing.Verbose()) q := sophosTxtTcp.request r := sophosTxtTcp.response[:10] tcptuple := testTcpTuple() packet := newPacket(forward, q) private = dns.Parse(packet, tcptuple, tcp.TcpDirectionOriginal, private) assert.Equal(t, 1, dns.transactions.Size(), "There should be one transaction.") packet = newPacket(reverse, r) private = dns.Parse(packet, tcptuple, tcp.TcpDirectionReverse, private) assert.Equal(t, 1, dns.transactions.Size(), "There should be one transaction.") private, drop := dns.GapInStream(tcptuple, tcp.TcpDirectionReverse, 10, private) assert.Equal(t, true, drop) dns.ReceivedFin(tcptuple, tcp.TcpDirectionReverse, private) m := expectResult(t, dns) assertRequest(t, m, sophosTxtTcp) assert.Equal(t, IncompleteMsg.ResponseError(), mapValue(t, m, "notes")) assert.Nil(t, mapValue(t, m, "answers")) }
explode_data.jsonl/68710
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 361 }
[ 2830, 3393, 12868, 4096, 2582, 1155, 353, 8840, 836, 8, 341, 2405, 869, 1724, 436, 54096, 1043, 198, 2698, 4412, 1669, 501, 35, 4412, 8623, 287, 42505, 8297, 2398, 18534, 1669, 18701, 436, 35629, 77536, 8223, 198, 7000, 1669, 18701, 436...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAlreadyExistingSecretControllerFlow(t *testing.T) { stopChannel := make(chan struct{}) defer close(stopChannel) received := make(chan bool) expectedSecretName := "new-secret" serviceName := "svc-name" serviceUID := "some-uid" expectedSecretAnnotations := map[string]string{ServiceUIDAnnotation: serviceUID, ServiceNameAnnotation: serviceName} namespace := "ns" existingSecret := &v1.Secret{} existingSecret.Name = expectedSecretName existingSecret.Namespace = namespace existingSecret.Type = v1.SecretTypeTLS existingSecret.Annotations = expectedSecretAnnotations caName, kubeclient, fakeWatch, _, controller, informerFactory := controllerSetup([]runtime.Object{existingSecret}, stopChannel, t) kubeclient.PrependReactor("create", "secrets", func(action clientgotesting.Action) (handled bool, ret runtime.Object, err error) { return true, &v1.Secret{}, kapierrors.NewAlreadyExists(v1.Resource("secrets"), "new-secret") }) controller.syncHandler = func(serviceKey string) error { defer func() { received <- true }() err := controller.syncService(serviceKey) if err != nil { t.Errorf("unexpected error: %v", err) } return err } informerFactory.Start(stopChannel) go controller.Run(1, stopChannel) expectedServiceAnnotations := map[string]string{ServingCertSecretAnnotation: expectedSecretName, ServingCertCreatedByAnnotation: caName} serviceToAdd := &v1.Service{} serviceToAdd.Name = serviceName serviceToAdd.Namespace = namespace serviceToAdd.UID = types.UID(serviceUID) serviceToAdd.Annotations = map[string]string{ServingCertSecretAnnotation: expectedSecretName} fakeWatch.Add(serviceToAdd) t.Log("waiting to reach syncHandler") select { case <-received: case <-time.After(time.Duration(30 * time.Second)): t.Fatalf("failed to call into syncService") } foundSecret := false foundServiceUpdate := false for _, action := range kubeclient.Actions() { switch { case action.Matches("get", "secrets"): foundSecret = true case action.Matches("update", "services"): updateService := 
action.(clientgotesting.UpdateAction) service := updateService.GetObject().(*v1.Service) if !reflect.DeepEqual(service.Annotations, expectedServiceAnnotations) { t.Errorf("expected %v, got %v", expectedServiceAnnotations, service.Annotations) continue } foundServiceUpdate = true } } if !foundSecret { t.Errorf("secret wasn't retrieved. Got %v\n", kubeclient.Actions()) } if !foundServiceUpdate { t.Errorf("service wasn't updated. Got %v\n", kubeclient.Actions()) } }
explode_data.jsonl/49899
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 829 }
[ 2830, 3393, 38370, 53067, 19773, 2051, 18878, 1155, 353, 8840, 836, 8, 341, 62644, 9629, 1669, 1281, 35190, 2036, 37790, 16867, 3265, 60170, 9629, 340, 17200, 8771, 1669, 1281, 35190, 1807, 692, 42400, 19773, 675, 1669, 330, 931, 68892, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApiTest_Report(t *testing.T) { getUser := apitest.NewMock(). Get("http://localhost:8080"). RespondWith(). Status(http.StatusOK). Body("1"). Times(1). End() reporter := &RecorderCaptor{} apitest.New("some test"). Debug(). Meta(map[string]interface{}{"host": "abc.com"}). Report(reporter). Mocks(getUser). Handler(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { getUserData() w.WriteHeader(http.StatusOK) })). Post("/hello"). Body(`{"a": 12345}`). Headers(map[string]string{"Content-Type": "application/json"}). Expect(t). Status(http.StatusOK). End() r := reporter.capturedRecorder assert.Equal(t, "POST /hello", r.Title) assert.Equal(t, "some test", r.SubTitle) assert.Equal(t, 4, len(r.Events)) assert.Equal(t, 200, r.Meta["status_code"]) assert.Equal(t, "/hello", r.Meta["path"]) assert.Equal(t, "POST", r.Meta["method"]) assert.Equal(t, "some test", r.Meta["name"]) assert.Equal(t, "abc.com", r.Meta["host"]) assert.Equal(t, true, r.Meta["duration"] != nil) }
explode_data.jsonl/54809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 6563, 2271, 76428, 1155, 353, 8840, 836, 8, 341, 10366, 1474, 1669, 1443, 97105, 7121, 11571, 25829, 197, 37654, 445, 1254, 1110, 8301, 25, 23, 15, 23, 15, 38609, 197, 197, 65354, 2354, 25829, 197, 58321, 19886, 52989, 4292,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStepOnCallPtrInstr(t *testing.T) { protest.AllowRecording(t) withTestProcess("teststepprog", t, func(p *proc.Target, fixture protest.Fixture) { setFileBreakpoint(p, t, fixture.Source, 10) assertNoError(p.Continue(), t, "Continue()") found := false for { _, ln := currentLineNumber(p, t) if ln != 10 { break } regs, err := p.CurrentThread().Registers() assertNoError(err, t, "Registers()") pc := regs.PC() text, err := proc.Disassemble(p.Memory(), regs, p.Breakpoints(), p.BinInfo(), pc, pc+uint64(p.BinInfo().Arch.MaxInstructionLength())) assertNoError(err, t, "Disassemble()") if text[0].IsCall() { found = true break } assertNoError(p.StepInstruction(), t, "StepInstruction()") } if !found { t.Fatal("Could not find CALL instruction") } assertNoError(p.Step(), t, "Step()") if goversion.VersionAfterOrEqual(runtime.Version(), 1, 11) && !protest.RegabiSupported() { assertLineNumber(p, t, 6, "Step continued to wrong line,") } else { assertLineNumber(p, t, 5, "Step continued to wrong line,") } }) }
explode_data.jsonl/56275
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 8304, 1925, 7220, 5348, 74375, 1155, 353, 8840, 836, 8, 341, 197, 776, 1944, 29081, 52856, 1155, 340, 46948, 2271, 7423, 445, 1944, 5342, 602, 11918, 497, 259, 11, 2915, 1295, 353, 15782, 35016, 11, 12507, 8665, 991, 12735, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetConfig(t *testing.T) { t.Parallel() Convey(`With a testing configuration`, t, func() { c, env := ct.Install(true) s := gaeconfig.Settings{ ConfigServiceHost: "example.com", } So(s.SetIfChanged(c, "test", "test"), ShouldBeNil) svr := New() Convey(`Returns Forbidden error if not a service.`, func() { _, err := svr.GetConfig(c, nil) So(err, ShouldBeRPCPermissionDenied) }) Convey(`When logged in as a service, can retrieve the configuration.`, func() { env.JoinGroup("services") cr, err := svr.GetConfig(c, nil) So(err, ShouldBeRPCOK) So(cr, ShouldResemble, &logdog.GetConfigResponse{ ConfigServiceUrl: "test://example.com", ConfigSet: "services/app", ServiceConfigPath: svcconfig.ServiceConfigPath, ConfigServiceHost: "example.com", }) }) }) }
explode_data.jsonl/40698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 339 }
[ 2830, 3393, 1949, 2648, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 93070, 5617, 5809, 2354, 264, 7497, 6546, 7808, 259, 11, 2915, 368, 341, 197, 1444, 11, 6105, 1669, 20251, 71207, 541, 3715, 692, 197, 1903, 1669, 342, 591...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEventBusPublishEventNewBlock(t *testing.T) { eventBus := NewEventBus() err := eventBus.Start() require.NoError(t, err) defer eventBus.Stop() block := MakeBlock(0, []Tx{}, nil, []Evidence{}) resultBeginBlock := abci.ResponseBeginBlock{ Events: []abci.Event{ {Type: "testType", Attributes: []abci.EventAttribute{{Key: []byte("baz"), Value: []byte("1")}}}, }, } resultEndBlock := abci.ResponseEndBlock{ Events: []abci.Event{ {Type: "testType", Attributes: []abci.EventAttribute{{Key: []byte("foz"), Value: []byte("2")}}}, }, } // PublishEventNewBlock adds the tm.event compositeKey, so the query below should work query := "tm.event='NewBlock' AND testType.baz=1 AND testType.foz=2" blocksSub, err := eventBus.Subscribe(context.Background(), "test", tmquery.MustParse(query)) require.NoError(t, err) done := make(chan struct{}) go func() { msg := <-blocksSub.Out() edt := msg.Data().(EventDataNewBlock) assert.Equal(t, block, edt.Block) assert.Equal(t, resultBeginBlock, edt.ResultBeginBlock) assert.Equal(t, resultEndBlock, edt.ResultEndBlock) close(done) }() err = eventBus.PublishEventNewBlock(EventDataNewBlock{ Block: block, ResultBeginBlock: resultBeginBlock, ResultEndBlock: resultEndBlock, }) assert.NoError(t, err) select { case <-done: case <-time.After(1 * time.Second): t.Fatal("did not receive a block after 1 sec.") } }
explode_data.jsonl/34192
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 535 }
[ 2830, 3393, 1556, 15073, 50145, 1556, 3564, 4713, 1155, 353, 8840, 836, 8, 341, 28302, 15073, 1669, 1532, 1556, 15073, 741, 9859, 1669, 1538, 15073, 12101, 741, 17957, 35699, 1155, 11, 1848, 340, 16867, 1538, 15073, 30213, 2822, 47996, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMarathonSDEmptyList(t *testing.T) { ch, md := newTestDiscovery(func(url string) (*AppList, error) { return &AppList{}, nil }) go func() { select { case tg := <-ch: if len(tg) > 0 { t.Fatalf("Got group: %v", tg) } default: } }() err := md.updateServices(context.Background(), ch) if err != nil { t.Fatalf("Got error: %s", err) } }
explode_data.jsonl/65403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 12061, 23941, 50, 1150, 76, 1595, 852, 1155, 353, 8840, 836, 8, 341, 23049, 11, 10688, 1669, 501, 2271, 67400, 18552, 6522, 914, 8, 4609, 2164, 852, 11, 1465, 8, 341, 197, 853, 609, 2164, 852, 22655, 2092, 198, 197, 3518...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNumericTagFilter(t *testing.T) { var empty = map[string]pdata.AttributeValue{} filter := NewNumericAttributeFilter(zap.NewNop(), "example", math.MinInt32, math.MaxInt32) resAttr := map[string]pdata.AttributeValue{} resAttr["example"] = pdata.NewAttributeValueInt(8) cases := []struct { Desc string Trace *TraceData Decision Decision }{ { Desc: "nonmatching span attribute", Trace: newTraceIntAttrs(empty, "non_matching", math.MinInt32), Decision: NotSampled, }, { Desc: "span attribute with lower limit", Trace: newTraceIntAttrs(empty, "example", math.MinInt32), Decision: Sampled, }, { Desc: "span attribute with upper limit", Trace: newTraceIntAttrs(empty, "example", math.MaxInt32), Decision: Sampled, }, { Desc: "span attribute below min limit", Trace: newTraceIntAttrs(empty, "example", math.MinInt32-1), Decision: NotSampled, }, { Desc: "span attribute above max limit", Trace: newTraceIntAttrs(empty, "example", math.MaxInt32+1), Decision: NotSampled, }, } for _, c := range cases { t.Run(c.Desc, func(t *testing.T) { u, _ := uuid.NewRandom() decision, err := filter.Evaluate(pdata.NewTraceID(u), c.Trace) assert.NoError(t, err) assert.Equal(t, decision, c.Decision) }) } }
explode_data.jsonl/35889
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 577 }
[ 2830, 3393, 36296, 5668, 5632, 1155, 353, 8840, 836, 8, 1476, 2405, 4287, 284, 2415, 14032, 60, 57796, 33775, 1130, 16094, 50108, 1669, 1532, 36296, 3907, 5632, 13174, 391, 7121, 45, 453, 1507, 330, 8687, 497, 6888, 17070, 1072, 18, 17,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnmarshallError(t *testing.T) { testStr := ` log_level: info behavior: read_only: false autocommit: true user: name: root password: "" listener: host: localhost port: 3306 max_connections: 1 read_timeout_millis: 28800000 write_timeout_millis: 28800000 databases: - name: irs_soi path: ./datasets/irs-soi - name: noaa path: /Users/brian/datasets/noaa ` _, err := NewYamlConfig([]byte(testStr)) assert.Error(t, err) }
explode_data.jsonl/46778
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 219 }
[ 2830, 3393, 1806, 84161, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 2580, 1669, 22074, 839, 8274, 25, 3546, 271, 55866, 510, 878, 18410, 25, 895, 198, 2717, 11986, 1763, 25, 830, 271, 872, 510, 262, 829, 25, 3704, 198, 262, 3552, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPrimitiveGetBool(t *testing.T) { client := newPrimitiveClient() result, err := client.GetBool(context.Background(), nil) if err != nil { t.Fatalf("GetBool: %v", err) } if r := cmp.Diff(result.BooleanWrapper, BooleanWrapper{ FieldFalse: to.BoolPtr(false), FieldTrue: to.BoolPtr(true), }); r != "" { t.Fatal(r) } }
explode_data.jsonl/61675
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 141 }
[ 2830, 3393, 33313, 1949, 11233, 1155, 353, 8840, 836, 8, 341, 25291, 1669, 501, 33313, 2959, 741, 9559, 11, 1848, 1669, 2943, 2234, 11233, 5378, 19047, 1507, 2092, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 1949, 11233, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGenreService_Get(t *testing.T) { f, err := ioutil.ReadFile(testGenreGet) if err != nil { t.Fatal(err) } init := make([]*Genre, 1) err = json.Unmarshal(f, &init) if err != nil { t.Fatal(err) } var tests = []struct { name string file string id int opts []Option wantGenre *Genre wantErr error }{ {"Valid response", testGenreGet, 13, []Option{SetFields("name")}, init[0], nil}, {"Invalid ID", testFileEmpty, -1, nil, nil, ErrNegativeID}, {"Empty response", testFileEmpty, 13, nil, nil, errInvalidJSON}, {"Invalid option", testFileEmpty, 13, []Option{SetOffset(-99999)}, nil, ErrOutOfRange}, {"No results", testFileEmptyArray, 0, nil, nil, ErrNoResults}, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { ts, c, err := testServerFile(http.StatusOK, test.file) if err != nil { t.Fatal(err) } defer ts.Close() gen, err := c.Genres.Get(test.id, test.opts...) if errors.Cause(err) != test.wantErr { t.Errorf("got: <%v>, want: <%v>", errors.Cause(err), test.wantErr) } if !reflect.DeepEqual(gen, test.wantGenre) { t.Errorf("got: <%v>, \nwant: <%v>", gen, test.wantGenre) } }) } }
explode_data.jsonl/41084
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 543 }
[ 2830, 3393, 37525, 1860, 13614, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 43144, 78976, 8623, 37525, 1949, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 630, 28248, 1669, 1281, 85288, 37525, 11, 220, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestUsesForm(t *testing.T) { cases := []struct { action string expected model.Uses }{ { action: `action "a" { uses = "docker://alpine" }`, expected: &model.UsesDockerImage{}, }, { action: `action "a" { uses = "./actions/foo" }`, expected: &model.UsesPath{}, }, { action: `action "a" { uses = "name/owner/path@5678ac" }`, expected: &model.UsesRepository{}, }, { action: `action "a" { uses = "name/owner@5678ac" }`, expected: &model.UsesRepository{}, }, { action: `action "a" { uses = "" }`, expected: &model.UsesInvalid{}, }, { action: `action "a" { uses = "foo@" }`, expected: &model.UsesInvalid{}, }, { action: `action "a" { uses = "foo" }`, expected: &model.UsesInvalid{}, }, } for _, tc := range cases { workflow, err := Parse(strings.NewReader(tc.action), WithSuppressErrors()) require.NoError(t, err) assert.IsType(t, tc.expected, workflow.Actions[0].Uses) } }
explode_data.jsonl/55700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 442 }
[ 2830, 3393, 68965, 1838, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 38933, 256, 914, 198, 197, 42400, 1614, 13, 68965, 198, 197, 59403, 197, 197, 515, 298, 38933, 25, 256, 1565, 1311, 330, 64, 1, 314, 5711...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestTransformBasicMessage(t *testing.T) { schema := []byte(` syntax = "proto3"; package test; message Simple { double field_double = 1; float field_float = 2; int32 field_int32 = 3; int64 field_int64 = 4; uint64 field_uint64 = 5; sint32 field_sint32 = 6; sint64 field_sint64 = 7; fixed32 field_fixed32 = 8; fixed64 field_fixed64 = 9; sfixed32 field_sfixed32 = 10; sfixed64 field_sfixed64 = 11; bool field_bool = 12; string field_string = 13; bytes field_bytes = 14; } `) input := new(bytes.Buffer) input.Write(schema) output := new(bytes.Buffer) transformer := proto2gql.NewTransformer(output) if err := transformer.Transform(input); err != nil { t.Fatal(err) } expected := ` type TestSimple { field_double: Float field_float: Float field_int32: Int field_int64: Int field_uint64: Int field_sint32: Int field_sint64: Int field_fixed32: Int field_fixed64: Int field_sfixed32: Int field_sfixed64: Int field_bool: Boolean field_string: String field_bytes: [String] } ` expected = strings.TrimSpace(expected) actual := strings.TrimSpace(output.String()) if expected != actual { t.Fatalf("Expected %s to equal to %s", expected, actual) } }
explode_data.jsonl/2064
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 522 }
[ 2830, 3393, 8963, 15944, 2052, 1155, 353, 8840, 836, 8, 341, 1903, 3416, 1669, 3056, 3782, 61528, 197, 1903, 13662, 284, 330, 15110, 18, 876, 197, 197, 1722, 1273, 401, 197, 24753, 8993, 341, 298, 8646, 2070, 24598, 284, 220, 16, 280,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestServerSpanDuration(t *testing.T) { client, cleanup := testpb.NewTestClient(t) defer cleanup() te := testExporter{make(chan *trace.SpanData, 100)} trace.RegisterExporter(&te) defer trace.UnregisterExporter(&te) trace.ApplyConfig(trace.Config{DefaultSampler: trace.AlwaysSample()}) ctx := context.Background() const sleep = 100 * time.Millisecond client.Single(ctx, &testpb.FooRequest{SleepNanos: int64(sleep)}) loop: for { select { case span := <-te.ch: if span.SpanKind != trace.SpanKindServer { continue loop } if got, want := span.EndTime.Sub(span.StartTime), sleep; got < want { t.Errorf("span duration = %dns; want at least %dns", got, want) } break loop default: t.Fatal("no more spans") } } }
explode_data.jsonl/27161
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 292 }
[ 2830, 3393, 5475, 12485, 12945, 1155, 353, 8840, 836, 8, 341, 25291, 11, 21290, 1669, 1273, 16650, 7121, 2271, 2959, 1155, 340, 16867, 21290, 2822, 197, 665, 1669, 1273, 88025, 90, 6927, 35190, 353, 15067, 85309, 1043, 11, 220, 16, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetReplicaIP(t *testing.T) { tests := map[string]struct { expectedOutput string Volume VolumeInfo }{ "Fetching ReplicaIP from openebs.io/replica-ips": { Volume: VolumeInfo{ Volume: v1alpha1.CASVolume{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ "openebs.io/replica-ips": "10.60.0.11, 10.60.1.16, 10.60.2.10", }, }, }, }, expectedOutput: "10.60.0.11, 10.60.1.16, 10.60.2.10", }, "Fetching ReplicaIP from vsm.openebs.io/replica-ips": { Volume: VolumeInfo{ Volume: v1alpha1.CASVolume{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ "vsm.openebs.io/replica-ips": "10.60.0.11, 10.60.1.16, 10.60.2.10", }, }, }, }, expectedOutput: "10.60.0.11, 10.60.1.16, 10.60.2.10", }, "Fetching ReplicaIP when both keys are present": { Volume: VolumeInfo{ Volume: v1alpha1.CASVolume{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{ "vsm.openebs.io/replica-ips": "10.60.0.11, 10.60.1.16, 10.60.2.10", "openebs.io/replica-ips": "10.60.0.11, 10.60.1.16, 10.60.2.10", }, }, }, }, expectedOutput: "10.60.0.11, 10.60.1.16, 10.60.2.10", }, "Fetching ReplicaIP when no key is present": { Volume: VolumeInfo{ Volume: v1alpha1.CASVolume{ ObjectMeta: metav1.ObjectMeta{ Annotations: map[string]string{}, }, }, }, expectedOutput: "", }, } for name, tt := range tests { t.Run(name, func(t *testing.T) { got := tt.Volume.GetReplicaIP() if got != tt.expectedOutput { t.Fatalf("Test: %v Expected: %v but got: %v", name, tt.expectedOutput, got) } }) } }
explode_data.jsonl/78054
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 876 }
[ 2830, 3393, 1949, 18327, 15317, 3298, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 42400, 5097, 914, 198, 197, 17446, 4661, 260, 20265, 1731, 198, 197, 59403, 197, 197, 1, 52416, 94036, 3298, 504, 1787, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSelect(t *testing.T) { user := User{Name: "SelectUser1"} DB.Save(&user) var result User DB.Where("name = ?", user.Name).Select("name").Find(&result) if result.ID != 0 { t.Errorf("Should not have ID because only selected name, %+v", result.ID) } if user.Name != result.Name { t.Errorf("Should have user Name when selected it") } dryDB := DB.Session(&gorm.Session{DryRun: true}) r := dryDB.Select("name", "age").Find(&User{}) if !regexp.MustCompile("SELECT .*name.*,.*age.* FROM .*users.*").MatchString(r.Statement.SQL.String()) { t.Fatalf("Build Select with strings, but got %v", r.Statement.SQL.String()) } r = dryDB.Select([]string{"name", "age"}).Find(&User{}) if !regexp.MustCompile("SELECT .*name.*,.*age.* FROM .*users.*").MatchString(r.Statement.SQL.String()) { t.Fatalf("Build Select with slice, but got %v", r.Statement.SQL.String()) } r = dryDB.Table("users").Select("COALESCE(age,?)", 42).Find(&User{}) if !regexp.MustCompile(`SELECT COALESCE\(age,.*\) FROM .*users.*`).MatchString(r.Statement.SQL.String()) { t.Fatalf("Build Select with func, but got %v", r.Statement.SQL.String()) } // SELECT COALESCE(age,'42') FROM users; r = dryDB.Select("u.*").Table("users as u").First(&User{}, user.ID) if !regexp.MustCompile(`SELECT u\.\* FROM .*users.*`).MatchString(r.Statement.SQL.String()) { t.Fatalf("Build Select with u.*, but got %v", r.Statement.SQL.String()) } r = dryDB.Select("count(*)").Select("u.*").Table("users as u").First(&User{}, user.ID) if !regexp.MustCompile(`SELECT u\.\* FROM .*users.*`).MatchString(r.Statement.SQL.String()) { t.Fatalf("Build Select with u.*, but got %v", r.Statement.SQL.String()) } }
explode_data.jsonl/48705
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 637 }
[ 2830, 3393, 3379, 1155, 353, 8840, 836, 8, 341, 19060, 1669, 2657, 63121, 25, 330, 3379, 1474, 16, 16707, 45409, 13599, 2099, 872, 692, 2405, 1102, 2657, 198, 45409, 17067, 445, 606, 284, 42313, 1196, 2967, 568, 3379, 445, 606, 1827, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestBoolEqual(t *testing.T) { validate := New() type Test struct { Value bool `validate:"eq=true"` } var test Test err := validate.Struct(test) NotEqual(t, err, nil) AssertError(t, err.(ValidationErrors), "Test.Value", "Test.Value", "Value", "Value", "eq") test.Value = true err = validate.Struct(test) Equal(t, err, nil) }
explode_data.jsonl/77340
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 135 }
[ 2830, 3393, 11233, 2993, 1155, 353, 8840, 836, 8, 1476, 197, 7067, 1669, 1532, 2822, 13158, 3393, 2036, 341, 197, 47399, 1807, 1565, 7067, 2974, 11006, 11265, 8805, 197, 630, 2405, 1273, 3393, 271, 9859, 1669, 9593, 51445, 8623, 340, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWrongTypeArgInConfigFile(t *testing.T) { f := NewFixture(t, model.UserConfigState{}) defer f.TearDown() f.File("Tiltfile", ` config.define_string_list('foo') cfg = config.parse() print("foo:",cfg.get('foo', [])) `) f.File(UserConfigFileName, `{"foo": "1"}`) _, err := f.ExecFile("Tiltfile") require.Error(t, err) require.Contains(t, err.Error(), "specified invalid value for setting foo: expected array") }
explode_data.jsonl/65239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 29185, 929, 2735, 641, 2648, 1703, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 1532, 18930, 1155, 11, 1614, 7344, 2648, 1397, 37790, 16867, 282, 836, 682, 4454, 2822, 1166, 8576, 445, 51, 2963, 1192, 497, 22074, 1676, 16756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNoUnreadByteAfterPeek(t *testing.T) { br := NewReader(strings.NewReader("example")) br.ReadByte() br.Peek(1) if err := br.UnreadByte(); err == nil { t.Error("UnreadByte didn't fail after Peek") } }
explode_data.jsonl/22852
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 2753, 1806, 878, 7153, 6025, 10197, 1225, 1155, 353, 8840, 836, 8, 341, 80255, 1669, 1532, 5062, 51442, 68587, 445, 8687, 5455, 80255, 89626, 741, 80255, 94279, 7, 16, 340, 743, 1848, 1669, 1411, 10616, 878, 7153, 2129, 1848...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestNewRollupTargetV1ProtoWithCustomAggregationID(t *testing.T) { proto := &rulepb.RollupTarget{ Name: "testV1Proto", Tags: []string{"testTag2", "testTag1"}, Policies: []*policypb.Policy{ { StoragePolicy: &policypb.StoragePolicy{ Resolution: policypb.Resolution{ WindowSize: 10 * time.Second.Nanoseconds(), Precision: time.Second.Nanoseconds(), }, Retention: policypb.Retention{ Period: 24 * time.Hour.Nanoseconds(), }, }, AggregationTypes: []aggregationpb.AggregationType{1, 2}, }, { StoragePolicy: &policypb.StoragePolicy{ Resolution: policypb.Resolution{ WindowSize: time.Minute.Nanoseconds(), Precision: time.Minute.Nanoseconds(), }, Retention: policypb.Retention{ Period: 720 * time.Hour.Nanoseconds(), }, }, AggregationTypes: []aggregationpb.AggregationType{1, 2}, }, { StoragePolicy: &policypb.StoragePolicy{ Resolution: policypb.Resolution{ WindowSize: time.Hour.Nanoseconds(), Precision: time.Hour.Nanoseconds(), }, Retention: policypb.Retention{ Period: 365 * 24 * time.Hour.Nanoseconds(), }, }, AggregationTypes: []aggregationpb.AggregationType{1, 2}, }, }, } res, err := newRollupTargetFromV1Proto(proto) require.NoError(t, err) rr1, err := pipeline.NewRollupOp( pipeline.GroupByRollupType, "testV1Proto", []string{"testTag1", "testTag2"}, aggregation.MustCompressTypes(aggregation.Last, aggregation.Min), ) require.NoError(t, err) expected := rollupTarget{ Pipeline: pipeline.NewPipeline([]pipeline.OpUnion{ { Type: pipeline.RollupOpType, Rollup: rr1, }, }), StoragePolicies: policy.StoragePolicies{ policy.NewStoragePolicy(10*time.Second, xtime.Second, 24*time.Hour), policy.NewStoragePolicy(time.Minute, xtime.Minute, 720*time.Hour), policy.NewStoragePolicy(time.Hour, xtime.Hour, 365*24*time.Hour), }, } require.Equal(t, expected, res) }
explode_data.jsonl/8363
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 890 }
[ 2830, 3393, 3564, 32355, 454, 6397, 53, 16, 31549, 2354, 10268, 9042, 34442, 915, 1155, 353, 8840, 836, 8, 341, 197, 15110, 1669, 609, 12937, 16650, 71212, 454, 6397, 515, 197, 21297, 25, 330, 1944, 53, 16, 31549, 756, 197, 10261, 203...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDefaultProcessor_SendDocument(t *testing.T) { srv := &testingcommons.MockIdentityService{} srv.On("ValidateSignature", mock.Anything, mock.Anything).Return(nil).Once() dp := DefaultProcessor(srv, nil, nil, cfg).(defaultProcessor) ctxh := testingconfig.CreateAccountContext(t, cfg) self, err := contextutil.Account(ctxh) assert.NoError(t, err) sr := utils.RandomSlice(32) sig, err := self.SignMsg(sr) assert.NoError(t, err) // validations failed id := utils.RandomSlice(32) aid, err := anchors.ToAnchorID(id) assert.NoError(t, err) next := utils.RandomSlice(32) model := new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.On("CalculateDocumentRoot").Return(utils.RandomSlice(32), nil) model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv repo := mockRepo{} repo.On("GetDocumentRootOf", aid).Return(nil, errors.New("error")) dp.anchorRepository = repo err = dp.SendDocument(ctxh, model) model.AssertExpectations(t) srv.AssertExpectations(t) repo.AssertExpectations(t) assert.Error(t, err) assert.Contains(t, err.Error(), "post anchor validations failed") // get collaborators failed dr, err := anchors.ToDocumentRoot(utils.RandomSlice(32)) assert.NoError(t, err) model = new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.On("CalculateDocumentRoot").Return(dr[:], nil) model.On("GetSignerCollaborators", mock.Anything).Return(nil, errors.New("error")).Once() model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv repo = mockRepo{} repo.On("GetDocumentRootOf", 
aid).Return(dr, nil).Once() dp.anchorRepository = repo err = dp.SendDocument(ctxh, model) model.AssertExpectations(t) srv.AssertExpectations(t) repo.AssertExpectations(t) assert.Error(t, err) // pack core document failed model = new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.On("CalculateDocumentRoot").Return(dr[:], nil) model.On("GetSignerCollaborators", mock.Anything).Return([]identity.DID{testingidentity.GenerateRandomDID()}, nil).Once() model.On("PackCoreDocument").Return(nil, errors.New("error")).Once() model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv repo = mockRepo{} repo.On("GetDocumentRootOf", aid).Return(dr, nil).Once() dp.anchorRepository = repo err = dp.SendDocument(ctxh, model) model.AssertExpectations(t) srv.AssertExpectations(t) repo.AssertExpectations(t) assert.Error(t, err) // send failed cd := coredocumentpb.CoreDocument{} did := testingidentity.GenerateRandomDID() model = new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.On("CalculateDocumentRoot").Return(dr[:], nil) model.On("GetSignerCollaborators", mock.Anything).Return([]identity.DID{did}, nil).Once() model.On("PackCoreDocument").Return(cd, nil).Once() model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv repo = mockRepo{} repo.On("GetDocumentRootOf", aid).Return(dr, nil).Once() client := new(p2pClient) client.On("SendAnchoredDocument", mock.Anything, did, mock.Anything).Return(nil, errors.New("error")).Once() dp.anchorRepository = repo dp.p2pClient = client err = 
dp.SendDocument(ctxh, model) model.AssertExpectations(t) srv.AssertExpectations(t) repo.AssertExpectations(t) client.AssertExpectations(t) assert.Error(t, err) // successful model = new(mockModel) model.On("ID").Return(id) model.On("CurrentVersion").Return(id) model.On("NextVersion").Return(next) model.On("CalculateSigningRoot").Return(sr, nil) model.On("Signatures").Return() model.On("CalculateDocumentRoot").Return(dr[:], nil) model.On("GetSignerCollaborators", mock.Anything).Return([]identity.DID{did}, nil).Once() model.On("PackCoreDocument").Return(cd, nil).Once() model.sigs = append(model.sigs, sig) srv = &testingcommons.MockIdentityService{} srv.On("ValidateSignature", sig, sr).Return(nil).Once() dp.identityService = srv repo = mockRepo{} repo.On("GetDocumentRootOf", aid).Return(dr, nil).Once() client = new(p2pClient) client.On("SendAnchoredDocument", mock.Anything, did, mock.Anything).Return(&p2ppb.AnchorDocumentResponse{Accepted: true}, nil).Once() dp.anchorRepository = repo dp.p2pClient = client err = dp.SendDocument(ctxh, model) model.AssertExpectations(t) srv.AssertExpectations(t) repo.AssertExpectations(t) client.AssertExpectations(t) assert.NoError(t, err) }
explode_data.jsonl/57870
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2009 }
[ 2830, 3393, 3675, 22946, 46267, 7524, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 609, 8840, 52361, 24664, 18558, 1860, 16094, 1903, 10553, 8071, 445, 17926, 25088, 497, 7860, 13311, 1596, 11, 7860, 13311, 1596, 568, 5598, 27907, 568...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHotpCode(t *testing.T) { var cotp OTPConfig // reuse our test values from above // perhaps create more? cotp.Secret = "2SH3V3GDW7ZNMGYE" cotp.HotpCounter = 1 cotp.WindowSize = 3 var counterCodes = []struct { code int result bool counter int }{ { /* 1 */ 293240, true, 2}, // increments on success { /* 1 */ 293240, false, 3}, // and failure { /* 5 */ 932068, true, 6}, // inside of window { /* 10 */ 481725, false, 7}, // outside of window { /* 10 */ 481725, false, 8}, // outside of window { /* 10 */ 481725, true, 11}, // now inside of window } for i, s := range counterCodes { r := cotp.checkHotpCode(s.code) if r != s.result { t.Errorf("counterCode(%d) (step %d) failed: got %t expected %t", s.code, i, r, s.result) } if cotp.HotpCounter != s.counter { t.Errorf("hotpCounter incremented poorly: got %d expected %d", cotp.HotpCounter, s.counter) } } }
explode_data.jsonl/75089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 375 }
[ 2830, 3393, 20170, 79, 2078, 1155, 353, 8840, 836, 8, 1476, 2405, 47581, 79, 80868, 2648, 271, 197, 322, 25978, 1039, 1273, 2750, 504, 3403, 198, 197, 322, 8365, 1855, 803, 5267, 1444, 48708, 74779, 284, 330, 17, 8590, 18, 53, 18, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetChaincodeData(t *testing.T) { cryptoProvider, err := sw.NewDefaultSecurityLevelWithKeystore(sw.NewDummyKeyStore()) require.NoError(t, err) scc := &SCC{ BuiltinSCCs: map[string]struct{}{"lscc": {}}, Support: &SupportImpl{GetMSPIDs: getMSPIDs}, ACLProvider: mockAclProvider, GetMSPIDs: getMSPIDs, BCCSP: cryptoProvider, BuildRegistry: &container.BuildRegistry{}, ChaincodeBuilder: &mock.ChaincodeBuilder{}, } require.NotNil(t, scc) stub := shimtest.NewMockStub("lscc", scc) res := stub.MockInit("1", nil) require.Equal(t, int32(shim.OK), res.Status, res.Message) _, err = scc.getChaincodeData("barf", []byte("barf")) require.Error(t, err) _, err = scc.getChaincodeData("barf", protoutil.MarshalOrPanic(&ccprovider.ChaincodeData{Name: "barf s'more"})) require.Error(t, err) require.True(t, len(err.Error()) > 0) }
explode_data.jsonl/11797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 390 }
[ 2830, 3393, 1949, 18837, 1851, 1043, 1155, 353, 8840, 836, 8, 341, 1444, 9444, 5179, 11, 1848, 1669, 2021, 7121, 3675, 15352, 4449, 2354, 6608, 63373, 58902, 7121, 43344, 1592, 6093, 2398, 17957, 35699, 1155, 11, 1848, 340, 1903, 638, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreateAppOK(t *testing.T) { testServer(func(s *core.Server) { headers := make(map[string]string) headers["X-Access-Token"] = accessToken url := "/api/v1/developers/" + devID + "/apps/" //make request res, err := testHTTPRequestWithHeaders("POST", url, `{"name":"traverik","os":"android"}`, headers) if err != nil { t.Fatalf("unable to create app: %v , %v", url, err) } else { body, _ := ioutil.ReadAll(res.Body) if res.StatusCode != 200 { t.Fatalf("unable to create app: %v , %v", url, string(body)) } //fmt.Printf("query response: %v ", string(body)) response := core.AppResponse{} err := json.Unmarshal(body, &response) if err != nil { t.Fatalf("fail to parse body: %v", string(body)) } apiToken = response.APIToken apiSecret = response.APISecret fmt.Printf("apiToken: %v\n", apiToken) fmt.Printf("apiSecret: %v\n", apiSecret) } }) }
explode_data.jsonl/42212
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 381 }
[ 2830, 3393, 4021, 2164, 3925, 1155, 353, 8840, 836, 8, 1476, 18185, 5475, 18552, 1141, 353, 2153, 22997, 8, 1476, 197, 67378, 1669, 1281, 9147, 14032, 30953, 340, 197, 67378, 1183, 55, 12, 6054, 89022, 1341, 284, 37725, 271, 197, 19320,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAcls_ListAclsSuccess(t *testing.T) { mock := MockHttpClient{} mk := MockKafkaClient{} mock.DoRequestFn = func(method string, uri string, reqBody io.Reader) (responseBody []byte, statusCode int, status string, err error) { assert.Equal(t, http.MethodGet, method, "Expected method 'GET', got %s", method) assert.Equal(t, "/clusters/cluster-1/acls", uri) return []byte(` { "kind": "KafkaAclList", "metadata": { "self": "http://localhost:9391/v3/clusters/cluster-1/acls?principal=alice" }, "data": [ { "kind": "KafkaAcl", "metadata": { "self": "http://localhost:9391/v3/clusters/cluster-1/acls?resource_type=TOPIC&resource_name=topic-&pattern_type=PREFIXED&principal=alice&host=*&operation=ALL&permission=ALLOW" }, "cluster_id": "cluster-1", "resource_type": "TOPIC", "resource_name": "topic-", "pattern_type": "PREFIXED", "principal": "alice", "host": "*", "operation": "ALL", "permission": "ALLOW" }, { "kind": "KafkaAcl", "metadata": { "self": "http://localhost:9391/v3/clusters/cluster-1/acls?resource_type=CLUSTER&resource_name=cluster-1&pattern_type=LITERAL&principal=bob&host=*&operation=DESCRIBE&permission=DENY" }, "cluster_id": "cluster-1", "resource_type": "CLUSTER", "resource_name": "cluster-2", "pattern_type": "LITERAL", "principal": "alice", "host": "*", "operation": "DESCRIBE", "permission": "DENY" } ] } `), 200, "200 OK", nil } clusterAdmin, _ := mk.NewSaramaClusterAdmin() c := NewClient(&mock, &mk, clusterAdmin) acls, err := c.ListAcls("cluster-1") assert.NoError(t, err) assert.Equal(t, 2, len(acls)) assert.Equal(t, "alice", acls[0].Principal) }
explode_data.jsonl/10189
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 802 }
[ 2830, 3393, 32, 18074, 27104, 32, 18074, 7188, 1155, 353, 8840, 836, 8, 341, 77333, 1669, 14563, 26316, 16094, 2109, 74, 1669, 14563, 42, 21883, 2959, 16094, 77333, 33596, 1900, 24911, 284, 2915, 17262, 914, 11, 13071, 914, 11, 4232, 54...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSolveOperators_ResolveOnlyInCachedNamespaces(t *testing.T) { APISet := APISet{opregistry.APIKey{"g", "v", "k", "ks"}: struct{}{}} Provides := APISet namespace := "olm" catalog := registry.CatalogKey{"community", namespace} otherCatalog := registry.CatalogKey{Name: "secret", Namespace: "secret"} csv := existingOperator(namespace, "packageA.v1", "packageA", "alpha", "", Provides, nil, nil, nil) csvs := []*v1alpha1.ClusterServiceVersion{csv} newSub := newSub(namespace, "packageA", "alpha", catalog) subs := []*v1alpha1.Subscription{newSub} fakeNamespacedOperatorCache := NamespacedOperatorCache{ snapshots: map[registry.CatalogKey]*CatalogSnapshot{ catalog: { operators: []*Operator{ genOperator("packageA.v0.0.1", "0.0.1", "packageA.v1", "packageA", "alpha", otherCatalog.Name, otherCatalog.Namespace, nil, Provides, nil, ""), }, }, }, namespaces: []string{otherCatalog.Namespace}, } satResolver := SatResolver{ cache: getFakeOperatorCache(fakeNamespacedOperatorCache), } operators, err := satResolver.SolveOperators([]string{namespace}, csvs, subs) assert.Error(t, err) assert.Equal(t, err.Error(), "expected exactly one operator, got 0", "did not expect to receive a resolution") assert.Len(t, operators, 0) }
explode_data.jsonl/43807
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 461 }
[ 2830, 3393, 50, 3948, 77760, 62, 56808, 7308, 641, 70293, 7980, 27338, 1155, 353, 8840, 836, 8, 341, 197, 2537, 1637, 295, 1669, 10106, 1637, 295, 90, 453, 29172, 24922, 1592, 4913, 70, 497, 330, 85, 497, 330, 74, 497, 330, 2787, 92...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValueBinder_String(t *testing.T) { var testCases = []struct { name string givenFailFast bool givenBindErrors []error whenURL string whenMust bool expectValue string expectError string }{ { name: "ok, binds value", whenURL: "/search?param=en&param=de", expectValue: "en", }, { name: "ok, params values empty, value is not changed", whenURL: "/search?nr=en", expectValue: "default", }, { name: "nok, previous errors fail fast without binding value", givenFailFast: true, whenURL: "/search?nr=en&id=1&id=100", expectValue: "default", expectError: "previous error", }, { name: "ok (must), binds value", whenMust: true, whenURL: "/search?param=en&param=de", expectValue: "en", }, { name: "ok (must), params values empty, returns error, value is not changed", whenMust: true, whenURL: "/search?nr=en", expectValue: "default", expectError: "code=400, message=required field value is empty, field=param", }, { name: "nok (must), previous errors fail fast without binding value", givenFailFast: true, whenMust: true, whenURL: "/search?nr=en&id=1&id=100", expectValue: "default", expectError: "previous error", }, } for _, tc := range testCases { t.Run(tc.name, func(t *testing.T) { c := createTestContext(tc.whenURL, nil, nil) b := QueryParamsBinder(c).FailFast(tc.givenFailFast) if tc.givenFailFast { b.errors = []error{errors.New("previous error")} } dest := "default" var err error if tc.whenMust { err = b.MustString("param", &dest).BindError() } else { err = b.String("param", &dest).BindError() } assert.Equal(t, tc.expectValue, dest) if tc.expectError != "" { assert.EqualError(t, err, tc.expectError) } else { assert.NoError(t, err) } }) } }
explode_data.jsonl/82538
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 908 }
[ 2830, 3393, 1130, 44055, 31777, 1155, 353, 8840, 836, 8, 341, 2405, 1273, 37302, 284, 3056, 1235, 341, 197, 11609, 310, 914, 198, 197, 3174, 2071, 19524, 32174, 256, 1807, 198, 197, 3174, 2071, 9950, 13877, 3056, 841, 198, 197, 60180, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestURLVisitorBuilder(t *testing.T) { vs, _ := NewVisitorBuilder(). Stdin(). URL(0, &url.URL{}). HTTPAttemptCount(1). CommandParam(&CommandOptions{Kind: resource.KindCanary, Name: "name"}). Command(). Do() if vs != nil { for _, v := range vs { v.Visit(func(mo meta.MeshObject, e error) error { return nil }) } } }
explode_data.jsonl/12977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 3144, 16796, 3297, 1155, 353, 8840, 836, 8, 341, 5195, 82, 11, 716, 1669, 1532, 16796, 3297, 25829, 197, 197, 22748, 258, 25829, 197, 79055, 7, 15, 11, 609, 1085, 20893, 6257, 4292, 197, 197, 9230, 47052, 2507, 7, 16, 42...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidateReportingWithGerritLabel(t *testing.T) { cases := []struct { name string labels map[string]string reporter Reporter expected error }{ { name: "no errors if job is set to report", reporter: Reporter{ Context: "context", }, labels: map[string]string{ gerrit.GerritReportLabel: "label", }, }, { name: "no errors if Gerrit report label is not defined", reporter: Reporter{SkipReport: true}, labels: map[string]string{ "label": "value", }, }, { name: "no errors if job is set to skip report and Gerrit report label is empty", reporter: Reporter{SkipReport: true}, labels: map[string]string{ gerrit.GerritReportLabel: "", }, }, { name: "error if job is set to skip report and Gerrit report label is set to non-empty", reporter: Reporter{SkipReport: true}, labels: map[string]string{ gerrit.GerritReportLabel: "label", }, expected: fmt.Errorf("Gerrit report label %s set to non-empty string but job is configured to skip reporting.", gerrit.GerritReportLabel), }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { base := JobBase{ Name: "test-job", Labels: tc.labels, } presubmits := []Presubmit{ { JobBase: base, Reporter: tc.reporter, }, } var expected error if tc.expected != nil { expected = fmt.Errorf("invalid presubmit job %s: %v", "test-job", tc.expected) } if err := validatePresubmits(presubmits, "default-namespace"); !reflect.DeepEqual(err, utilerrors.NewAggregate([]error{expected})) { t.Errorf("did not get expected validation result:\n%v", cmp.Diff(expected, err)) } postsubmits := []Postsubmit{ { JobBase: base, Reporter: tc.reporter, }, } if tc.expected != nil { expected = fmt.Errorf("invalid postsubmit job %s: %v", "test-job", tc.expected) } if err := validatePostsubmits(postsubmits, "default-namespace"); !reflect.DeepEqual(err, utilerrors.NewAggregate([]error{expected})) { t.Errorf("did not get expected validation result:\n%v", cmp.Diff(expected, err)) } }) } }
explode_data.jsonl/8078
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 884 }
[ 2830, 3393, 17926, 70131, 2354, 38, 615, 275, 2476, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 257, 914, 198, 197, 95143, 256, 2415, 14032, 30953, 198, 197, 69931, 261, 46981, 198, 197, 42400, 1465, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPowerbuilderLexer(t *testing.T) { for _, file := range examples { input, err := newCharStream(file) if err != nil { t.Errorf("Failed to open example file: %s", err) } // Create the Lexer lexer := powerbuilder.NewpowerbuilderLexer(input) // Try and read all tokens i := 0 for ; i < MAX_TOKENS; i++ { tok := lexer.NextToken() if tok.GetTokenType() == antlr.TokenEOF { break } } // If we read too many tokens, then perhaps there is a problem with the lexer. if i >= MAX_TOKENS { t.Errorf("NewpowerbuilderLexer(%q) read %d tokens without finding EOF", file, i) } } }
explode_data.jsonl/54878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 14986, 17850, 92847, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1034, 1669, 2088, 10295, 341, 197, 22427, 11, 1848, 1669, 501, 4768, 3027, 4866, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244, 13080, 445, 9408, 311, 1787, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestWalk(t *testing.T) { dir := fs.NewDir(t, "mytestfiles", fs.WithFile("foo.js", "foo", fs.WithMode(0755)), fs.WithDir("mysubdir", fs.WithFile("bar.js", "bar", fs.WithMode(0755))), ) defer dir.Remove() type args struct { paths []string pattern string } tests := []struct { name string args args want []string wantErr bool }{ { name: "simple", args: args{ paths: []string{dir.Path()}, pattern: ".*.js", }, want: []string{dir.Join("foo.js"), dir.Join("mysubdir", "bar.js")}, wantErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := Walk(tt.args.paths, tt.args.pattern) if (err != nil) != tt.wantErr { t.Errorf("Walk() error = %v, wantErr %v", err, tt.wantErr) return } if !reflect.DeepEqual(got, tt.want) { t.Errorf("Walk() got = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/72033
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 444 }
[ 2830, 3393, 48849, 1155, 353, 8840, 836, 8, 341, 48532, 1669, 8619, 7121, 6184, 1155, 11, 330, 2408, 1944, 7198, 756, 197, 53584, 26124, 1703, 445, 7975, 2857, 497, 330, 7975, 497, 8619, 26124, 3636, 7, 15, 22, 20, 20, 6965, 197, 53...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSendLinkHeaderOtherHost(t *testing.T) { assert := assert.New(t) reqs := make(chan req, 1) endpoint := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { reqs <- req{r.FormValue("source"), r.FormValue("target")} })) defer endpoint.Close() target := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Link", "<"+endpoint.URL+"/webmention>; rel=webmention") })) defer target.Close() err := Send("http://example.com/my-post", target.URL) assert.Nil(err) select { case r := <-reqs: assert.Equal("http://example.com/my-post", r.source) assert.Equal(target.URL, r.target) case <-time.After(10 * time.Millisecond): assert.Fail("timed out") } }
explode_data.jsonl/3188
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 293 }
[ 2830, 3393, 11505, 3939, 4047, 11409, 9296, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 340, 24395, 82, 1669, 1281, 35190, 4232, 11, 220, 16, 692, 6246, 2768, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 3622, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestLog_pipelinerun_still_running_v1beta1 seeds a v1beta1 PipelineRun whose
// Succeeded condition is still Unknown ("Running"), starts streaming its logs
// through a fake dynamic client, then feeds the watcher an updated run that has
// completed. It asserts that the log fetcher prints the "Pipeline still running ..."
// banner while the run is in progress.
func TestLog_pipelinerun_still_running_v1beta1(t *testing.T) { var ( pipelineName = "inprogress-pipeline" prName = "inprogress-run" ns = "namespace" taskName = "inprogress-task" ) nsList := []*corev1.Namespace{ { ObjectMeta: metav1.ObjectMeta{ Name: ns, }, }, } initialPRs := []*v1beta1.PipelineRun{ { ObjectMeta: metav1.ObjectMeta{ Name: prName, Namespace: ns, Labels: map[string]string{"tekton.dev/pipeline": prName}, }, Spec: v1beta1.PipelineRunSpec{ PipelineRef: &v1beta1.PipelineRef{ Name: pipelineName, }, }, Status: v1beta1.PipelineRunStatus{ Status: duckv1beta1.Status{ Conditions: duckv1beta1.Conditions{ { Status: corev1.ConditionUnknown, Type: apis.ConditionSucceeded, Message: "Running", }, }, }, }, }, } finalPRs := []*v1beta1.PipelineRun{ { ObjectMeta: metav1.ObjectMeta{ Name: prName, Namespace: ns, }, Spec: v1beta1.PipelineRunSpec{ PipelineRef: &v1beta1.PipelineRef{ Name: pipelineName, }, }, Status: v1beta1.PipelineRunStatus{ Status: duckv1beta1.Status{ Conditions: duckv1beta1.Conditions{ { Status: corev1.ConditionUnknown, Type: apis.ConditionSucceeded, Message: "Running", }, }, }, }, }, { ObjectMeta: metav1.ObjectMeta{ Name: prName, Namespace: ns, }, Spec: v1beta1.PipelineRunSpec{ PipelineRef: &v1beta1.PipelineRef{ Name: pipelineName, }, }, Status: v1beta1.PipelineRunStatus{ Status: duckv1beta1.Status{ Conditions: duckv1beta1.Conditions{ { Status: corev1.ConditionTrue, Type: apis.ConditionSucceeded, Message: "Running", }, }, }, }, }, } ps := []*v1beta1.Pipeline{ { ObjectMeta: metav1.ObjectMeta{ Name: pipelineName, Namespace: ns, }, Spec: v1beta1.PipelineSpec{ Tasks: []v1beta1.PipelineTask{ { Name: taskName, TaskRef: &v1beta1.TaskRef{ Name: taskName, }, }, }, }, }, } cs, _ := test.SeedV1beta1TestData(t, pipelinev1beta1test.Data{PipelineRuns: initialPRs, Pipelines: ps, Namespaces: nsList}) watcher := watch.NewFake() cs.Pipeline.Resources = cb.APIResourceList(versionB1, []string{"taskrun", "pipeline", "pipelinerun"}) tdc := testDynamic.Options{WatchResource: 
"pipelineruns", Watcher: watcher} dc, err := tdc.Client( cb.UnstructuredV1beta1P(ps[0], versionB1), cb.UnstructuredV1beta1PR(initialPRs[0], versionB1), ) if err != nil { t.Errorf("unable to create dynamic client: %v", err) } prlo := logOptsv1beta1(prName, ns, cs, dc, fake.Streamer([]fake.Log{}), false, false) updatePRv1beta1(finalPRs, watcher) output, err := fetchLogs(prlo) if err != nil { t.Errorf("Unexpected error: %v", err) } test.AssertOutput(t, "Pipeline still running ..."+"\n", output) }
explode_data.jsonl/14873
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1487 }
[ 2830, 3393, 2201, 620, 81079, 10453, 359, 1261, 483, 37333, 2273, 16, 19127, 16, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 3223, 8790, 675, 284, 330, 258, 14366, 2268, 8790, 698, 197, 25653, 675, 981, 284, 330, 258, 14366, 22973,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestClientCredentials_PopulateTokenEndpointResponse drives the
// client_credentials grant handler through three mocked scenarios: an
// unrelated grant type (ErrUnknownRequest), a client that is not allowed the
// grant (ErrInvalidGrant), and the happy path where an access token is
// generated by the strategy mock and persisted through the storage mock.
// The shared request object (areq) is mutated by each case's mock() before
// PopulateTokenEndpointResponse runs, so case order matters.
func TestClientCredentials_PopulateTokenEndpointResponse(t *testing.T) { ctrl := gomock.NewController(t) store := internal.NewMockClientCredentialsGrantStorage(ctrl) chgen := internal.NewMockAccessTokenStrategy(ctrl) areq := fosite.NewAccessRequest(new(fosite.DefaultSession)) aresp := fosite.NewAccessResponse() defer ctrl.Finish() h := ClientCredentialsGrantHandler{ HandleHelper: &HandleHelper{ AccessTokenStorage: store, AccessTokenStrategy: chgen, AccessTokenLifespan: time.Hour, }, ScopeStrategy: fosite.HierarchicScopeStrategy, } for k, c := range []struct { description string mock func() req *http.Request expectErr error }{ { description: "should fail because not responsible", expectErr: fosite.ErrUnknownRequest, mock: func() { areq.GrantTypes = fosite.Arguments{""} }, }, { description: "should fail because client not allowed", expectErr: fosite.ErrInvalidGrant, mock: func() { areq.GrantTypes = fosite.Arguments{"client_credentials"} areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"foo"}} }, }, { description: "should pass", mock: func() { areq.GrantTypes = fosite.Arguments{"client_credentials"} areq.Session = &fosite.DefaultSession{} areq.Client = &fosite.DefaultClient{GrantTypes: fosite.Arguments{"client_credentials"}} chgen.EXPECT().GenerateAccessToken(nil, areq).Return("tokenfoo.bar", "bar", nil) store.EXPECT().CreateAccessTokenSession(nil, "bar", gomock.Eq(areq.Sanitize([]string{}))).Return(nil) }, }, } { t.Run(fmt.Sprintf("case=%d", k), func(t *testing.T) { c.mock() err := h.PopulateTokenEndpointResponse(nil, areq, aresp) if c.expectErr != nil { require.EqualError(t, err, c.expectErr.Error()) } else { require.NoError(t, err) } }) } }
explode_data.jsonl/41176
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 756 }
[ 2830, 3393, 2959, 27025, 1088, 453, 6334, 3323, 27380, 2582, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 57279, 1669, 5306, 7121, 11571, 2959, 27025, 67971, 5793, 62100, 340, 23049, 4370, 1669, 5306, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClientServerNoAddrPrefix(t *testing.T) { addr := "localhost:26658" transport := "socket" app := kvstore.NewKVStoreApplication() server, err := abciserver.NewServer(addr, transport, app) assert.NoError(t, err, "expected no error on NewServer") err = server.Start() assert.NoError(t, err, "expected no error on server.Start") client, err := abciclient.NewClient(addr, transport, true) assert.NoError(t, err, "expected no error on NewClient") err = client.Start() assert.NoError(t, err, "expected no error on client.Start") }
explode_data.jsonl/15284
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 2959, 5475, 2753, 13986, 14335, 1155, 353, 8840, 836, 8, 341, 53183, 1669, 330, 8301, 25, 17, 21, 21, 20, 23, 698, 197, 26445, 1669, 330, 9556, 698, 28236, 1669, 16178, 4314, 7121, 82707, 6093, 4988, 2822, 41057, 11, 1848,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAccAWSS3BucketObject_contentBase64 is a Terraform acceptance test:
// it applies a configuration whose object content is supplied base64-encoded
// and then checks that the object exists in S3 and that its stored body equals
// the original (decoded) content. Runs in parallel and only when acceptance
// testing is enabled (PreCheck).
func TestAccAWSS3BucketObject_contentBase64(t *testing.T) { var obj s3.GetObjectOutput resourceName := "aws_s3_bucket_object.object" rInt := acctest.RandInt() resource.ParallelTest(t, resource.TestCase{ PreCheck: func() { testAccPreCheck(t) }, Providers: testAccProviders, CheckDestroy: testAccCheckAWSS3BucketObjectDestroy, Steps: []resource.TestStep{ { PreConfig: func() {}, Config: testAccAWSS3BucketObjectConfigContentBase64(rInt, base64.StdEncoding.EncodeToString([]byte("some_bucket_content"))), Check: resource.ComposeTestCheckFunc( testAccCheckAWSS3BucketObjectExists(resourceName, &obj), testAccCheckAWSS3BucketObjectBody(&obj, "some_bucket_content"), ), }, }, }) }
explode_data.jsonl/64957
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 14603, 14419, 1220, 18, 36018, 1190, 7495, 3978, 21, 19, 1155, 353, 8840, 836, 8, 341, 2405, 2839, 274, 18, 25618, 5097, 198, 50346, 675, 1669, 330, 8635, 643, 18, 38749, 5314, 6035, 698, 7000, 1072, 1669, 1613, 67880, 201...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCancelJobHappyCase(t *testing.T) { httpmock.Activate() defer httpmock.DeactivateAndReset() ctx := context.Background() response := SavepointJobResponse{ TriggerID: "133", } responder, _ := httpmock.NewJsonResponder(203, response) httpmock.RegisterResponder("POST", fakeCancelURL, responder) client := getTestJobManagerClient() resp, err := client.CancelJobWithSavepoint(ctx, testURL, "1") assert.Equal(t, response.TriggerID, resp) assert.NoError(t, err) }
explode_data.jsonl/32370
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 170 }
[ 2830, 3393, 9269, 12245, 32847, 4207, 1155, 353, 8840, 836, 8, 341, 28080, 16712, 14140, 731, 741, 16867, 1758, 16712, 8934, 16856, 3036, 14828, 741, 20985, 1669, 2266, 19047, 741, 21735, 1669, 10255, 2768, 12245, 2582, 515, 197, 197, 179...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKustomizationError_Error(t *testing.T) { filepath := "/path/to/" + constants.KustomizationFileName errorMsg := "Kustomization not found" me := KustomizationError{KustomizationPath: filepath, ErrorMsg: errorMsg} if !strings.Contains(me.Error(), filepath) { t.Errorf("Incorrect KustomizationError.Error() message \n") t.Errorf("Expected filepath %s, but unfound\n", filepath) } if !strings.Contains(me.Error(), errorMsg) { t.Errorf("Incorrect KustomizationError.Error() message \n") t.Errorf("Expected errorMsg %s, but unfound\n", errorMsg) } }
explode_data.jsonl/72344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 202 }
[ 2830, 3393, 42, 1450, 2022, 1454, 28651, 1155, 353, 8840, 836, 8, 341, 17661, 2343, 1669, 3521, 2343, 32429, 11225, 488, 18021, 11352, 1450, 2022, 10903, 198, 18290, 6611, 1669, 330, 42, 1450, 2022, 537, 1730, 1837, 49294, 1669, 730, 14...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestConverterRecursiveSimple checks that convertTypes handles a Thrift
// struct containing an optional field of its own type: the generated code is
// expected to declare a named recursive helper (convertBarHelper1) that nil-
// checks and converts the self-referential field. Output is compared against
// the golden snippet after whitespace trimming.
func TestConverterRecursiveSimple(t *testing.T) { fieldMap := make(map[string]codegen.FieldMapperEntry) lines, err := convertTypes( "Foo", "Bar", `struct Foo { 1: required string three 2: optional Foo recur } struct Bar { 1: required string three 2: optional Bar recur }`, nil, fieldMap, ) assert.NoError(t, err) assertPrettyEqual(t, trim(` out.Three = string(in.Three) var convertBarHelper1 func(in *structs.Foo) (out *structs.Bar) convertBarHelper1 = func(in *structs.Foo) (out *structs.Bar) { if in != nil { out = &structs.Bar{} out.Three = string(in.Three) out.Recur = convertBarHelper1(in.Recur) } else { out = nil } return } out.Recur = convertBarHelper1(in.Recur) `), lines) }
explode_data.jsonl/62092
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 331 }
[ 2830, 3393, 14920, 78542, 16374, 1155, 353, 8840, 836, 8, 341, 39250, 2227, 1669, 1281, 9147, 14032, 60, 95859, 17087, 10989, 5874, 692, 78390, 11, 1848, 1669, 5508, 4173, 1006, 197, 197, 1, 40923, 497, 330, 3428, 756, 197, 197, 63, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLoadDaemonConfigWithNetwork(t *testing.T) { content := `{"bip": "127.0.0.2", "ip": "127.0.0.1"}` tempFile := tempfile.NewTempFile(t, "config", content) defer tempFile.Remove() opts := defaultOptions(tempFile.Name()) loadedConfig, err := loadDaemonCliConfig(opts) assert.NilError(t, err) assert.NotNil(t, loadedConfig) assert.Equal(t, loadedConfig.IP, "127.0.0.2") assert.Equal(t, loadedConfig.DefaultIP.String(), "127.0.0.1") }
explode_data.jsonl/12065
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 5879, 89177, 2648, 2354, 12320, 1155, 353, 8840, 836, 8, 341, 27751, 1669, 1565, 4913, 65, 573, 788, 330, 16, 17, 22, 13, 15, 13, 15, 13, 17, 497, 330, 573, 788, 330, 16, 17, 22, 13, 15, 13, 15, 13, 16, 9207, 3989,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestSetCellStyleBorderErrors exercises NewStyle error handling for border
// definitions: an empty style string must fail JSON decoding with
// "unexpected end of JSON input", while border entries whose style index is
// outside the usual range (e.g. -1, 14) are apparently accepted without
// error — NOTE(review): the second assertion expects NoError despite the
// "invalid style index" comment; confirm that out-of-range indexes are
// intentionally tolerated.
func TestSetCellStyleBorderErrors(t *testing.T) { f, err := prepareTestBook1() if !assert.NoError(t, err) { t.FailNow() } // Set border with invalid style parameter. _, err = f.NewStyle("") if !assert.EqualError(t, err, "unexpected end of JSON input") { t.FailNow() } // Set border with invalid style index number. _, err = f.NewStyle(`{"border":[{"type":"left","color":"0000FF","style":-1},{"type":"top","color":"00FF00","style":14},{"type":"bottom","color":"FFFF00","style":5},{"type":"right","color":"FF0000","style":6},{"type":"diagonalDown","color":"A020F0","style":9},{"type":"diagonalUp","color":"A020F0","style":8}]}`) if !assert.NoError(t, err) { t.FailNow() } }
explode_data.jsonl/36970
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 251 }
[ 2830, 3393, 1649, 15171, 10691, 13877, 1155, 353, 8840, 836, 8, 341, 1166, 11, 1848, 1669, 10549, 2271, 7134, 16, 741, 743, 753, 2207, 35699, 1155, 11, 1848, 8, 341, 197, 3244, 57243, 7039, 741, 197, 630, 197, 322, 2573, 3886, 448, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGetUnstakeRequests(t *testing.T) { if !areTestAPIKeysSet() { t.Skip("skipping test, api keys not set") } _, err := f.GetUnstakeRequests(context.Background()) if err != nil { t.Error(err) } }
explode_data.jsonl/15252
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 1949, 1806, 267, 726, 35295, 1155, 353, 8840, 836, 8, 341, 743, 753, 546, 2271, 7082, 8850, 1649, 368, 341, 197, 3244, 57776, 445, 4886, 5654, 1273, 11, 6330, 6894, 537, 738, 1138, 197, 532, 197, 6878, 1848, 1669, 282, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
// TestGetPodDNSCustom is a table-driven test of GetPodDNS under the
// CustomPodDNS feature gate. It builds a Configurer with a known cluster
// nameserver/domain and a temp resolv.conf providing the host nameserver and
// search domain, then checks the merged DNS servers, search paths, and options
// for each DNSPolicy (DNSNone, DNSClusterFirst, DNSClusterFirstWithHostNet,
// DNSDefault) both with the gate on and (for DNSNone) with it off, where the
// policy falls back to cluster-first behavior. The original gate value is
// restored by the deferred func on exit.
func TestGetPodDNSCustom(t *testing.T) { customDNSEnabled := utilfeature.DefaultFeatureGate.Enabled("CustomPodDNS") defer func() { // Restoring the old value. if err := utilfeature.DefaultFeatureGate.Set(fmt.Sprintf("CustomPodDNS=%v", customDNSEnabled)); err != nil { t.Errorf("Failed to set CustomPodDNS feature gate: %v", err) } }() recorder := record.NewFakeRecorder(20) nodeRef := &v1.ObjectReference{ Kind: "Node", Name: string("testNode"), UID: types.UID("testNode"), Namespace: "", } testPodNamespace := "testNS" testClusterNameserver := "10.0.0.10" testClusterDNSDomain := "kubernetes.io" testSvcDomain := fmt.Sprintf("svc.%s", testClusterDNSDomain) testNsSvcDomain := fmt.Sprintf("%s.svc.%s", testPodNamespace, testClusterDNSDomain) testNdotsOptionValue := "3" testHostNameserver := "8.8.8.8" testHostDomain := "host.domain" testPod := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "test_pod", Namespace: testPodNamespace, }, } resolvConfContent := []byte(fmt.Sprintf("nameserver %s\nsearch %s\n", testHostNameserver, testHostDomain)) tmpfile, err := ioutil.TempFile("", "tmpResolvConf") if err != nil { t.Fatal(err) } defer os.Remove(tmpfile.Name()) if _, err := tmpfile.Write(resolvConfContent); err != nil { t.Fatal(err) } if err := tmpfile.Close(); err != nil { t.Fatal(err) } configurer := NewConfigurer(recorder, nodeRef, nil, []net.IP{net.ParseIP(testClusterNameserver)}, testClusterDNSDomain, tmpfile.Name()) testCases := []struct { desc string customPodDNSFeatureGate bool hostnetwork bool dnsPolicy v1.DNSPolicy dnsConfig *v1.PodDNSConfig expectedDNSConfig *runtimeapi.DNSConfig }{ { desc: "feature gate is disabled, DNSNone should fallback to DNSClusterFirst", dnsPolicy: v1.DNSNone, expectedDNSConfig: &runtimeapi.DNSConfig{ Servers: []string{testClusterNameserver}, Searches: []string{testNsSvcDomain, testSvcDomain, testClusterDNSDomain, testHostDomain}, Options: []string{"ndots:5"}, }, }, { desc: "feature gate is enabled, DNSNone without DNSConfig should have empty DNS 
settings", customPodDNSFeatureGate: true, dnsPolicy: v1.DNSNone, expectedDNSConfig: &runtimeapi.DNSConfig{}, }, { desc: "feature gate is enabled, DNSNone with DNSConfig should have a merged DNS settings", customPodDNSFeatureGate: true, dnsPolicy: v1.DNSNone, dnsConfig: &v1.PodDNSConfig{ Nameservers: []string{"203.0.113.1"}, Searches: []string{"my.domain", "second.domain"}, Options: []v1.PodDNSConfigOption{ {Name: "ndots", Value: &testNdotsOptionValue}, {Name: "debug"}, }, }, expectedDNSConfig: &runtimeapi.DNSConfig{ Servers: []string{"203.0.113.1"}, Searches: []string{"my.domain", "second.domain"}, Options: []string{"ndots:3", "debug"}, }, }, { desc: "feature gate is enabled, DNSClusterFirst with DNSConfig should have a merged DNS settings", customPodDNSFeatureGate: true, dnsPolicy: v1.DNSClusterFirst, dnsConfig: &v1.PodDNSConfig{ Nameservers: []string{"10.0.0.11"}, Searches: []string{"my.domain"}, Options: []v1.PodDNSConfigOption{ {Name: "ndots", Value: &testNdotsOptionValue}, {Name: "debug"}, }, }, expectedDNSConfig: &runtimeapi.DNSConfig{ Servers: []string{testClusterNameserver, "10.0.0.11"}, Searches: []string{testNsSvcDomain, testSvcDomain, testClusterDNSDomain, testHostDomain, "my.domain"}, Options: []string{"ndots:3", "debug"}, }, }, { desc: "feature gate is enabled, DNSClusterFirstWithHostNet with DNSConfig should have a merged DNS settings", customPodDNSFeatureGate: true, hostnetwork: true, dnsPolicy: v1.DNSClusterFirstWithHostNet, dnsConfig: &v1.PodDNSConfig{ Nameservers: []string{"10.0.0.11"}, Searches: []string{"my.domain"}, Options: []v1.PodDNSConfigOption{ {Name: "ndots", Value: &testNdotsOptionValue}, {Name: "debug"}, }, }, expectedDNSConfig: &runtimeapi.DNSConfig{ Servers: []string{testClusterNameserver, "10.0.0.11"}, Searches: []string{testNsSvcDomain, testSvcDomain, testClusterDNSDomain, testHostDomain, "my.domain"}, Options: []string{"ndots:3", "debug"}, }, }, { desc: "feature gate is enabled, DNSDefault with DNSConfig should have a merged DNS 
settings", customPodDNSFeatureGate: true, dnsPolicy: v1.DNSDefault, dnsConfig: &v1.PodDNSConfig{ Nameservers: []string{"10.0.0.11"}, Searches: []string{"my.domain"}, Options: []v1.PodDNSConfigOption{ {Name: "ndots", Value: &testNdotsOptionValue}, {Name: "debug"}, }, }, expectedDNSConfig: &runtimeapi.DNSConfig{ Servers: []string{testHostNameserver, "10.0.0.11"}, Searches: []string{testHostDomain, "my.domain"}, Options: []string{"ndots:3", "debug"}, }, }, } for _, tc := range testCases { if err := utilfeature.DefaultFeatureGate.Set(fmt.Sprintf("CustomPodDNS=%v", tc.customPodDNSFeatureGate)); err != nil { t.Errorf("Failed to set CustomPodDNS feature gate: %v", err) } testPod.Spec.HostNetwork = tc.hostnetwork testPod.Spec.DNSConfig = tc.dnsConfig testPod.Spec.DNSPolicy = tc.dnsPolicy resDNSConfig, err := configurer.GetPodDNS(testPod) if err != nil { t.Errorf("%s: GetPodDNS(%v), unexpected error: %v", tc.desc, testPod, err) } if !dnsConfigsAreEqual(resDNSConfig, tc.expectedDNSConfig) { t.Errorf("%s: GetPodDNS(%v)=%v, want %v", tc.desc, testPod, resDNSConfig, tc.expectedDNSConfig) } } }
explode_data.jsonl/59250
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2467 }
[ 2830, 3393, 1949, 23527, 61088, 10268, 1155, 353, 8840, 836, 8, 341, 1444, 1450, 31264, 925, 77, 2312, 1669, 4094, 12753, 13275, 13859, 42318, 13690, 445, 10268, 23527, 61088, 1138, 16867, 2915, 368, 341, 197, 197, 322, 9063, 5503, 279, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestDeleteBackend wires a mocked storage backend and a mocked persistent
// store into the orchestrator and verifies that DeleteBackend (for a backend
// with no volumes) marks the backend as Deleting, takes it offline,
// terminates it, deletes it from the persistent store, and removes it from
// the orchestrator's in-memory backend map. The gomock EXPECT calls encode
// both the "don't care" defaults (AnyTimes) and the behavior under test.
func TestDeleteBackend(t *testing.T) { // Boilerplate mocking code mockCtrl := gomock.NewController(t) // Set fake values backendName := "foobar" backendUUID := "1234" // Create a mocked storage backend mockBackend := mockstorage.NewMockBackend(mockCtrl) // Set backend behavior we don't care about for this testcase mockBackend.EXPECT().Name().Return(backendName).AnyTimes() // Always return the fake name mockBackend.EXPECT().BackendUUID().Return(backendUUID).AnyTimes() // Always return the fake UUID mockBackend.EXPECT().ConfigRef().Return("").AnyTimes() // Always return an empty configRef mockBackend.EXPECT().GetDriverName().Return("baz").AnyTimes() // Always return a fake driver name mockBackend.EXPECT().Storage().Return(map[string]storage.Pool{}).AnyTimes() // Always return an empty storage list mockBackend.EXPECT().HasVolumes().Return(false).AnyTimes() // Always return no volumes // Set the backend behavior we do care about for this testcase mockBackend.EXPECT().SetState(storage.Deleting) // The backend should be set to deleting mockBackend.EXPECT().SetOnline(false) // The backend should be set offline mockBackend.EXPECT().Terminate(gomock.Any()) // The backend should be terminated // Create a mocked persistent store client mockStoreClient := mockpersistentstore.NewMockStoreClient(mockCtrl) // Set the store client behavior we don't care about for this testcase mockStoreClient.EXPECT().GetVolumeTransactions(gomock.Any()).Return([]*storage.VolumeTransaction{}, nil).AnyTimes() // Set the store client behavior we do care about for this testcase mockStoreClient.EXPECT().DeleteBackend(gomock.Any(), mockBackend).Return(nil) // Create an instance of the orchestrator for this test orchestrator := getOrchestrator(t) // Add the mocked objects to the orchestrator orchestrator.storeClient = mockStoreClient orchestrator.backends[backendUUID] = mockBackend // Perform the test err := orchestrator.DeleteBackend(ctx(), backendName) // Verify the results assert.Nilf(t, err, "Error 
getting backend; %v", err) _, ok := orchestrator.backends[backendUUID] assert.False(t, ok, "Backend was not properly deleted") }
explode_data.jsonl/62766
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 745 }
[ 2830, 3393, 6435, 29699, 1155, 353, 8840, 836, 8, 341, 197, 322, 45665, 1750, 66483, 2038, 198, 77333, 15001, 1669, 342, 316, 1176, 7121, 2051, 1155, 692, 197, 322, 2573, 12418, 2750, 198, 197, 20942, 675, 1669, 330, 50267, 698, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGetDefaultProvider checks that a default provider is registered and
// that its name is ATS.
func TestGetDefaultProvider(t *testing.T) {
	if !assert.NotNil(t, helper.GetDefaultProvider(), "should not be nil") {
		return
	}
	assert.Equal(t, helper.GetDefaultProvider().Name(), ATS, "should return ATS")
}
explode_data.jsonl/15639
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 1949, 3675, 5179, 1155, 353, 8840, 836, 8, 341, 743, 2060, 93882, 1155, 11, 13137, 2234, 3675, 5179, 1507, 330, 5445, 537, 387, 2092, 899, 341, 197, 6948, 12808, 1155, 11, 13137, 2234, 3675, 5179, 1005, 675, 1507, 95375, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestSimpleRun(t *testing.T) { ctx, cancel := context.WithTimeout(context.Background(), defaultTestTimeout) defer cancel() ns, cleanup := getNamespace(t) defer cleanup() args := fmt.Sprintf("run --image-pull-policy IfNotPresent --wait -p testImage/yaml/job-junit-passing-singlefile.yaml -n %v", ns) mustRunSonobuoyCommandWithContext(ctx, t, args) }
explode_data.jsonl/54727
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 124 }
[ 2830, 3393, 16374, 6727, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 1638, 2271, 7636, 340, 16867, 9121, 2822, 84041, 11, 21290, 1669, 633, 22699, 1155, 340, 16867, 21290, 2822, 31215, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseQueryParamBool(t *testing.T) { req := httptest.NewRequest("GET", "/target?boolean=true", nil) require.True(t, rest.ParseQueryParamBool(req, "boolean")) require.False(t, rest.ParseQueryParamBool(req, "nokey")) req = httptest.NewRequest("GET", "/target?boolean=false", nil) require.False(t, rest.ParseQueryParamBool(req, "boolean")) require.False(t, rest.ParseQueryParamBool(req, "")) }
explode_data.jsonl/55934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 155 }
[ 2830, 3393, 14463, 84085, 11233, 1155, 353, 8840, 836, 8, 341, 24395, 1669, 54320, 70334, 75274, 445, 3806, 497, 3521, 5657, 30, 6117, 11265, 497, 2092, 340, 17957, 32443, 1155, 11, 2732, 8937, 84085, 11233, 6881, 11, 330, 6117, 5455, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGenesisChainID(t *testing.T) { a := assert.New(t) g := GetDefaultGenesis() chainID, err := g.ChainID() a.Nil(err) a.True(g.ID.Equals(&defaultChainID)) fmt.Println("len:", len(chainID)) fmt.Println("chain_id: ", enc.ToString(chainID)) }
explode_data.jsonl/47845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 110 }
[ 2830, 3393, 84652, 18837, 915, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2060, 7121, 1155, 340, 3174, 1669, 2126, 3675, 84652, 741, 197, 8819, 915, 11, 1848, 1669, 342, 98269, 915, 741, 11323, 59678, 3964, 340, 11323, 32443, 3268, 9910...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestLookupHashNew(t *testing.T) { l := createLookup(t, "lookup_hash", false) if want, got := l.(*LookupHash).writeOnly, false; got != want { t.Errorf("Create(lookup, false): %v, want %v", got, want) } l = createLookup(t, "lookup_hash", true) if want, got := l.(*LookupHash).writeOnly, true; got != want { t.Errorf("Create(lookup, false): %v, want %v", got, want) } _, err := CreateVindex("lookup_hash", "lookup_hash", map[string]string{ "table": "t", "from": "fromc", "to": "toc", "write_only": "invalid", }) want := "write_only value must be 'true' or 'false': 'invalid'" if err == nil || err.Error() != want { t.Errorf("Create(bad_scatter): %v, want %s", err, want) } }
explode_data.jsonl/3413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 310 }
[ 2830, 3393, 34247, 6370, 3564, 1155, 353, 8840, 836, 8, 341, 8810, 1669, 1855, 34247, 1155, 11, 330, 21020, 8950, 497, 895, 340, 743, 1366, 11, 2684, 1669, 326, 41399, 34247, 6370, 568, 4934, 7308, 11, 895, 26, 2684, 961, 1366, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestVectorsJWECorrupt builds an RSA private key from fixed hex test
// vectors and verifies that Decrypt returns ErrCryptoFailure for a JWE whose
// ciphertext — and, separately, whose auth tag — has been tampered with.
// The two corrupt inputs differ from a valid token by a single character.
func TestVectorsJWECorrupt(t *testing.T) {
	// Key components (modulus N, public exponent E, private exponent D,
	// primes) come from the JWE test vectors; fromHexInt ignores whitespace.
	priv := &rsa.PrivateKey{
		PublicKey: rsa.PublicKey{
			N: fromHexInt(`
				a8b3b284af8eb50b387034a860f146c4919f318763cd6c5598c8
				ae4811a1e0abc4c7e0b082d693a5e7fced675cf4668512772c0c
				bc64a742c6c630f533c8cc72f62ae833c40bf25842e984bb78bd
				bf97c0107d55bdb662f5c4e0fab9845cb5148ef7392dd3aaff93
				ae1e6b667bb3d4247616d4f5ba10d4cfd226de88d39f16fb`),
			E: 65537,
		},
		D: fromHexInt(`
				53339cfdb79fc8466a655c7316aca85c55fd8f6dd898fdaf1195
				17ef4f52e8fd8e258df93fee180fa0e4ab29693cd83b152a553d
				4ac4d1812b8b9fa5af0e7f55fe7304df41570926f3311f15c4d6
				5a732c483116ee3d3d2d0af3549ad9bf7cbfb78ad884f84d5beb
				04724dc7369b31def37d0cf539e9cfcdd3de653729ead5d1`),
		Primes: []*big.Int{
			fromHexInt(`
				d32737e7267ffe1341b2d5c0d150a81b586fb3132bed2f8d5262
				864a9cb9f30af38be448598d413a172efb802c21acf1c11c520c
				2f26a471dcad212eac7ca39d`),
			fromHexInt(`
				cc8853d1d54da630fac004f471f281c7b8982d8224a490edbeb3
				3d3e3d5cc93c4765703d1dd791642f1f116a0dd852be2419b2af
				72bfe9a030e860b0288b5d77`),
		},
	}

	// Valid token with one flipped character in the ciphertext segment.
	corruptCiphertext := stripWhitespace(`
		eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.NFl09dehy
		IR2Oh5iSsvEa82Ps7DLjRHeo0RnuTuSR45OsaIP6U8yu7vLlWaZKSZMy
		B2qRBSujf-5XIRoNhtyIyjk81eJRXGa_Bxaor1XBCMyyhGchW2H2P71f
		PhDO6ufSC7kV4bNqgHR-4ziS7KXwzN83_5kogXqxUpymUoJDNc.tk-GT
		W_VVhiTIKFF.D_BE6ImZUl9F.52a-zFnRb3YQwIC7UrhVyQ`)

	// Valid token with one flipped character in the auth-tag segment.
	corruptAuthtag := stripWhitespace(`
		eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhHQ00ifQ.NFl09dehy
		IR2Oh5iSsvEa82Ps7DLjRHeo0RnuTuSR45OsaIP6U8yu7vLlWaZKSZMy
		B2qRBSujf-5XIRoNhtyIyjk81eJRXGa_Bxaor1XBCMyyhGchW2H2P71f
		PhDO6ufSC7kV4bNqgHR-4ziS7KNwzN83_5kogXqxUpymUoJDNc.tk-GT
		W_VVhiTIKFF.D_BE6ImZUl9F.52a-zFnRb3YQwiC7UrhVyQ`)

	msg, _ := ParseEncrypted(corruptCiphertext)
	_, err := msg.Decrypt(priv)
	if err != ErrCryptoFailure {
		t.Error("should detect corrupt ciphertext")
	}

	msg, _ = ParseEncrypted(corruptAuthtag)
	_, err = msg.Decrypt(priv)
	if err != ErrCryptoFailure {
		t.Error("should detect corrupt auth tag")
	}
}
explode_data.jsonl/14781
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1303 }
[ 2830, 3393, 84744, 41, 54, 7498, 269, 6585, 1155, 353, 8840, 836, 8, 972, 71170, 1669, 609, 60869, 87738, 1592, 1666, 197, 73146, 1592, 25, 68570, 49139, 1592, 1666, 298, 18317, 25, 504, 20335, 1072, 5809, 319, 571, 11323, 23, 65, 18,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestQueryOnLevelDB verifies that rich queries are rejected on a LevelDB
// state database: after seeding one JSON document, ExecuteQuery must return
// an error and a nil iterator.
func TestQueryOnLevelDB(t *testing.T) {
	env := NewTestVDBEnv(t)
	defer env.Cleanup()
	db, err := env.DBProvider.GetDBHandle("testquery")
	assert.NoError(t, err)
	db.Open()
	defer db.Close()

	// Seed a single document so the query would have something to match if
	// queries were supported.
	batch := statedb.NewUpdateBatch()
	jsonValue1 := `{"asset_name": "marble1","color": "blue","size": 1,"owner": "tom"}`
	batch.Put("ns1", "key1", []byte(jsonValue1), version.NewHeight(1, 1))
	savePoint := version.NewHeight(2, 22)
	db.ApplyUpdates(batch, savePoint)

	// query for owner=jerry, use namespace "ns1"
	// As queries are not supported in levelDB, call to ExecuteQuery()
	// should return a error message
	itr, err := db.ExecuteQuery("ns1", `{"selector":{"owner":"jerry"}}`)
	assert.Error(t, err, "ExecuteQuery not supported for leveldb")
	assert.Nil(t, itr)
}
explode_data.jsonl/63387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 286 }
[ 2830, 3393, 2859, 1925, 4449, 3506, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 1532, 2271, 53, 3506, 14359, 1155, 340, 16867, 6105, 727, 60639, 741, 20939, 11, 1848, 1669, 6105, 22537, 5179, 2234, 3506, 6999, 445, 1944, 1631, 1138, 6948...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceInGroupParallel(t *testing.T) { registry := newMockRegistry() goNum := 5 wg := &sync.WaitGroup{} wg.Add(goNum) for i := 0; i < 5; i++ { go func() { defer wg.Done() for i := 0; i < 10000; i++ { assert.True(t, ServiceInGroup(registry, "testGroup", "testService")) assert.False(t, ServiceInGroup(registry, "testGroup", "testNoneExistService")) } }() } wg.Wait() }
explode_data.jsonl/67632
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 179 }
[ 2830, 3393, 1860, 641, 2808, 16547, 1155, 353, 8840, 836, 8, 341, 197, 29172, 1669, 501, 11571, 15603, 741, 30680, 4651, 1669, 220, 20, 198, 72079, 1669, 609, 12996, 28384, 2808, 16094, 72079, 1904, 47415, 4651, 340, 2023, 600, 1669, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPostgres(t *testing.T) { sqltrace.Register("postgres", &pq.Driver{}) db, err := Open("postgres", "postgres://postgres:postgres@127.0.0.1:5432/postgres?sslmode=disable") if err != nil { log.Fatal(err) } defer db.Close() testConfig := &sqltest.Config{ DB: db.DB(), DriverName: "postgres", TableName: tableName, ExpectName: "postgres.query", ExpectTags: map[string]interface{}{ ext.ServiceName: "postgres.db", ext.SpanType: ext.SpanTypeSQL, ext.TargetHost: "127.0.0.1", ext.TargetPort: "5432", "db.user": "postgres", "db.name": "postgres", }, } sqltest.RunAll(t, testConfig) }
explode_data.jsonl/31331
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 302 }
[ 2830, 3393, 4133, 17818, 1155, 353, 8840, 836, 8, 341, 30633, 15067, 19983, 445, 43070, 497, 609, 64016, 41768, 37790, 20939, 11, 1848, 1669, 5264, 445, 43070, 497, 330, 43070, 1110, 43070, 25, 43070, 31, 16, 17, 22, 13, 15, 13, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestName(t *testing.T) { testEnv := testEnvsMap[levelDBtestEnvName] testEnv.init(t, "testLedger", nil) defer testEnv.cleanup() txMgr := testEnv.getTxMgr() assert.Equal(t, "state", txMgr.Name()) }
explode_data.jsonl/63625
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 92 }
[ 2830, 3393, 675, 1155, 353, 8840, 836, 8, 341, 18185, 14359, 1669, 1273, 1702, 11562, 2227, 64586, 3506, 1944, 14359, 675, 921, 18185, 14359, 8271, 1155, 11, 330, 1944, 60850, 1389, 497, 2092, 340, 16867, 1273, 14359, 87689, 741, 46237, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestGetUserNameEndWith(t *testing.T) { var users []User filter.EndWith("Farcha", "name", "AND"). ToSql(). Find(&users) assert.Equal(t, 2, len(users)) filter.Clear() }
explode_data.jsonl/80107
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 75 }
[ 2830, 3393, 1949, 18856, 3727, 2354, 1155, 353, 8840, 836, 8, 341, 2405, 3847, 3056, 1474, 198, 50108, 18569, 2354, 445, 37, 1113, 64, 497, 330, 606, 497, 330, 3976, 38609, 197, 197, 1249, 8269, 25829, 197, 197, 9885, 2099, 4218, 692,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestSeekDataUnexpectedSize writes one series to a fileset, truncates the
// data file by one byte on disk, and verifies that a subsequent SeekByID
// surfaces an "unexpected EOF" error instead of returning corrupt data.
func TestSeekDataUnexpectedSize(t *testing.T) {
	dir, err := ioutil.TempDir("", "testdb")
	if err != nil {
		t.Fatal(err)
	}
	filePathPrefix := filepath.Join(dir, "")
	defer os.RemoveAll(dir)

	w := newTestWriter(t, filePathPrefix)
	writerOpts := DataWriterOpenOptions{
		BlockSize: testBlockSize,
		Identifier: FileSetFileIdentifier{
			Namespace:  testNs1ID,
			Shard:      0,
			BlockStart: testWriterStart,
		},
	}
	metadata := persist.NewMetadataFromIDAndTags(
		ident.StringID("foo"),
		ident.Tags{},
		persist.MetadataOptions{})
	err = w.Open(writerOpts)
	assert.NoError(t, err)
	// Capture the on-disk data file path before closing so it can be
	// truncated after the write completes.
	dataFile := w.(*writer).dataFdWithDigest.Fd().Name()

	assert.NoError(t, w.Write(metadata,
		bytesRefd([]byte{1, 2, 3}),
		digest.Checksum([]byte{1, 2, 3})))
	assert.NoError(t, w.Close())

	// Truncate one byte
	assert.NoError(t, os.Truncate(dataFile, 1))

	resources := newTestReusableSeekerResources()
	s := newTestSeeker(filePathPrefix)
	err = s.Open(testNs1ID, 0, testWriterStart, 0, resources)
	assert.NoError(t, err)

	// The seek must fail with exactly "unexpected EOF" — the truncated
	// record is shorter than its recorded size.
	_, err = s.SeekByID(ident.StringID("foo"), resources)
	assert.Error(t, err)
	assert.Equal(t, errors.New("unexpected EOF"), err)

	assert.NoError(t, s.Close())
}
explode_data.jsonl/10714
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 482 }
[ 2830, 3393, 39350, 1043, 29430, 1695, 1155, 353, 8840, 836, 8, 341, 48532, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 1944, 1999, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 17661, 1820, 14335, 1669, 260...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAdvisoryLockWithStdlib(t *testing.T) { db, cleanup := testCreateDB(t, 3) defer cleanup() var wg sync.WaitGroup numExec := 10 wg.Add(numExec) for i := 0; i <= numExec-1; i++ { go func() { var ( locked bool procID int ) err := db.QueryRow(lockSQL, 1).Scan(&locked, &procID) if err != nil { t.Fatal(err) } t.Logf("proc: %d -> lock: %t", procID, locked) wg.Done() }() } wg.Wait() }
explode_data.jsonl/74286
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 212 }
[ 2830, 3393, 2589, 2682, 679, 11989, 2354, 22748, 2740, 1155, 353, 8840, 836, 8, 341, 20939, 11, 21290, 1669, 1273, 4021, 3506, 1155, 11, 220, 18, 340, 16867, 21290, 2822, 2405, 63581, 12811, 28384, 2808, 198, 22431, 10216, 1669, 220, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestAutoProperty compiles a Go+ snippet that uses auto-property access
// (doc.any.funcDecl.name) and the error-propagating "!" operator, and checks
// the generated Go source matches the expected expansion: properties become
// method calls and "!" becomes an inline error-checking closure.
func TestAutoProperty(t *testing.T) {
	gopClTest(t, `import "github.com/goplus/gop/ast/goptest"

func foo(script string) {
	doc := goptest.New(script)!
	println(doc.any.funcDecl.name)
	println(doc.any.importSpec.name)
}
`, `package main

import (
	fmt "fmt"
	goptest "github.com/goplus/gop/ast/goptest"
	gopq "github.com/goplus/gop/ast/gopq"
)

func foo(script string) {
	doc := func() (_gop_ret gopq.NodeSet) {
		var _gop_err error
		_gop_ret, _gop_err = goptest.New(script)
		if _gop_err != nil {
			panic(_gop_err)
		}
		return
	}()
	fmt.Println(doc.Any().FuncDecl().Name())
	fmt.Println(doc.Any().ImportSpec().Name())
}
`)
}
explode_data.jsonl/73608
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 293 }
[ 2830, 3393, 13253, 3052, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 1565, 474, 330, 5204, 905, 4846, 55078, 355, 4846, 453, 14, 559, 4846, 2912, 477, 1837, 2830, 15229, 42795, 914, 8, 341, 59536, 1669, 728, 70334, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestChatSrvGap exercises gap detection in the chat push handler. It posts
// a real message, then replays it as an out-of-band payload with a forged
// MessageID of 5 — ahead of the conversation's actual high watermark — and
// expects a "threads stale" CLEAR update. Replaying again with MessageID 6
// (only one ahead of the previously seen 5) must NOT produce a stale event.
func TestChatSrvGap(t *testing.T) {
	runWithMemberTypes(t, func(mt chat1.ConversationMembersType) {
		ctc := makeChatTestContext(t, "GetOutbox", 2)
		defer ctc.cleanup()
		users := ctc.users()

		var err error
		created := mustCreateConversationForTest(t, ctc, users[0], chat1.TopicType_CHAT, mt, ctc.as(t, users[1]).user())
		res, err := postLocalForTest(t, ctc, users[0], created, chat1.NewMessageBodyWithText(chat1.MessageText{Body: "Sometimes you eat the bar"}))
		require.NoError(t, err)

		u := users[0]
		h := ctc.as(t, users[0]).h
		ctx := ctc.as(t, users[0]).startCtx
		tc := ctc.world.Tcs[ctc.as(t, users[0]).user().Username]
		msgID := res.MessageID

		// Fetch the message we just posted so it can be replayed below.
		mres, err := h.remoteClient().GetMessagesRemote(ctx, chat1.GetMessagesRemoteArg{
			ConversationID: created.Id,
			MessageIDs:     []chat1.MessageID{msgID},
		})
		require.NoError(t, err)
		require.Len(t, mres.Msgs, 1, "number of messages")

		// Forge a MessageID ahead of the real one to create a gap.
		ooMsg := mres.Msgs[0]
		ooMsg.ServerHeader.MessageID = 5

		payload := chat1.NewMessagePayload{
			Action:  types.ActionNewMessage,
			ConvID:  created.Id,
			Message: ooMsg,
		}

		listener := newServerChatListener()
		tc.G.NotifyRouter.AddListener(listener)

		// Deliver the forged message through the push handler as a
		// msgpack-encoded out-of-band gregor message.
		mh := codec.MsgpackHandle{WriteExt: true}
		var data []byte
		enc := codec.NewEncoderBytes(&data, &mh)
		require.NoError(t, enc.Encode(payload))
		ph := NewPushHandler(tc.Context())
		require.NoError(t, ph.Activity(ctx, &gregor1.OutOfBandMessage{
			Uid_:    u.User.GetUID().ToBytes(),
			System_: gregor1.System(types.PushActivity),
			Body_:   data,
		}))

		// The gap must trigger exactly one CLEAR stale update for the conv.
		updates := consumeNewThreadsStale(t, listener)
		require.Equal(t, 1, len(updates))
		require.Equal(t, created.Id, updates[0].ConvID, "wrong cid")
		require.Equal(t, chat1.StaleUpdateType_CLEAR, updates[0].UpdateType)

		// Replay with the next sequential ID (6 follows the seen 5): no gap,
		// so no stale event should be emitted this time.
		ooMsg.ServerHeader.MessageID = 6
		payload = chat1.NewMessagePayload{
			Action:  types.ActionNewMessage,
			ConvID:  created.Id,
			Message: ooMsg,
		}
		enc = codec.NewEncoderBytes(&data, &mh)
		require.NoError(t, enc.Encode(payload))
		require.NoError(t, ph.Activity(ctx, &gregor1.OutOfBandMessage{
			Uid_:    u.User.GetUID().ToBytes(),
			System_: gregor1.System(types.PushActivity),
			Body_:   data,
		}))
		select {
		case <-listener.threadsStale:
			require.Fail(t, "should not get stale event here")
		default:
		}
	})
}
explode_data.jsonl/63686
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 966 }
[ 2830, 3393, 15672, 50, 10553, 12868, 1155, 353, 8840, 836, 8, 341, 56742, 2354, 9366, 4173, 1155, 11, 2915, 81618, 6236, 16, 4801, 22323, 24371, 929, 8, 341, 197, 89216, 66, 1669, 1281, 15672, 2271, 1972, 1155, 11, 330, 1949, 2662, 20...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIssue149(t *testing.T) { ver, _ := goversion.Parse(runtime.Version()) if ver.Major > 0 && !ver.AfterOrEqual(goversion.GoVersion{Major: 1, Minor: 7, Rev: -1}) { return } // setting breakpoint on break statement withTestProcess("break", t, func(p *proc.Target, fixture protest.Fixture) { findFileLocation(p, t, fixture.Source, 8) }) }
explode_data.jsonl/56249
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 128 }
[ 2830, 3393, 42006, 16, 19, 24, 1155, 353, 8840, 836, 8, 341, 197, 423, 11, 716, 1669, 728, 4366, 8937, 89467, 35842, 2398, 743, 2739, 1321, 3035, 861, 220, 15, 1009, 753, 423, 36892, 2195, 2993, 3268, 859, 1325, 67131, 5637, 90, 344...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseApiMarkdownDescription(t *testing.T) { searchDir := "testdata/tags" p := New(SetMarkdownFileDirectory(searchDir)) p.PropNamingStrategy = PascalCase err := p.ParseAPI(searchDir, mainAPIFile, defaultParseDepth) if err != nil { t.Error("Failed to parse api description: " + err.Error()) } if p.swagger.Info.Description == "" { t.Error("Failed to parse api description: " + err.Error()) } }
explode_data.jsonl/63583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 14463, 6563, 68005, 5009, 1155, 353, 8840, 836, 8, 341, 45573, 6184, 1669, 330, 92425, 84460, 698, 3223, 1669, 1532, 52474, 68005, 1703, 9310, 20447, 6184, 1171, 3223, 42483, 85410, 19816, 284, 57359, 4207, 198, 9859, 1669, 28...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestClientStreamingRpc invokes a client-streaming RPC through the stub,
// sends the same payload three times, and checks that the server-reported
// aggregated payload size equals three times the payload body length.
func TestClientStreamingRpc(t *testing.T) {
	cs, err := stub.InvokeRpcClientStream(context.Background(), "", clientStreamingMd)
	testutil.Ok(t, err, "Failed to invoke client-streaming RPC")
	req := &grpc_testing.StreamingInputCallRequest{Payload: payload}
	for i := 0; i < 3; i++ {
		err = cs.SendMsg(req)
		testutil.Ok(t, err, "Failed to send request message")
	}
	resp, err := cs.CloseAndReceive()
	testutil.Ok(t, err, "Failed to receive response")
	// The response is a dynamic message: look up the aggregated-size field
	// by name on its descriptor rather than via generated accessors.
	dm := resp.(*dynamic.Message)
	fd := dm.GetMessageDescriptor().FindFieldByName("aggregated_payload_size")
	sz := dm.GetField(fd)
	expectedSz := 3 * len(payload.Body)
	testutil.Eq(t, expectedSz, int(sz.(int32)), "Incorrect response returned from RPC")
}
explode_data.jsonl/51787
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 264 }
[ 2830, 3393, 2959, 76509, 60248, 1155, 353, 8840, 836, 8, 341, 71899, 11, 1848, 1669, 13633, 32784, 60248, 2959, 3027, 5378, 19047, 1507, 7342, 2943, 76509, 72529, 340, 18185, 1314, 54282, 1155, 11, 1848, 11, 330, 9408, 311, 19873, 2943, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestGenerateExplainStmt checks TO EXPLAIN statement generation against a
// mocked-in xgboost model: attributes and explainer are extracted, an INTO
// clause is captured, and a bare EXPLAIN yields empty explainer/attributes.
// Skipped unless SQLFLOW_TEST_DB is mysql.
func TestGenerateExplainStmt(t *testing.T) {
	if test.GetEnv("SQLFLOW_TEST_DB", "mysql") != "mysql" {
		t.Skip(fmt.Sprintf("%s: skip test", test.GetEnv("SQLFLOW_TEST_DB", "mysql")))
	}
	a := assert.New(t)
	connStr := database.GetTestingMySQLURL()
	cwd, e := ioutil.TempDir("/tmp", "sqlflow_models")
	a.Nil(e)
	defer os.RemoveAll(cwd)
	modelDir := ""

	// Register a fake trained model so EXPLAIN has something to resolve.
	a.NoError(model.MockInDB(cwd, `SELECT * FROM iris.train TO TRAIN xgboost.gbtree WITH objective="multi:softprob", train.num_boost_round = 30, eta = 0.4, num_class = 3 LABEL class INTO sqlflow_models.my_xgboost_model;`, "sqlflow_models.my_xgboost_model"))

	// EXPLAIN with WITH attributes and an explainer.
	pr, e := parser.ParseStatement("mysql", `
SELECT * FROM iris.train TO EXPLAIN sqlflow_models.my_xgboost_model WITH summary.plot_type="bar", summary.alpha=1, summary.sort=True USING TreeExplainer;
`)
	a.NoError(e)
	ExplainStmt, e := GenerateExplainStmt(pr.SQLFlowSelectStmt, connStr, modelDir, cwd, true)
	a.NoError(e)
	a.Equal(ExplainStmt.Explainer, "TreeExplainer")
	a.Equal(len(ExplainStmt.Attributes), 3)
	a.Equal(ExplainStmt.Attributes["summary.sort"], true)
	a.Equal(ExplainStmt.Attributes["summary.plot_type"], "bar")
	a.Equal(ExplainStmt.Attributes["summary.alpha"], 1)
	// The original TRAIN statement's features must be carried along.
	nc, ok := ExplainStmt.TrainStmt.Features["feature_columns"][0].(*NumericColumn)
	a.True(ok)
	a.Equal("sepal_length", nc.FieldDesc.Name)

	// EXPLAIN ... INTO captures the result table name.
	pr, e = parser.ParseStatement("mysql", `
SELECT * FROM iris.train TO EXPLAIN sqlflow_models.my_xgboost_model WITH summary.plot_type="bar", summary.alpha=1, summary.sort=True USING TreeExplainer INTO db.explain_result;
`)
	a.NoError(e)
	ExplainIntoStmt, e := GenerateExplainStmt(pr.SQLFlowSelectStmt, connStr, modelDir, cwd, true)
	a.NoError(e)
	a.Equal(ExplainIntoStmt.Explainer, "TreeExplainer")
	a.Equal(len(ExplainIntoStmt.Attributes), 3)
	a.Equal("db.explain_result", ExplainIntoStmt.Into)

	// Bare EXPLAIN: no explainer, no attributes.
	pr, e = parser.ParseStatement("mysql", `SELECT * FROM iris.train TO EXPLAIN sqlflow_models.my_xgboost_model;`)
	a.NoError(e)
	shortExplainStmt, e := GenerateExplainStmt(pr.SQLFlowSelectStmt, connStr, modelDir, cwd, true)
	a.NoError(e)
	a.Equal(shortExplainStmt.Explainer, "")
	a.Equal(len(shortExplainStmt.Attributes), 0)
}
explode_data.jsonl/45950
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 945 }
[ 2830, 3393, 31115, 840, 20772, 31063, 1155, 353, 8840, 836, 8, 341, 743, 1273, 2234, 14359, 445, 6688, 46060, 11641, 16310, 497, 330, 12272, 899, 961, 330, 12272, 1, 341, 197, 3244, 57776, 28197, 17305, 4430, 82, 25, 10706, 1273, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestClient_SetDefaultGasPrice drives the CLI command that sets the default
// EVM gas price: a bare amount (wei), a gwei-denominated amount, a rejection
// when an unknown evmChainID is given (config left untouched), and success
// when the correct evmChainID is specified.
func TestClient_SetDefaultGasPrice(t *testing.T) {
	t.Parallel()

	ethMock, assertMocksCalled := newEthMock(t)
	defer assertMocksCalled()
	app := startNewApplication(t,
		withKey(),
		withMocks(ethMock),
		withConfigSet(func(c *configtest.TestGeneralConfig) {
			c.Overrides.EVMEnabled = null.BoolFrom(true)
			c.Overrides.GlobalEvmNonceAutoSync = null.BoolFrom(false)
			c.Overrides.GlobalBalanceMonitorEnabled = null.BoolFrom(false)
		}),
	)
	client, _ := app.NewClientAndRenderer()

	t.Run("without specifying chain id setting value", func(t *testing.T) {
		set := flag.NewFlagSet("setgasprice", 0)
		set.Parse([]string{"8616460799"})
		c := cli.NewContext(nil, set, nil)
		assert.NoError(t, client.SetEvmGasPriceDefault(c))
		ch, err := app.GetChains().EVM.Default()
		require.NoError(t, err)
		cfg := ch.Config()
		assert.Equal(t, big.NewInt(8616460799), cfg.EvmGasPriceDefault())

		// Same command with -gwei: 861.6460799 gwei == 861646079900 wei.
		client, _ = app.NewClientAndRenderer()
		set = flag.NewFlagSet("setgasprice", 0)
		set.String("amount", "", "")
		set.Bool("gwei", true, "")
		set.Parse([]string{"-gwei", "861.6460799"})
		c = cli.NewContext(nil, set, nil)
		assert.NoError(t, client.SetEvmGasPriceDefault(c))
		assert.Equal(t, big.NewInt(861646079900), cfg.EvmGasPriceDefault())
	})

	t.Run("specifying wrong chain id", func(t *testing.T) {
		set := flag.NewFlagSet("setgasprice", 0)
		set.String("evmChainID", "", "")
		set.Parse([]string{"-evmChainID", "985435435435", "8616460799"})
		c := cli.NewContext(nil, set, nil)
		err := client.SetEvmGasPriceDefault(c)
		assert.Error(t, err)
		assert.Contains(t, err.Error(), "evmChainID does not match any local chains")
		// Config must be unchanged from the previous subtest's value.
		ch, err := app.GetChains().EVM.Default()
		require.NoError(t, err)
		cfg := ch.Config()
		assert.Equal(t, big.NewInt(861646079900), cfg.EvmGasPriceDefault())
	})

	t.Run("specifying correct chain id", func(t *testing.T) {
		set := flag.NewFlagSet("setgasprice", 0)
		set.String("evmChainID", "", "")
		set.Parse([]string{"-evmChainID", "0", "12345678900"})
		c := cli.NewContext(nil, set, nil)
		assert.NoError(t, client.SetEvmGasPriceDefault(c))
		ch, err := app.GetChains().EVM.Default()
		require.NoError(t, err)
		cfg := ch.Config()
		assert.Equal(t, big.NewInt(12345678900), cfg.EvmGasPriceDefault())
	})
}
explode_data.jsonl/5271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 931 }
[ 2830, 3393, 2959, 14812, 3675, 58728, 6972, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 769, 11571, 11, 2060, 72577, 20960, 1669, 501, 65390, 11571, 1155, 340, 16867, 2060, 72577, 20960, 741, 28236, 1669, 1191, 3564, 4988,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBusUpDown(t *testing.T) { bsToken := getBusToken() mSm := new(logics.SmLogic) args := &cards.ArgsDownUpSm{ BsToken: bsToken, SmIds: []int{1, 2, 5}, OptType: cards.OPT_DOWN, } logs.Info(mSm.DownUpSm(context.Background(), args)) }
explode_data.jsonl/35925
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 15073, 33801, 1155, 353, 8840, 836, 8, 341, 93801, 3323, 1669, 633, 15073, 3323, 741, 2109, 10673, 1669, 501, 12531, 1211, 92445, 26751, 340, 31215, 1669, 609, 25024, 51015, 4454, 2324, 10673, 515, 197, 12791, 82, 3323, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestIncorrectString(t *testing.T) { lines := []string{ "assafasfasfasfafa", } currentTime := time.Now() _, err := Parse(strings.Join(lines, "\n"), currentTime) // error should not be nil here if err == nil { t.Log("Error should not be nil") t.Fail() } }
explode_data.jsonl/15583
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 108 }
[ 2830, 3393, 40468, 703, 1155, 353, 8840, 836, 8, 341, 78390, 1669, 3056, 917, 515, 197, 197, 1, 395, 2577, 300, 14847, 14847, 69, 35834, 756, 197, 630, 20121, 1462, 1669, 882, 13244, 741, 197, 6878, 1848, 1669, 14775, 51442, 22363, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestGenericArrayScanScannerSliceBytes scans a Postgres array literal held
// in a []byte into a []sql.NullString and checks the decoded elements.
func TestGenericArrayScanScannerSliceBytes(t *testing.T) {
	// src is the wire form `{NULL,abc,"\""}`; expected is its decoded value
	// (NULL, "abc", and a literal double quote); nss is pre-populated with
	// stale entries to show Scan replaces the destination slice wholesale.
	src, expected, nss := []byte(`{NULL,abc,"\""}`),
		[]sql.NullString{{}, {String: `abc`, Valid: true}, {String: `"`, Valid: true}},
		[]sql.NullString{{String: ``, Valid: true}, {}, {}, {}, {}}
	if err := (GenericArray{&nss}).Scan(src); err != nil {
		t.Fatalf("Expected no error, got %v", err)
	}
	if !reflect.DeepEqual(nss, expected) {
		t.Errorf("Expected %v, got %v", expected, nss)
	}
}
explode_data.jsonl/5344
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 183 }
[ 2830, 3393, 19964, 1857, 26570, 31002, 33236, 7078, 1155, 353, 8840, 836, 8, 341, 41144, 11, 3601, 11, 308, 778, 1669, 3056, 3782, 5809, 90, 4576, 11, 13683, 1335, 57139, 5541, 1326, 197, 197, 1294, 3544, 23979, 703, 2979, 2137, 314, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetServerQueryGood(t *testing.T) { _, err := GetServerQuery("mc.syfaro.net", 25565) if err != nil { t.Log(err) t.Fail() } }
explode_data.jsonl/24171
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 63 }
[ 2830, 3393, 1949, 5475, 2859, 15216, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1848, 1669, 2126, 5475, 2859, 445, 12887, 77577, 69, 17165, 5071, 497, 220, 17, 20, 20, 21, 20, 340, 743, 1848, 961, 2092, 341, 197, 3244, 5247, 3964, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestDBDelete(t *testing.T) { for _, item := range updated { t.Run(item, func(*testing.T) { request, _ := http.NewRequest("DELETE", endpointPrefix+"/name/delete/"+item, nil) client := &http.Client{} resp, err := client.Do(request) if err != nil { t.Error(err.Error()) } if resp.StatusCode != 200 { t.Errorf("TestDBDelete: response code is not 200, error: %d", resp.StatusCode) } data, err := ioutil.ReadAll(resp.Body) if err != nil { t.Error(err.Error()) } var result Delete json.Unmarshal(data, &result) if result.Status != true || result.Id != item { t.Errorf("TestDBDelete: status is %t and id is %s not %s", result.Status, result.Id, item) } }) } }
explode_data.jsonl/64864
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 307 }
[ 2830, 3393, 3506, 6435, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1509, 1669, 2088, 6049, 341, 197, 3244, 16708, 5393, 11, 2915, 4071, 8840, 836, 8, 341, 298, 23555, 11, 716, 1669, 1758, 75274, 445, 14424, 497, 14887, 14335, 27569, 60...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestDeleteBatchChangesCredential runs the deleteBatchChangesCredential
// GraphQL mutation twice for the same credential: the first call must
// succeed (the credential exists), the second must fail with a single
// "user credential not found" error. Skipped in -short mode.
func TestDeleteBatchChangesCredential(t *testing.T) {
	if testing.Short() {
		t.Skip()
	}

	ctx := context.Background()
	db := dbtesting.GetDB(t)

	pruneUserCredentials(t, db)

	userID := ct.CreateTestUser(t, db, true).ID

	cstore := store.New(db)

	// Seed one batch-changes credential owned by the test user.
	cred, err := cstore.UserCredentials().Create(ctx, database.UserCredentialScope{
		Domain:              database.UserCredentialDomainBatches,
		ExternalServiceType: extsvc.TypeGitHub,
		ExternalServiceID:   "https://github.com/",
		UserID:              userID,
	}, &auth.OAuthBearerToken{Token: "SOSECRET"})
	if err != nil {
		t.Fatal(err)
	}

	r := &Resolver{store: cstore}
	s, err := graphqlbackend.NewSchema(db, r, nil, nil, nil, nil, nil)
	if err != nil {
		t.Fatal(err)
	}

	input := map[string]interface{}{
		"batchChangesCredential": marshalBatchChangesCredentialID(cred.ID),
	}

	var response struct{ DeleteBatchChangesCredential apitest.EmptyResponse }
	// Execute as the owning user so authz checks pass.
	actorCtx := actor.WithActor(ctx, actor.FromUser(userID))

	// First time it should work, because a credential exists
	apitest.MustExec(actorCtx, t, s, input, &response, mutationDeleteCredential)

	// Second time it should fail
	errors := apitest.Exec(actorCtx, t, s, input, &response, mutationDeleteCredential)

	if len(errors) != 1 {
		t.Fatalf("expected single errors, but got none")
	}
	if have, want := errors[0].Message, fmt.Sprintf("user credential not found: [%d]", cred.ID); have != want {
		t.Fatalf("wrong error code. want=%q, have=%q", want, have)
	}
}
explode_data.jsonl/13593
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 565 }
[ 2830, 3393, 6435, 21074, 11317, 48265, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 630, 20985, 1669, 2266, 19047, 741, 20939, 1669, 2927, 8840, 2234, 3506, 1155, 692, 25653, 2886, 1474, 27025, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestClone(t *testing.T) { a := ca(header(terminal("ws:", "", ""))) b := a.Clone() if a == b { t.Fatalf("Address of cloned terminal didn't change") } if &a.Subprotocols == &b.Subprotocols { t.Fatalf("Address of cloned subprotocols didn't change") } if &a.Header == &b.Header { t.Fatalf("Address of cloned header didn't change") } }
explode_data.jsonl/26629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 143 }
[ 2830, 3393, 37677, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 2162, 25534, 7, 36327, 445, 8915, 12147, 7342, 1591, 5929, 2233, 1669, 264, 64463, 2822, 743, 264, 621, 293, 341, 197, 3244, 30762, 445, 4286, 315, 54119, 15022, 3207, 944, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
// TestUnmarshal_WithUnmarshaler decodes a TOML fixture into a struct whose
// fields implement a custom unmarshaler, and verifies each field records the
// raw TOML fragment it was handed (prefixed with "Unmarshaled: ").
func TestUnmarshal_WithUnmarshaler(t *testing.T) {
	type testStruct struct {
		Title         testUnmarshalerString
		MaxConn       testUnmarshalerString
		Ports         testUnmarshalerString
		Servers       testUnmarshalerString
		Table         testUnmarshalerString
		Arraytable    testUnmarshalerString
		ArrayOfStruct []testUnmarshalerStruct
	}
	data := loadTestData("unmarshal-unmarshaler.toml")
	var v testStruct
	if err := Unmarshal(data, &v); err != nil {
		t.Fatal(err)
	}
	actual := v
	// Each expected value is the verbatim TOML source for that field, which
	// proves the custom unmarshaler received the raw fragment.
	expect := testStruct{
		Title:      `Unmarshaled: "testtitle"`,
		MaxConn:    `Unmarshaled: 777`,
		Ports:      `Unmarshaled: [8080, 8081, 8082]`,
		Servers:    `Unmarshaled: [1, 2, 3]`,
		Table:      "Unmarshaled: [table]\nname = \"alice\"",
		Arraytable: "Unmarshaled: [[arraytable]]\nname = \"alice\"\n[[arraytable]]\nname = \"bob\"",
		ArrayOfStruct: []testUnmarshalerStruct{
			{
				Title:  "Unmarshaled: [[array_of_struct]]\ntitle = \"Alice's Adventures in Wonderland\"\nauthor = \"Lewis Carroll\"",
				Author: "",
			},
		},
	}
	if !reflect.DeepEqual(actual, expect) {
		t.Errorf(`toml.Unmarshal(data, &v); v => %#v; want %#v`, actual, expect)
	}
}
explode_data.jsonl/52961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 513 }
[ 2830, 3393, 1806, 27121, 62, 2354, 1806, 27121, 261, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 9422, 2036, 341, 197, 92233, 260, 1273, 1806, 27121, 261, 703, 198, 197, 197, 5974, 9701, 981, 1273, 1806, 27121, 261, 703, 198, 197, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSchemaInfo_Views(t *testing.T) { si := New(DBOptions{Driver: "postgres", Queryer: squirrel.NewStmtCacheProxy(db)}) views, err := si.Views("", "") if err != nil { t.Fatal(err) } if n := len(views); n != len(createViews()) { t.Errorf("Unexpected number of tables: %d", n) } // TODO: Should probably create a view in the schema creation and test it // here. }
explode_data.jsonl/41766
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 145 }
[ 2830, 3393, 8632, 1731, 62, 23217, 1155, 353, 8840, 836, 8, 341, 1903, 72, 1669, 1532, 32184, 3798, 90, 11349, 25, 330, 43070, 497, 11361, 261, 25, 90524, 7121, 31063, 8233, 16219, 9791, 59209, 197, 5072, 11, 1848, 1669, 4403, 37965, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRemoteServing(t *testing.T) { tests := []testRun{ // Test serving files from the test remote { Name: "index", URL: remoteURL + "", Status: http.StatusOK, Expected: `<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <title>Directory listing of /</title> </head> <body> <h1>Directory listing of /</h1> <a href="dir/">dir/</a><br /> <a href="file.txt">file.txt</a><br /> </body> </html> `, }, { Name: "notfound-index", URL: "[notfound]/", Status: http.StatusNotFound, Expected: `{ "error": "failed to list directory: directory not found", "input": null, "path": "", "status": 404 } `, }, { Name: "notfound", URL: remoteURL + "notfound", Status: http.StatusNotFound, Expected: `{ "error": "failed to find object: object not found", "input": null, "path": "notfound", "status": 404 } `, }, { Name: "dirnotfound", URL: remoteURL + "dirnotfound/", Status: http.StatusNotFound, Expected: `{ "error": "failed to list directory: directory not found", "input": null, "path": "dirnotfound", "status": 404 } `, }, { Name: "dir", URL: remoteURL + "dir/", Status: http.StatusOK, Expected: `<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <title>Directory listing of /dir</title> </head> <body> <h1>Directory listing of /dir</h1> <a href="file2.txt">file2.txt</a><br /> </body> </html> `, }, { Name: "file", URL: remoteURL + "file.txt", Status: http.StatusOK, Expected: "this is file1.txt\n", Headers: map[string]string{ "Content-Length": "18", }, }, { Name: "file with no slash after ]", URL: strings.TrimRight(remoteURL, "/") + "file.txt", Status: http.StatusOK, Expected: "this is file1.txt\n", Headers: map[string]string{ "Content-Length": "18", }, }, { Name: "file2", URL: remoteURL + "dir/file2.txt", Status: http.StatusOK, Expected: "this is dir/file2.txt\n", }, { Name: "file-head", URL: remoteURL + "file.txt", Method: "HEAD", Status: http.StatusOK, Expected: ``, Headers: map[string]string{ "Content-Length": "18", }, }, { Name: "file-range", URL: remoteURL + 
"file.txt", Status: http.StatusPartialContent, Range: "bytes=8-12", Expected: `file1`, }, { Name: "bad-remote", URL: "[notfoundremote:]/", Status: http.StatusInternalServerError, Expected: `{ "error": "failed to make Fs: didn't find section in config file", "input": null, "path": "/", "status": 500 } `, }} opt := newTestOpt() opt.Serve = true opt.Files = testFs testServer(t, tests, &opt) }
explode_data.jsonl/12960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1223 }
[ 2830, 3393, 24703, 50, 19505, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1944, 6727, 515, 197, 197, 322, 3393, 13480, 3542, 504, 279, 1273, 8699, 198, 197, 197, 515, 298, 21297, 25, 256, 330, 1252, 756, 298, 79055, 25, 262, 86...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetMarginBorrowHistory(t *testing.T) { t.Parallel() tmNow := time.Now() _, err := f.GetMarginBorrowHistory(context.Background(), tmNow.AddDate(0, 0, 1), tmNow) if !errors.Is(err, errStartTimeCannotBeAfterEndTime) { t.Errorf("expected %s, got %s", errStartTimeCannotBeAfterEndTime, err) } if !areTestAPIKeysSet() { t.Skip() } _, err = f.GetMarginBorrowHistory(context.Background(), tmNow.AddDate(0, 0, -1), tmNow) if err != nil { t.Error(err) } }
explode_data.jsonl/15174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 1949, 21681, 33, 7768, 13424, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 76, 7039, 1669, 882, 13244, 741, 197, 6878, 1848, 1669, 282, 2234, 21681, 33, 7768, 13424, 5378, 19047, 3148, 197, 3244, 76, 7039, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestNewCmdList(t *testing.T) { io, _, _, _ := iostreams.Test() io.IsaTTY = true io.IsInTTY = true io.IsErrTTY = true fakeHTTP := httpmock.New() defer fakeHTTP.Verify(t) factory := &cmdutils.Factory{ IO: io, HttpClient: func() (*gitlab.Client, error) { a, err := api.TestClient(&http.Client{Transport: fakeHTTP}, "", "", false) if err != nil { return nil, err } return a.Lab(), err }, Config: func() (config.Config, error) { return config.NewBlankConfig(), nil }, BaseRepo: func() (glrepo.Interface, error) { return glrepo.New("OWNER", "REPO"), nil }, } t.Run("MergeRequest_NewCmdList", func(t *testing.T) { gotOpts := &ListOptions{} err := NewCmdList(factory, func(opts *ListOptions) error { gotOpts = opts return nil }).Execute() assert.Nil(t, err) assert.Equal(t, factory.IO, gotOpts.IO) gotBaseRepo, _ := gotOpts.BaseRepo() expectedBaseRepo, _ := factory.BaseRepo() assert.Equal(t, gotBaseRepo, expectedBaseRepo) }) }
explode_data.jsonl/48218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 3564, 15613, 852, 1155, 353, 8840, 836, 8, 341, 53112, 11, 8358, 8358, 716, 1669, 600, 7617, 82, 8787, 741, 53112, 4506, 64, 55544, 284, 830, 198, 53112, 4506, 641, 55544, 284, 830, 198, 53112, 4506, 7747, 55544, 284, 830,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestValidateRole(t *testing.T) { auth := NewAuthenticator(gTestBucket, nil) role, err := auth.NewRole("invalid:name", nil) assert.Equals(t, role, (User)(nil)) assert.True(t, err != nil) role, err = auth.NewRole("ValidName", nil) assert.True(t, role != nil) assert.Equals(t, err, nil) role, err = auth.NewRole("ValidName", nil) assert.True(t, role != nil) assert.Equals(t, err, nil) }
explode_data.jsonl/31549
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 158 }
[ 2830, 3393, 17926, 9030, 1155, 353, 8840, 836, 8, 341, 78011, 1669, 1532, 5087, 61393, 3268, 2271, 36018, 11, 2092, 340, 197, 5778, 11, 1848, 1669, 4166, 7121, 9030, 445, 11808, 57520, 497, 2092, 340, 6948, 16207, 1155, 11, 3476, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNormRand(t *testing.T) { for _, test := range []struct { mean []float64 cov []float64 }{ { mean: []float64{0, 0}, cov: []float64{ 1, 0, 0, 1, }, }, { mean: []float64{0, 0}, cov: []float64{ 1, 0.9, 0.9, 1, }, }, { mean: []float64{6, 7}, cov: []float64{ 5, 0.9, 0.9, 2, }, }, } { dim := len(test.mean) cov := mat.NewSymDense(dim, test.cov) n, ok := NewNormal(test.mean, cov, nil) if !ok { t.Errorf("bad covariance matrix") } nSamples := 1000000 samps := mat.NewDense(nSamples, dim, nil) for i := 0; i < nSamples; i++ { n.Rand(samps.RawRowView(i)) } estMean := make([]float64, dim) for i := range estMean { estMean[i] = stat.Mean(mat.Col(nil, i, samps), nil) } if !floats.EqualApprox(estMean, test.mean, 1e-2) { t.Errorf("Mean mismatch: want: %v, got %v", test.mean, estMean) } var estCov mat.SymDense stat.CovarianceMatrix(&estCov, samps, nil) if !mat.EqualApprox(&estCov, cov, 1e-2) { t.Errorf("Cov mismatch: want: %v, got %v", cov, &estCov) } } }
explode_data.jsonl/12021
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 577 }
[ 2830, 3393, 24993, 56124, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 1273, 1669, 2088, 3056, 1235, 341, 197, 2109, 5307, 3056, 3649, 21, 19, 198, 197, 1444, 859, 220, 3056, 3649, 21, 19, 198, 197, 59403, 197, 197, 515, 298, 2109, 530...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestCollection(t *testing.T) { tsEngine, _ := xorm.NewEngine("mysql", "root:root@tcp(127.0.0.1:3306)/radius?charset=utf8") tsEngine.ShowSQL(true) var managers []Manager tsEngine.Table("sys_user").Alias("sm"). Join("INNER", []string{"sys_user_role_rel", "smr"}, "sm.id = smr.sys_user_id"). Join("INNER", []string{"sys_role", "sr"}, "smr.role_id = sr.id"). Find(&managers) fmt.Printf("%#v", managers) }
explode_data.jsonl/28481
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 180 }
[ 2830, 3393, 6482, 1155, 353, 8840, 836, 8, 341, 57441, 4571, 11, 716, 1669, 856, 493, 7121, 4571, 445, 12272, 756, 197, 197, 1, 2888, 25, 2888, 31, 27161, 7, 16, 17, 22, 13, 15, 13, 15, 13, 16, 25, 18, 18, 15, 21, 5620, 26715,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetWithdraw(t *testing.T) { app := simapp.Setup(false) ctx := app.BaseApp.NewContext(false, tmproto.Header{Time: time.Now().UTC()}) addrs := simapp.AddTestAddrs(app, ctx, 2, sdk.NewInt(80000*1e6)) ok := app.OracleKeeper params := ok.GetLockedPoolParams(ctx) amount := sdk.Coins{sdk.NewInt64Coin("uctk", 1000)} dueBlock := ctx.BlockHeight() + params.LockedInBlocks withdraw := types.NewWithdraw(addrs[0], amount, dueBlock) ok.SetWithdraw(ctx, withdraw) withdraws := ok.GetAllWithdraws(ctx) require.Len(t, withdraws, 1) require.Equal(t, params.LockedInBlocks, withdraws[0].DueBlock) withdraws = ok.GetAllWithdrawsForExport(ctx) require.Len(t, withdraws, 1) require.Equal(t, params.LockedInBlocks, withdraws[0].DueBlock) }
explode_data.jsonl/9750
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 1649, 92261, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 1643, 676, 39820, 3576, 340, 20985, 1669, 906, 13018, 2164, 7121, 1972, 3576, 11, 17333, 15110, 15753, 90, 1462, 25, 882, 13244, 1005, 21183, 96503, 12718, 5428, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHostsComplete(t *testing.T) { ips, stop := Hosts("192.168.0.1/30") defer func() { stop <- nil }() receivedIPs := []string{} for ip := range ips { receivedIPs = append(receivedIPs, ip) } assert.Equal(t, receivedIPs, []string{"192.168.0.1", "192.168.0.2", "192.168.0.3"}) }
explode_data.jsonl/61066
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 126 }
[ 2830, 3393, 9296, 82, 12548, 1155, 353, 8840, 836, 8, 341, 197, 3077, 11, 2936, 1669, 16102, 82, 445, 16, 24, 17, 13, 16, 21, 23, 13, 15, 13, 16, 14, 18, 15, 1138, 16867, 2915, 368, 314, 2936, 9119, 2092, 50746, 17200, 8771, 329...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWikipediaConcurrently(t *testing.T) { m := NewStringMatcher([]string{"a", "ab", "bc", "bca", "c", "caa"}) wg := sync.WaitGroup{} wg.Add(3) go func() { defer wg.Done() hits := m.MatchThreadSafe([]byte("abccab")) assert(t, len(hits) == 4) assert(t, hits[0].Index == 0) assert(t, hits[1].Index == 1) assert(t, hits[2].Index == 2) assert(t, hits[3].Index == 4) }() go func() { defer wg.Done() hits := m.MatchThreadSafe([]byte("bccab")) assert(t, len(hits) == 4) assert(t, hits[0].Index == 2) assert(t, hits[1].Index == 4) assert(t, hits[2].Index == 0) assert(t, hits[3].Index == 1) }() go func() { defer wg.Done() hits := m.MatchThreadSafe([]byte("bccb")) assert(t, len(hits) == 2) assert(t, hits[0].Index == 2) assert(t, hits[1].Index == 4) }() wg.Wait() }
explode_data.jsonl/22653
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 386 }
[ 2830, 3393, 54, 14939, 1109, 58202, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1532, 703, 37554, 10556, 917, 4913, 64, 497, 330, 370, 497, 330, 8904, 497, 330, 65, 924, 497, 330, 66, 497, 330, 87734, 1, 8824, 72079, 1669, 12811, 2838...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFromHTTPRequest(t *testing.T) { const testURL = "http:://example.com/path?q=1" u, err := url.Parse(testURL) if err != nil { t.Fatal(err) } req := &HTTPRequest{ Request: &http.Request{ Method: "GET", URL: u, Header: map[string][]string{ "User-Agent": {"user-agent"}, "Referer": {"referer"}, }, }, RequestSize: 100, Status: 200, ResponseSize: 25, Latency: 100 * time.Second, LocalIP: "127.0.0.1", RemoteIP: "10.0.1.1", CacheHit: true, CacheValidatedWithOriginServer: true, } got := fromHTTPRequest(req) want := &logtypepb.HttpRequest{ RequestMethod: "GET", RequestUrl: testURL, RequestSize: 100, Status: 200, ResponseSize: 25, Latency: &durpb.Duration{Seconds: 100}, UserAgent: "user-agent", ServerIp: "127.0.0.1", RemoteIp: "10.0.1.1", Referer: "referer", CacheHit: true, CacheValidatedWithOriginServer: true, } if !proto.Equal(got, want) { t.Errorf("got %+v\nwant %+v", got, want) } }
explode_data.jsonl/33208
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 829 }
[ 2830, 3393, 3830, 63765, 1155, 353, 8840, 836, 8, 341, 4777, 1273, 3144, 284, 330, 1254, 486, 322, 8687, 905, 50976, 43782, 28, 16, 698, 10676, 11, 1848, 1669, 2515, 8937, 8623, 3144, 340, 743, 1848, 961, 2092, 341, 197, 3244, 26133, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNormalizeRollover(t *testing.T) { tests := []struct { underTest v1.JaegerEsRolloverSpec expected v1.JaegerEsRolloverSpec }{ {underTest: v1.JaegerEsRolloverSpec{}, expected: v1.JaegerEsRolloverSpec{Schedule: "0 0 * * *"}}, {underTest: v1.JaegerEsRolloverSpec{Image: "bla", Schedule: "lol"}, expected: v1.JaegerEsRolloverSpec{Image: "bla", Schedule: "lol"}}, } for _, test := range tests { normalizeRollover(&test.underTest) assert.Equal(t, test.expected, test.underTest) } }
explode_data.jsonl/21858
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 87824, 49, 27961, 423, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 197, 7995, 2271, 348, 16, 3503, 64, 1878, 17360, 49, 27961, 423, 8327, 198, 197, 42400, 220, 348, 16, 3503, 64, 1878, 17360, 49, 279...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestClient_ModifyTenantRoute(t *testing.T) { client, fsClient := newTestClient() err := client.ModifyTenantRoute(testNID, &config.Route{ Receiver: "test_tenant_base_route", Routes: []*config.Route{ {Receiver: "slack"}, }, }) assert.NoError(t, err) fsClient.AssertCalled(t, "WriteFile", "test/alertmanager.yml", mock.Anything, mock.Anything) err = client.ModifyTenantRoute(testNID, &config.Route{ Receiver: "invalid_base_route", Routes: []*config.Route{ {Receiver: "slack"}, }, }) assert.EqualError(t, err, "route base receiver is incorrect (should be \"test_tenant_base_route\"). The base node should match nothing, then add routes as children of the base node") err = client.ModifyTenantRoute(testNID, &config.Route{ Receiver: "test", Routes: []*config.Route{{ Receiver: "nonexistent", }}, }) assert.Error(t, err) fsClient.AssertNumberOfCalls(t, "WriteFile", 1) }
explode_data.jsonl/63855
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 352 }
[ 2830, 3393, 2959, 71485, 1437, 71252, 4899, 1155, 353, 8840, 836, 8, 341, 25291, 11, 8619, 2959, 1669, 501, 2271, 2959, 741, 9859, 1669, 2943, 23127, 1437, 71252, 4899, 8623, 45, 915, 11, 609, 1676, 58004, 515, 197, 197, 25436, 25, 33...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAddNewFilmLimit(t *testing.T) { testTelegramClientInst, answerChan, _ := NewTestMovieBot("./test_data/test_data.sql") testTelegramClientInst.CFG = cfg.Config{FilmLimit: 1} updates := make(chan tgbotapi.Update) go testTelegramClientInst.AddFilmToHat(updates) updates <- tgbotapi.Update{Message: &tgbotapi.Message{Text: "", Chat: &tgbotapi.Chat{ID: 100}}} answer := <-answerChan expectedAnswer := "Вы привысили лимит фильмов на человека, вы сможете добавить фильм после того как какой-нибудь ваш фильм выберет шляпа. Значение лимита 1." if answer != expectedAnswer { t.Errorf(fmt.Sprintf("Not expected bot answer: %s, expected: %s", answer, expectedAnswer)) return } t.Logf("TestAddNewFilmLimit complete") }
explode_data.jsonl/19233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 358 }
[ 2830, 3393, 2212, 3564, 51487, 16527, 1155, 353, 8840, 836, 8, 341, 18185, 72244, 2959, 8724, 11, 4226, 46019, 11, 716, 1669, 1532, 2271, 19668, 23502, 13988, 1944, 1769, 12697, 1769, 10045, 1138, 18185, 72244, 2959, 8724, 727, 12001, 284...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFormatNumberSI(t *testing.T) { assert.Equal(t, "125", FormatNumberSI(int(125))) assert.Equal(t, "1.3k", FormatNumberSI(int64(1317))) assert.Equal(t, "21.3M", FormatNumberSI(21317675)) assert.Equal(t, "45.7G", FormatNumberSI(45721317675)) assert.Equal(t, "", FormatNumberSI("test")) }
explode_data.jsonl/14324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 125 }
[ 2830, 3393, 4061, 2833, 13817, 1155, 353, 8840, 836, 8, 341, 6948, 12808, 1155, 11, 330, 16, 17, 20, 497, 15042, 2833, 13817, 1548, 7, 16, 17, 20, 5929, 6948, 12808, 1155, 11, 330, 16, 13, 18, 74, 497, 15042, 2833, 13817, 1548, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCreatePipeline_GetParametersError(t *testing.T) { store := NewFakeClientManagerOrFatal(util.NewFakeTimeForEpoch()) defer store.Close() manager := NewResourceManager(store) _, err := manager.CreatePipeline("pipeline1", "", []byte("I am invalid yaml")) assert.Equal(t, codes.InvalidArgument, err.(*util.UserError).ExternalStatusCode()) assert.Contains(t, err.Error(), "Failed to parse the parameter") }
explode_data.jsonl/28349
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 4021, 34656, 13614, 9706, 1454, 1155, 353, 8840, 836, 8, 341, 57279, 1669, 1532, 52317, 2959, 2043, 2195, 62396, 67811, 7121, 52317, 1462, 2461, 44338, 2398, 16867, 3553, 10421, 741, 92272, 1669, 1532, 32498, 31200, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMiddleware_GRPCBasicAuth(t *testing.T) { env.SetEnv(env.Env{BasicAuthUsername: basicUsername, BasicAuthPassword: basicPass}) mw := &Middleware{} t.Run("Testcase #1: Positive", func(t *testing.T) { ctx := metadata.NewIncomingContext(context.Background(), metadata.MD{ strings.ToLower(candihelper.HeaderAuthorization): []string{"Basic " + validBasicAuth}, }) assert.NotPanics(t, func() { mw.GRPCBasicAuth(ctx) }) }) t.Run("Testcase #2: Negative", func(t *testing.T) { ctx := metadata.NewIncomingContext(context.Background(), metadata.MD{ strings.ToLower(candihelper.HeaderAuthorization): []string{}, }) assert.Panics(t, func() { mw.GRPCBasicAuth(ctx) }) }) t.Run("Testcase #3: Negative", func(t *testing.T) { ctx := metadata.NewIncomingContext(context.Background(), metadata.MD{ strings.ToLower(candihelper.HeaderAuthorization): []string{"Basic xxx"}, }) assert.Panics(t, func() { mw.GRPCBasicAuth(ctx) }) }) }
explode_data.jsonl/44828
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 353 }
[ 2830, 3393, 24684, 17874, 4872, 15944, 5087, 1155, 353, 8840, 836, 8, 341, 57538, 4202, 14359, 16978, 81214, 90, 15944, 5087, 11115, 25, 6770, 11115, 11, 14625, 5087, 4876, 25, 6770, 12187, 3518, 2109, 86, 1669, 609, 24684, 31483, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTerraformCommandCliArgs(t *testing.T) { t.Parallel() testCases := []struct { command []string expected string }{ { []string{"version"}, "terraform version", }, { []string{"version", "foo"}, "terraform version foo", }, { []string{"version", "foo", "bar", "baz"}, "terraform version foo bar baz", }, { []string{"version", "foo", "bar", "baz", "foobar"}, "terraform version foo bar baz foobar", }, } for _, testCase := range testCases { cmd := fmt.Sprintf("terragrunt %s --terragrunt-non-interactive --terragrunt-log-level debug --terragrunt-working-dir %s", strings.Join(testCase.command, " "), TEST_FIXTURE_EXTRA_ARGS_PATH) var ( stdout bytes.Buffer stderr bytes.Buffer ) runTerragruntRedirectOutput(t, cmd, &stdout, &stderr) output := stdout.String() errOutput := stderr.String() assert.True(t, strings.Contains(errOutput, testCase.expected) || strings.Contains(output, testCase.expected)) } }
explode_data.jsonl/10103
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 400 }
[ 2830, 3393, 51, 13886, 627, 4062, 87014, 4117, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 18185, 37302, 1669, 3056, 1235, 341, 197, 45566, 220, 3056, 917, 198, 197, 42400, 914, 198, 197, 59403, 197, 197, 515, 298, 197, 129...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestKustomizeBuildOptionsLoadRestrictor(t *testing.T) { Given(t). Path(guestbookPath). And(func() { errors.FailOnErr(fixture.Run("", "kubectl", "patch", "cm", "argocd-cm", "-n", fixture.ArgoCDNamespace, "-p", `{ "data": { "kustomize.buildOptions": "--load_restrictor none" } }`)) }). When(). PatchFile("kustomization.yaml", `[{"op": "replace", "path": "/resources/1", "value": "../guestbook_local/guestbook-ui-svc.yaml"}]`). Create(). Sync(). Then(). Expect(OperationPhaseIs(OperationSucceeded)). Expect(HealthIs(health.HealthStatusHealthy)). Expect(SyncStatusIs(SyncStatusCodeSynced)). Given(). And(func() { errors.FailOnErr(fixture.Run("", "kubectl", "patch", "cm", "argocd-cm", "-n", fixture.ArgoCDNamespace, "-p", `{ "data": { "kustomize.buildOptions": "" } }`)) }) }
explode_data.jsonl/37135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 42, 1450, 551, 11066, 3798, 5879, 50360, 849, 269, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 69640, 3268, 3045, 2190, 1820, 4292, 197, 197, 3036, 18552, 368, 341, 298, 73424, 57243, 1925, 7747, 94886, 16708,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1