text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
// TestRunLoopAddSuccess starts RunLoop in a goroutine with fake watch/update
// funcs and a fake lifecycler, feeds one ServiceInstance through the fake
// watcher, then cancels the context. It asserts exactly one provision request
// was issued, the instance was updated twice, and RunLoop returned ErrCancelled.
// NOTE(review): the fixed 100ms sleep is a race-prone synchronization point;
// a channel handshake or polling would be more reliable — confirm before relying
// on this in CI under load.
func TestRunLoopAddSuccess(t *testing.T) { ctx, cancel := context.WithCancel(context.Background()) watcher, fakeWatcher := newFakeWatchServiceInstanceFunc(nil) updater, updated := newFakeUpdateServiceInstanceFunc(nil) getServiceClassFn := refs.NewFakeServiceClassGetterFunc(&data.ServiceClass{}, nil) getServiceBrokerFn := refs.NewFakeServiceBrokerGetterFunc(&data.ServiceBroker{}, nil) provisioner := fake.NewProvisioner() lifecycler := &fake.Lifecycler{ Provisioner: provisioner, } errCh := make(chan error) go func() { errCh <- RunLoop(ctx, watcher, updater, getServiceClassFn, getServiceBrokerFn, lifecycler) }() inst := new(data.ServiceInstance) inst.Kind = data.ServiceInstanceKind fakeWatcher.Add(inst) time.Sleep(100 * time.Millisecond) cancel() fakeWatcher.Stop() err := <-errCh assert.Equal(t, len(provisioner.Reqs), 1, "number of provision requests") assert.Equal(t, len(*updated), 2, "number of updated service instances") assert.Err(t, ErrCancelled, err) }
explode_data.jsonl/72426
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 355 }
[ 2830, 3393, 80520, 2212, 7188, 1155, 353, 8840, 836, 8, 341, 20985, 11, 9121, 1669, 2266, 26124, 9269, 5378, 19047, 2398, 6692, 28058, 11, 12418, 47248, 1669, 501, 52317, 14247, 1860, 2523, 9626, 27907, 340, 59810, 27463, 11, 6049, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestWalletAddUTXO(t *testing.T) { chainID := ids.ID{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} w, err := NewWallet(logging.NoLog{}, 12345, chainID, 0) if err != nil { t.Fatal(err) } utxo := &avax.UTXO{ UTXOID: avax.UTXOID{TxID: ids.Empty.Prefix(0)}, Asset: avax.Asset{ID: ids.Empty.Prefix(1)}, Out: &secp256k1fx.TransferOutput{ Amt: 1000, }, } w.AddUTXO(utxo) if balance := w.Balance(utxo.AssetID()); balance != 1000 { t.Fatalf("expected balance to be 1000, was %d", balance) } }
explode_data.jsonl/41621
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 247 }
[ 2830, 3393, 38259, 2212, 1381, 55, 46, 1155, 353, 8840, 836, 8, 341, 197, 8819, 915, 1669, 14151, 9910, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, 220, 21, 11, 220, 22, 11, 220, 23, 11, 220, 24, 11, 220,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestNegativeZero verifies that +0.0 and -0.0 collide as float64 map keys:
// inserting through one zero and then the other leaves a single entry, and
// the stored key takes the sign of the most recent insertion.
func TestNegativeZero(t *testing.T) {
	negZero := math.Copysign(0.0, -1.0)

	m := make(map[float64]bool, 0)
	m[+0.0] = true
	m[negZero] = true // should overwrite +0 entry
	if len(m) != 1 {
		t.Error("length wrong")
	}
	for k := range m {
		if math.Copysign(1.0, k) > 0 {
			t.Error("wrong sign")
		}
	}

	m = make(map[float64]bool, 0)
	m[negZero] = true
	m[+0.0] = true // should overwrite -0.0 entry
	if len(m) != 1 {
		t.Error("length wrong")
	}
	for k := range m {
		if math.Copysign(1.0, k) < 0 {
			t.Error("wrong sign")
		}
	}
}
explode_data.jsonl/19901
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 259 }
[ 2830, 3393, 38489, 17999, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1281, 9147, 95381, 21, 19, 96436, 11, 220, 15, 692, 2109, 58, 10, 15, 13, 15, 60, 284, 830, 198, 2109, 58, 10374, 727, 453, 1047, 622, 7, 15, 13, 15, 11, 481, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestListOrder exercises the order-list endpoint against a mocked DB and
// mocked auth services (Keto/Kavach via gock). Flow: run the shared admin
// list tests; query with ?user=1 (mocked count + select rows plus payment,
// currency, and associated-collection selects) and assert the JSON envelope
// (total, first node) matches the fixture; assert ?user=abc yields 400; then
// re-run the shared list tests against the user routes on a fresh server.
// NOTE(review): networking stays enabled via gock Persist until the deferred
// DisableNetworking fires — verify no cross-test leakage if run in parallel.
func TestListOrder(t *testing.T) { // Setup DB mock := test.SetupMockDB() // Setup HttpExpect router := action.RegisterAdminRoutes() server := httptest.NewServer(router) adminExpect := httpexpect.New(t, server.URL) test.KetoGock() test.KavachGock() gock.New(server.URL).EnableNetworking().Persist() defer gock.DisableNetworking() // ADMIN tests CommonListTests(t, mock, adminExpect) t.Run("get order list with user query parameter", func(t *testing.T) { mock.ExpectQuery(countQuery). WillReturnRows(sqlmock.NewRows([]string{"count"}).AddRow(len(orderlist))) mock.ExpectQuery(selectQuery). WithArgs(1). WillReturnRows(sqlmock.NewRows(OrderCols). AddRow(1, time.Now(), time.Now(), nil, 1, 1, orderlist[0]["user_id"], orderlist[0]["status"], orderlist[0]["payment_id"], orderlist[0]["razorpay_order_id"]). AddRow(2, time.Now(), time.Now(), nil, 1, 1, orderlist[1]["user_id"], orderlist[1]["status"], orderlist[1]["payment_id"], orderlist[1]["razorpay_order_id"])) payment.PaymentSelectMock(mock) currency.CurrencySelectMock(mock) associatedCollectionsSelectMock(mock) adminExpect.GET(basePath). WithHeaders(headers). WithQuery("user", "1"). Expect(). Status(http.StatusOK). JSON(). Object(). ContainsMap(map[string]interface{}{"total": len(orderlist)}). Value("nodes"). Array(). Element(0). Object(). ContainsMap(orderlist[0]) test.ExpectationsMet(t, mock) }) t.Run("invalid user query parameter", func(t *testing.T) { adminExpect.GET(basePath). WithHeaders(headers). WithQuery("user", "abc"). Expect(). Status(http.StatusBadRequest) }) server.Close() router = action.RegisterUserRoutes() server = httptest.NewServer(router) userExpect := httpexpect.New(t, server.URL) gock.New(server.URL).EnableNetworking().Persist() // USER tests CommonListTests(t, mock, userExpect) server.Close() }
explode_data.jsonl/9492
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 758 }
[ 2830, 3393, 852, 4431, 1155, 353, 8840, 836, 8, 1476, 197, 322, 18626, 5952, 198, 77333, 1669, 1273, 39820, 11571, 3506, 2822, 197, 322, 18626, 4823, 17536, 198, 67009, 1669, 1917, 19983, 7210, 26653, 741, 41057, 1669, 54320, 70334, 7121,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAbsCollection_Reduce(t *testing.T) { intColl := NewIntCollection([]int{1, 2, 3, 4}) sumMix := intColl.Reduce(func(carry IMix, item IMix) IMix { carryInt, _ := carry.ToInt() itemInt, _ := item.ToInt() return NewMix(carryInt + itemInt) }) // sumMix.DD() sum, err := sumMix.ToInt() if err != nil { t.Fatal(err.Error()) } if sum != 10 { t.Fatal("Reduce计算错误") } }
explode_data.jsonl/66440
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 177 }
[ 2830, 3393, 27778, 6482, 92940, 10521, 1155, 353, 8840, 836, 8, 341, 2084, 15265, 1669, 1532, 1072, 6482, 10556, 396, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 3518, 31479, 58083, 1669, 526, 15265, 20943, 10521, 18552, 1337, 1143...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestValidate(t *testing.T) { mount, err := CreateMount() assert.NoError(t, err) assert.NotNil(t, mount) defer assert.NoError(t, mount.Release()) t.Run("mountCurrentDir", func(t *testing.T) { path := mount.CurrentDir() assert.Equal(t, path, "") }) t.Run("mountChangeDir", func(t *testing.T) { err := mount.ChangeDir("someDir") assert.Error(t, err) assert.Equal(t, err, ErrNotConnected) }) t.Run("mountMakeDir", func(t *testing.T) { err := mount.MakeDir("someName", 0444) assert.Error(t, err) assert.Equal(t, err, ErrNotConnected) }) t.Run("mountRemoveDir", func(t *testing.T) { err := mount.RemoveDir("someDir") assert.Error(t, err) assert.Equal(t, err, ErrNotConnected) }) t.Run("mountLink", func(t *testing.T) { err := mount.Link("/", "/") assert.Error(t, err) assert.Equal(t, err, ErrNotConnected) }) t.Run("mountUnlink", func(t *testing.T) { err := mount.Unlink("someFile") assert.Error(t, err) assert.Equal(t, err, ErrNotConnected) }) t.Run("mountSymlink", func(t *testing.T) { err := mount.Symlink("/", "/") assert.Error(t, err) assert.Equal(t, err, ErrNotConnected) }) t.Run("mountReadlink", func(t *testing.T) { _, err := mount.Readlink("somePath") assert.Error(t, err) assert.Equal(t, err, ErrNotConnected) }) }
explode_data.jsonl/29518
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 555 }
[ 2830, 3393, 17926, 1155, 353, 8840, 836, 8, 341, 2109, 629, 11, 1848, 1669, 4230, 16284, 741, 6948, 35699, 1155, 11, 1848, 340, 6948, 93882, 1155, 11, 6470, 340, 16867, 2060, 35699, 1155, 11, 6470, 58693, 12367, 3244, 16708, 445, 16557,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDaemonRuntimeRoot boots a containerd daemon with a custom CRI config,
// runs a "top" container using the runc v1 runtime with Root pointed at a
// temp runtime root, and asserts the runtime wrote its state.json under
// <runtimeRoot>/<namespace>/<id>. The task is then SIGKILLed and its exit
// status awaited. The temp dir is only removed on early error; on success
// cleanup is presumably handled by the daemon teardown — verify.
func TestDaemonRuntimeRoot(t *testing.T) { runtimeRoot, err := ioutil.TempDir("", "containerd-test-runtime-root") if err != nil { t.Fatal(err) } defer func() { if err != nil { os.RemoveAll(runtimeRoot) } }() configTOML := ` [plugins] [plugins.cri] stream_server_port = "0" ` client, _, cleanup := newDaemonWithConfig(t, configTOML) defer cleanup() ctx, cancel := testContext() defer cancel() // FIXME(AkihiroSuda): import locally frozen image? image, err := client.Pull(ctx, testImage, WithPullUnpack) if err != nil { t.Fatal(err) } id := t.Name() container, err := client.NewContainer(ctx, id, WithNewSpec(oci.WithImageConfig(image), withProcessArgs("top")), WithNewSnapshot(id, image), WithRuntime("io.containerd.runc.v1", &options.Options{ Root: runtimeRoot, })) if err != nil { t.Fatal(err) } defer container.Delete(ctx, WithSnapshotCleanup) task, err := container.NewTask(ctx, empty()) if err != nil { t.Fatal(err) } defer task.Delete(ctx) status, err := task.Wait(ctx) if err != nil { t.Fatal(err) } if err = task.Start(ctx); err != nil { t.Fatal(err) } stateJSONPath := filepath.Join(runtimeRoot, testNamespace, id, "state.json") if _, err = os.Stat(stateJSONPath); err != nil { t.Errorf("error while getting stat for %s: %v", stateJSONPath, err) } if err = task.Kill(ctx, syscall.SIGKILL); err != nil { t.Error(err) } <-status }
explode_data.jsonl/45766
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 564 }
[ 2830, 3393, 89177, 15123, 8439, 1155, 353, 8840, 836, 8, 341, 7000, 4466, 8439, 11, 1848, 1669, 43144, 65009, 6184, 19814, 330, 3586, 67, 16839, 68912, 39214, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 16867...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestScenarioOutlineExecutesAllTests(t *testing.T) { c := 0 suite := NewSuite(t, WithFeaturesPath("features/outline.feature")) suite.AddStep(`I add (\d+) and (\d+)`, add) suite.AddStep(`the result should equal (\d+)`, func(t StepTest, ctx Context, sum int) { c++ check(t, ctx, sum) }) suite.Run() if err := assert.Equals(2, c); err != nil { t.Errorf("expected to run %d times but %d got", 2, c) } }
explode_data.jsonl/69369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 174 }
[ 2830, 3393, 54031, 60269, 10216, 2095, 2403, 18200, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 220, 15, 198, 96572, 1669, 1532, 28000, 1155, 11, 3085, 21336, 1820, 445, 20304, 14, 44130, 29591, 5455, 96572, 1904, 8304, 5809, 40, 912, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestAllServersCache_CacheHit asserts that once the all-servers cache is
// warmed (one ByName call in SetUp triggers exactly one ServerClient.All),
// subsequent cache operations are served from the cache: the Assert step
// verifies All was called only that single time. The fixture server carries
// a private-net IP so both by-name and by-IP lookups can hit it.
func TestAllServersCache_CacheHit(t *testing.T) { srv := &hcloud.Server{ ID: 54321, Name: "cache-hit", PrivateNet: []hcloud.ServerPrivateNet{ { IP: net.ParseIP("10.0.0.3"), }, }, } cacheOps := newAllServersCacheOps(t, srv) tmpl := allServersCacheTestCase{ SetUp: func(t *testing.T, tt *allServersCacheTestCase) { tt.ServerClient. On("All", mock.Anything). Return([]*hcloud.Server{srv}, nil). Once() // Perform any cache op to initialize caches if _, err := tt.Cache.ByName(srv.Name); err != nil { t.Fatalf("SetUp: %v", err) } }, Assert: func(t *testing.T, tt *allServersCacheTestCase) { // All must be called only once. This call has happened during the // test SetUp method. All additional calls indicate an error. tt.ServerClient.AssertNumberOfCalls(t, "All", 1) }, Expected: srv, } runAllServersCacheTests(t, "Cache hit", tmpl, cacheOps) }
explode_data.jsonl/5918
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 392 }
[ 2830, 3393, 2403, 78139, 8233, 920, 1777, 19498, 1155, 353, 8840, 836, 8, 341, 1903, 10553, 1669, 609, 71, 12361, 22997, 515, 197, 29580, 25, 256, 220, 20, 19, 18, 17, 16, 345, 197, 21297, 25, 330, 9360, 69101, 756, 197, 197, 16787,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOrderList(t *testing.T) { setup() defer teardown() httpmock.RegisterResponder("GET", fmt.Sprintf("https://fooshop.myshopify.com/%s/orders.json", client.pathPrefix), httpmock.NewBytesResponder(200, loadFixture("orders.json"))) orders, err := client.Order.List(nil) if err != nil { t.Errorf("Order.List returned error: %v", err) } // Check that orders were parsed if len(orders) != 1 { t.Errorf("Order.List got %v orders, expected: 1", len(orders)) } order := orders[0] orderTests(t, order) }
explode_data.jsonl/17985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 4431, 852, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 2822, 28080, 16712, 19983, 30884, 445, 3806, 497, 8879, 17305, 445, 2428, 1110, 824, 9267, 453, 12618, 8675, 1437, 905, 12627, 82, 82818, 4323, 497, 2943, 38...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestBuildProxyPass drives buildProxyPass over the shared tmplFuncTestcases
// table: each case builds an ingress Location (path, rewrite target, optional
// HTTPS backend protocol, X-Forwarded-Prefix) and a backend (optionally with
// cookie session affinity scoped to the test host/path), then compares the
// generated proxy_pass block case-insensitively against the expected string.
func TestBuildProxyPass(t *testing.T) { defaultBackend := "upstream-name" defaultHost := "example.com" for k, tc := range tmplFuncTestcases { loc := &ingress.Location{ Path: tc.Path, Rewrite: rewrite.Config{Target: tc.Target}, Backend: defaultBackend, XForwardedPrefix: tc.XForwardedPrefix, } if tc.SecureBackend { loc.BackendProtocol = "HTTPS" } backend := &ingress.Backend{ Name: defaultBackend, } if tc.Sticky { backend.SessionAffinity = ingress.SessionAffinityConfig{ AffinityType: "cookie", CookieSessionAffinity: ingress.CookieSessionAffinity{ Locations: map[string][]string{ defaultHost: {tc.Path}, }, }, } } backends := []*ingress.Backend{backend} pp := buildProxyPass(defaultHost, backends, loc) if !strings.EqualFold(tc.ProxyPass, pp) { t.Errorf("%s: expected \n'%v'\nbut returned \n'%v'", k, tc.ProxyPass, pp) } } }
explode_data.jsonl/80580
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 423 }
[ 2830, 3393, 11066, 16219, 12187, 1155, 353, 8840, 836, 8, 341, 11940, 29699, 1669, 330, 454, 4027, 11494, 698, 11940, 9296, 1669, 330, 8687, 905, 1837, 2023, 595, 11, 17130, 1669, 2088, 79839, 9626, 2271, 23910, 341, 197, 71128, 1669, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestGetPodExecHookFromAnnotations table-tests getPodExecHookFromAnnotations
// for every hook phase ("", pre, post). Cases cover: no command annotation
// (nil hook); a malformed JSON array treated as a literal single command;
// a valid JSON array split into args; a plain string command; OnError modes
// continue/fail; a valid timeout ("5m3s") parsed into metav1.Duration; an
// invalid timeout silently ignored; and an explicit container annotation.
// Annotation keys are phase-prefixed via phasedKey.
func TestGetPodExecHookFromAnnotations(t *testing.T) { phases := []hookPhase{"", hookPhasePre, hookPhasePost} for _, phase := range phases { tests := []struct { name string annotations map[string]string expectedHook *v1.ExecHook }{ { name: "missing command annotation", expectedHook: nil, }, { name: "malformed command json array", annotations: map[string]string{ phasedKey(phase, podBackupHookCommandAnnotationKey): "[blarg", }, expectedHook: &v1.ExecHook{ Command: []string{"[blarg"}, }, }, { name: "valid command json array", annotations: map[string]string{ phasedKey(phase, podBackupHookCommandAnnotationKey): `["a","b","c"]`, }, expectedHook: &v1.ExecHook{ Command: []string{"a", "b", "c"}, }, }, { name: "command as a string", annotations: map[string]string{ phasedKey(phase, podBackupHookCommandAnnotationKey): "/usr/bin/foo", }, expectedHook: &v1.ExecHook{ Command: []string{"/usr/bin/foo"}, }, }, { name: "hook mode set to continue", annotations: map[string]string{ phasedKey(phase, podBackupHookCommandAnnotationKey): "/usr/bin/foo", phasedKey(phase, podBackupHookOnErrorAnnotationKey): string(v1.HookErrorModeContinue), }, expectedHook: &v1.ExecHook{ Command: []string{"/usr/bin/foo"}, OnError: v1.HookErrorModeContinue, }, }, { name: "hook mode set to fail", annotations: map[string]string{ phasedKey(phase, podBackupHookCommandAnnotationKey): "/usr/bin/foo", phasedKey(phase, podBackupHookOnErrorAnnotationKey): string(v1.HookErrorModeFail), }, expectedHook: &v1.ExecHook{ Command: []string{"/usr/bin/foo"}, OnError: v1.HookErrorModeFail, }, }, { name: "use the specified timeout", annotations: map[string]string{ phasedKey(phase, podBackupHookCommandAnnotationKey): "/usr/bin/foo", phasedKey(phase, podBackupHookTimeoutAnnotationKey): "5m3s", }, expectedHook: &v1.ExecHook{ Command: []string{"/usr/bin/foo"}, Timeout: metav1.Duration{Duration: 5*time.Minute + 3*time.Second}, }, }, { name: "invalid timeout is ignored", annotations: map[string]string{ phasedKey(phase, 
podBackupHookCommandAnnotationKey): "/usr/bin/foo", phasedKey(phase, podBackupHookTimeoutAnnotationKey): "invalid", }, expectedHook: &v1.ExecHook{ Command: []string{"/usr/bin/foo"}, }, }, { name: "use the specified container", annotations: map[string]string{ phasedKey(phase, podBackupHookContainerAnnotationKey): "some-container", phasedKey(phase, podBackupHookCommandAnnotationKey): "/usr/bin/foo", }, expectedHook: &v1.ExecHook{ Container: "some-container", Command: []string{"/usr/bin/foo"}, }, }, } for _, test := range tests { t.Run(fmt.Sprintf("%s (phase=%q)", test.name, phase), func(t *testing.T) { hook := getPodExecHookFromAnnotations(test.annotations, phase) assert.Equal(t, test.expectedHook, hook) }) } } }
explode_data.jsonl/40722
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1384 }
[ 2830, 3393, 1949, 23527, 10216, 31679, 3830, 21418, 1155, 353, 8840, 836, 8, 341, 197, 759, 2264, 1669, 3056, 20873, 30733, 4913, 497, 9704, 30733, 4703, 11, 9704, 30733, 4133, 532, 2023, 8358, 10262, 1669, 2088, 34430, 341, 197, 78216, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestEnginePushQuery verifies the snowman engine's handling of an incoming
// PushQuery for a new block: the engine must (1) send exactly one Chits
// message back to the querying validator, echoing requestID 20 and voting for
// the pushed block, and (2) issue exactly one of its own PushQuery messages
// to the validator set for that block. ParseBlockF/GetBlockF are stubbed so
// only the pushed block resolves; any other lookup is a test failure.
func TestEnginePushQuery(t *testing.T) { vdr, _, sender, vm, te, gBlk := setup(t) sender.Default(true) blk := &Blk{ parent: gBlk, id: GenerateID(), status: choices.Processing, bytes: []byte{1}, } vm.ParseBlockF = func(b []byte) (snowman.Block, error) { if bytes.Equal(b, blk.Bytes()) { return blk, nil } return nil, errUnknownBytes } vm.GetBlockF = func(id ids.ID) (snowman.Block, error) { if id.Equals(blk.ID()) { return blk, nil } t.Fatal(errUnknownBytes) panic(errUnknownBytes) } chitted := new(bool) sender.ChitsF = func(inVdr ids.ShortID, requestID uint32, votes ids.Set) { if *chitted { t.Fatalf("Sent chit multiple times") } *chitted = true if !inVdr.Equals(vdr.ID()) { t.Fatalf("Asking wrong validator for preference") } if requestID != 20 { t.Fatalf("Wrong request id") } if votes.Len() != 1 { t.Fatal("votes should only have one element") } vote := votes.List()[0] if !blk.ID().Equals(vote) { t.Fatalf("Asking for wrong block") } } queried := new(bool) sender.PushQueryF = func(inVdrs ids.ShortSet, _ uint32, blkID ids.ID, blkBytes []byte) { if *queried { t.Fatalf("Asked multiple times") } *queried = true vdrSet := ids.ShortSet{} vdrSet.Add(vdr.ID()) if !inVdrs.Equals(vdrSet) { t.Fatalf("Asking wrong validator for preference") } if !blk.ID().Equals(blkID) { t.Fatalf("Asking for wrong block") } } te.PushQuery(vdr.ID(), 20, blk.ID(), blk.Bytes()) if !*chitted { t.Fatalf("Should have sent a chit to the peer") } if !*queried { t.Fatalf("Should have sent a query to the peer") } }
explode_data.jsonl/3562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 738 }
[ 2830, 3393, 4571, 16644, 2859, 1155, 353, 8840, 836, 8, 341, 5195, 3612, 11, 8358, 4646, 11, 10995, 11, 1013, 11, 342, 4923, 74, 1669, 6505, 1155, 692, 1903, 1659, 13275, 3715, 692, 197, 34989, 1669, 609, 4923, 74, 515, 197, 24804, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestCoinMultiSendGenerateOnly exercises the LCD "generate only" transfer
// path: the endpoint must return 200 with an unsigned auth.StdTx containing
// exactly one bank.MsgSend routed to "bank", whose signer and FromAddress are
// the created account, with the request memo preserved, a non-zero simulated
// fee gas, and zero signatures (since generate-only never signs).
func TestCoinMultiSendGenerateOnly(t *testing.T) { kb, err := keys.NewKeyBaseFromDir(InitClientHome(t, "")) require.NoError(t, err) addr, seed := CreateAddr(t, name1, pw, kb) cleanup, _, _, port := InitializeTestLCD(t, 1, []sdk.AccAddress{addr}, true) defer cleanup() // generate only res, body, _ := doTransferWithGas(t, port, seed, "", memo, "", addr, "200000", 1, false, false, fees) require.Equal(t, http.StatusOK, res.StatusCode, body) var stdTx auth.StdTx require.Nil(t, cdc.UnmarshalJSON([]byte(body), &stdTx)) require.Equal(t, len(stdTx.Msgs), 1) require.Equal(t, stdTx.GetMsgs()[0].Route(), "bank") require.Equal(t, stdTx.GetMsgs()[0].GetSigners(), []sdk.AccAddress{addr}) require.Equal(t, 0, len(stdTx.Signatures)) require.Equal(t, memo, stdTx.Memo) require.NotZero(t, stdTx.Fee.Gas) require.IsType(t, stdTx.GetMsgs()[0], bank.MsgSend{}) require.Equal(t, addr, stdTx.GetMsgs()[0].(bank.MsgSend).FromAddress) }
explode_data.jsonl/25403
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 41180, 20358, 11505, 31115, 7308, 1155, 353, 8840, 836, 8, 341, 16463, 65, 11, 1848, 1669, 6894, 7121, 1592, 3978, 3830, 6184, 7, 3803, 2959, 7623, 1155, 11, 77561, 17957, 35699, 1155, 11, 1848, 340, 53183, 11, 10320, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestCliIndex registers two commands under "foo", then calls Load() with
// programArgs pointing at the bare "foo test" invocation. The deferred
// recover() absorbs an expected panic from Load and logs the captured output.
// NOTE(review): control flow is subtle — if Load panics, t.Fail() is never
// reached and the test passes; if Load returns normally, t.Fail() marks the
// test failed. I.e. the test asserts that Load panics here — confirm that is
// the intended contract of Load for an incomplete command.
func TestCliIndex(t *testing.T) { w := bytes.NewBufferString("\n") defaultManager.out = w defaultManager.commands = nil programArgs = []string{"foo", "test"} Register("foo", "bar").Help("A test command") Register("foo", "buu").Help("Another test command") defer func() { recover() t.Log(w.String()) }() Load() t.Fail() }
explode_data.jsonl/55722
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 87014, 1552, 1155, 353, 8840, 836, 8, 341, 6692, 1669, 5820, 7121, 4095, 703, 4921, 77, 1138, 11940, 2043, 2532, 284, 289, 198, 11940, 2043, 33902, 284, 2092, 198, 197, 14906, 4117, 284, 3056, 917, 4913, 7975, 497, 330, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPreserveURLEncoding asserts the proxy matches resources and forwards
// requests WITHOUT decoding percent-encoded URIs. It configures role-guarded
// resources (/api/v2/*, /api/v1/auth*, whitelisted /api/v1/*, catch-all /*)
// and replays regression cases from KEYCLOAK-10864, -11276 and -13315 plus
// encoded query-string referers, checking the upstream-echoed "uri" field
// still contains the original %-escapes, alongside the expected status codes
// (403 for wrong roles, 401 unauthenticated, 200 proxied otherwise).
func TestPreserveURLEncoding(t *testing.T) { cfg := newFakeKeycloakConfig() cfg.EnableLogging = true cfg.Resources = []*Resource{ { URL: "/api/v2/*", Methods: allHTTPMethods, Roles: []string{"dev"}, }, { URL: "/api/v1/auth*", Methods: allHTTPMethods, Roles: []string{"admin"}, }, { URL: "/api/v1/*", Methods: allHTTPMethods, WhiteListed: true, }, { URL: "/*", Methods: allHTTPMethods, Roles: []string{"user"}, }, } requests := []fakeRequest{ { URI: "/test", HasToken: true, Roles: []string{"nothing"}, ExpectedCode: http.StatusForbidden, }, { URI: "/", ExpectedCode: http.StatusUnauthorized, }, { // See KEYCLOAK-10864 URI: "/administrativeMonitor/hudson.diagnosis.ReverseProxySetupMonitor/testForReverseProxySetup/https%3A%2F%2Flocalhost%3A6001%2Fmanage/", ExpectedContentContains: `"uri":"/administrativeMonitor/hudson.diagnosis.ReverseProxySetupMonitor/testForReverseProxySetup/https%3A%2F%2Flocalhost%3A6001%2Fmanage/"`, HasToken: true, Roles: []string{"user"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, { // See KEYCLOAK-11276 URI: "/iiif/2/edepot_local:ST%2F00001%2FST00005_00001.jpg/full/1000,/0/default.png", ExpectedContentContains: `"uri":"/iiif/2/edepot_local:ST%2F00001%2FST00005_00001.jpg/full/1000,/0/default.png"`, HasToken: true, Roles: []string{"user"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, { // See KEYCLOAK-13315 URI: "/rabbitmqui/%2F/replicate-to-central", ExpectedContentContains: `"uri":"/rabbitmqui/%2F/replicate-to-central"`, HasToken: true, Roles: []string{"user"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, { // should work URI: "/api/v1/auth", HasToken: true, Roles: []string{"admin"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, { // should work URI: "/api/v1/auth?referer=https%3A%2F%2Fwww.example.com%2Fauth", ExpectedContentContains: `"uri":"/api/v1/auth?referer=https%3A%2F%2Fwww.example.com%2Fauth"`, HasToken: true, Roles: []string{"admin"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, { URI: 
"/api/v1/auth?referer=https%3A%2F%2Fwww.example.com%2Fauth", HasToken: true, Roles: []string{"user"}, ExpectedCode: http.StatusForbidden, }, { // should work URI: "/api/v3/auth?referer=https%3A%2F%2Fwww.example.com%2Fauth", ExpectedContentContains: `"uri":"/api/v3/auth?referer=https%3A%2F%2Fwww.example.com%2Fauth"`, HasToken: true, Roles: []string{"user"}, ExpectedProxy: true, ExpectedCode: http.StatusOK, }, } newFakeProxy(cfg).RunTests(t, requests) }
explode_data.jsonl/14750
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1705 }
[ 2830, 3393, 14367, 5852, 1511, 867, 1016, 3700, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 501, 52317, 1592, 88751, 2648, 741, 50286, 32287, 34575, 284, 830, 198, 50286, 21703, 284, 29838, 4783, 515, 197, 197, 515, 298, 79055, 25, 257, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteTenant(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() mockDeleteTenantResponse(t) err := tenants.Delete(client.ServiceClient(), "2466f69cd4714d89a548a68ed97ffcd4").ExtractErr() th.AssertNoErr(t, err) }
explode_data.jsonl/82168
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 6435, 71252, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 2822, 77333, 6435, 71252, 2582, 1155, 692, 9859, 1669, 39916, 18872, 12805, 13860, 2959, 1507, 330, 17, 19, 21, 21, 69, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCacheOnlyIfBodyRead(t *testing.T) { resetTest() { req, err := http.NewRequest("GET", s.server.URL, nil) if err != nil { t.Fatal(err) } resp, err := s.client.Do(req) if err != nil { t.Fatal(err) } if resp.Header.Get(XFromCache) != "" { t.Fatal("XFromCache header isn't blank") } // We do not read the body resp.Body.Close() } { req, err := http.NewRequest("GET", s.server.URL, nil) if err != nil { t.Fatal(err) } resp, err := s.client.Do(req) if err != nil { t.Fatal(err) } defer resp.Body.Close() if resp.Header.Get(XFromCache) != "" { t.Fatalf("XFromCache header isn't blank") } } }
explode_data.jsonl/77613
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 306 }
[ 2830, 3393, 8233, 7308, 2679, 5444, 4418, 1155, 353, 8840, 836, 8, 341, 70343, 2271, 741, 197, 515, 197, 24395, 11, 1848, 1669, 1758, 75274, 445, 3806, 497, 274, 12638, 20893, 11, 2092, 340, 197, 743, 1848, 961, 2092, 341, 298, 3244, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestFilenameCollision(t *testing.T) { file1 := "sample.md" file2 := "second.md" v := &bytes.Buffer{} log.SetOutput(v) src := "./*/index.md" dest := "folder" testFs := newTmpFS(t) copyFile(t, testFs, filepath.Join("testdata", file1), "folder1/index.md") copyFile(t, testFs, filepath.Join("testdata", file2), "folder2/index.md") copyFile(t, testFs, filepath.Join("testdata", file2), "folder3/index.md") err := testFs.Mkdir(dest, os.ModePerm) failOn(t, err, "create directory") err = mv(testFs, src, dest) require.NoError(t, err) require.Contains(t, v.String(), "same name", "A warning should be logged") require.False(t, dirExists(testFs, "folder1"), "First source directory exists") require.False(t, dirExists(testFs, "folder2"), "Second source directory exists") require.False(t, dirExists(testFs, "folder3"), "Third source directory exists") require.True(t, fileExists(testFs, filepath.Join(dest, "index.md")), "First file moved") require.True(t, fileExists(testFs, filepath.Join(dest, "index_1.md")), "Second file moved") require.True(t, fileExists(testFs, filepath.Join(dest, "index_2.md")), "Third file moved") }
explode_data.jsonl/36836
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 430 }
[ 2830, 3393, 20759, 32280, 1155, 353, 8840, 836, 8, 341, 17661, 16, 1669, 330, 13611, 21324, 698, 17661, 17, 1669, 330, 5569, 21324, 1837, 5195, 1669, 609, 9651, 22622, 16094, 6725, 4202, 5097, 3747, 692, 41144, 1669, 5933, 91537, 1252, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTargetSetRecreatesTargetGroupsEveryRun(t *testing.T) { verifyPresence := func(tgroups map[string]*config.TargetGroup, name string, present bool) { if _, ok := tgroups[name]; ok != present { msg := "" if !present { msg = "not " } t.Fatalf("'%s' should %sbe present in TargetSet.tgroups: %s", name, msg, tgroups) } } cfg := &config.ServiceDiscoveryConfig{} sOne := ` static_configs: - targets: ["foo:9090"] - targets: ["bar:9090"] ` if err := yaml.Unmarshal([]byte(sOne), cfg); err != nil { t.Fatalf("Unable to load YAML config sOne: %s", err) } called := make(chan struct{}) ts := NewTargetSet(&mockSyncer{ sync: func([]*config.TargetGroup) { called <- struct{}{} }, }) ctx, cancel := context.WithCancel(context.Background()) defer cancel() go ts.Run(ctx) ts.UpdateProviders(ProvidersFromConfig(*cfg, nil)) <-called verifyPresence(ts.tgroups, "static/0/0", true) verifyPresence(ts.tgroups, "static/0/1", true) sTwo := ` static_configs: - targets: ["foo:9090"] ` if err := yaml.Unmarshal([]byte(sTwo), cfg); err != nil { t.Fatalf("Unable to load YAML config sTwo: %s", err) } ts.UpdateProviders(ProvidersFromConfig(*cfg, nil)) <-called verifyPresence(ts.tgroups, "static/0/0", true) verifyPresence(ts.tgroups, "static/0/1", false) }
explode_data.jsonl/81004
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 523 }
[ 2830, 3393, 6397, 1649, 693, 58519, 6397, 22173, 11510, 6727, 1155, 353, 8840, 836, 8, 1476, 93587, 89169, 1669, 2915, 1155, 16753, 2415, 14032, 8465, 1676, 35016, 2808, 11, 829, 914, 11, 3042, 1807, 8, 341, 197, 743, 8358, 5394, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSpxBasic2(t *testing.T) { gopSpxTest(t, ` import ( "fmt" ) const ( Foo = 1 ) func bar() { } func onInit() { Foo bar fmt.Println("Hi") } `, ``, `package main import ( fmt "fmt" spx "github.com/goplus/gop/cl/internal/spx" ) const Foo = 1 type index struct { *spx.MyGame } func (this *index) bar() { } func (this *index) onInit() { Foo this.bar() fmt.Println("Hi") } `) }
explode_data.jsonl/78664
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 199 }
[ 2830, 3393, 50, 1767, 15944, 17, 1155, 353, 8840, 836, 8, 341, 3174, 453, 50, 1767, 2271, 1155, 11, 22074, 474, 2399, 197, 21871, 698, 692, 1024, 2399, 12727, 2624, 284, 220, 16, 198, 692, 2830, 3619, 368, 341, 630, 2830, 389, 3803,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestToArtifactKeyNoDataset(t *testing.T) { artifactKey := ToArtifactKey(nil, "artifactID-1") assert.Equal(t, artifactKey.DatasetProject, "") assert.Equal(t, artifactKey.DatasetDomain, "") assert.Equal(t, artifactKey.DatasetName, "") assert.Equal(t, artifactKey.DatasetVersion, "") assert.Equal(t, artifactKey.ArtifactID, "artifactID-1") }
explode_data.jsonl/8786
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 1249, 85578, 1592, 2753, 33363, 1155, 353, 8840, 836, 8, 341, 197, 63722, 1592, 1669, 2014, 85578, 1592, 27907, 11, 330, 63722, 915, 12, 16, 1138, 6948, 12808, 1155, 11, 36639, 1592, 79356, 7849, 11, 14676, 6948, 12808, 1155...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_getTime(t *testing.T) { type args struct { timeType string } tests := []struct { name string args args want string }{ // TODO: Add test cases. } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := getTime(tt.args.timeType); got != tt.want { t.Errorf("getTime() = %v, want %v", got, tt.want) } }) } }
explode_data.jsonl/47729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 164 }
[ 2830, 3393, 3062, 1462, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 21957, 929, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 31215, 2827, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestHtpasswdFiveCreds(t *testing.T) { Convey("Five creds", t, func() { tests := map[string]string{ "michael": "scott", "jim": "halpert", "dwight": "shrute", "pam": "bessley", "creed": "bratton", } credString := strings.Builder{} for key, val := range tests { credString.WriteString(getCredString(key, val) + "\n") } func() { port := test.GetFreePort() baseURL := test.GetBaseURL(port) conf := config.New() conf.HTTP.Port = port htpasswdPath := test.MakeHtpasswdFileFromString(credString.String()) defer os.Remove(htpasswdPath) conf.HTTP.Auth = &config.AuthConfig{ HTPasswd: config.AuthHTPasswd{ Path: htpasswdPath, }, } ctlr := api.NewController(conf) ctlr.Config.Storage.RootDirectory = t.TempDir() go startServer(ctlr) defer stopServer(ctlr) test.WaitTillServerReady(baseURL) // with creds, should get expected status code for key, val := range tests { resp, _ := resty.R().SetBasicAuth(key, val).Get(baseURL + "/v2/") So(resp, ShouldNotBeNil) So(resp.StatusCode(), ShouldEqual, http.StatusOK) } // with invalid creds, it should fail resp, _ := resty.R().SetBasicAuth("chuck", "chuck").Get(baseURL + "/v2/") So(resp, ShouldNotBeNil) So(resp.StatusCode(), ShouldEqual, http.StatusUnauthorized) }() }) }
explode_data.jsonl/77682
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 587 }
[ 2830, 3393, 39, 790, 395, 6377, 37020, 34, 53369, 1155, 353, 8840, 836, 8, 341, 93070, 5617, 445, 37020, 73177, 497, 259, 11, 2915, 368, 341, 197, 78216, 1669, 2415, 14032, 30953, 515, 298, 197, 1, 76, 59708, 788, 330, 2388, 1716, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_MissionOnlineByTid(t *testing.T) { var ( c = context.TODO() err error tid = int16(160) plat = int16(1) mm []*activity.ActWithTP ) convey.Convey("MissionOnlineByTid", t, func(ctx convey.C) { defer gock.OffAll() url := d.ActOnlineByTypeURL httpMock("GET", url).Reply(200).JSON(` { "code": 0, "data": [ { "id": 10329, "oid": 0, "type": 4, "state": 1, "stime": "2018-10-14 14:32:00", "etime": "2018-12-17 00:00:00", "ctime": "2018-08-30 18:03:30", "mtime": "2018-10-17 18:27:44", "name": "这是一个", "author": "jinchenchen", "act_url": "", "lstime": "2018-09-08 00:00:00", "letime": "2018-12-01 00:00:00", "cover": "//uat-i0.hdslb.com/bfs/test/static/20181017/fb8f33d1a41042b9a1ebb515fdc19d94/nBljdwyCo.jpg", "dic": "sdf", "flag": "33", "uetime": "0000-00-00 00:00:00", "ustime": "0000-00-00 00:00:00", "level": "0", "h5_cover": "", "rank": "123", "like_limit": "1", "android_url": "", "ios_url": "", "fan_limit_max": "0", "fan_limit_min": "0", "tags": "", "types": "", "hot": 0, "bgm_id": 123, "paster_id": 123, "oids": "10110549|10110164|10110536|10110546", "screen_set": 1, "protocol": "活动说明" } ] } `) mm, err = d.MissionOnlineByTid(c, tid, plat) ctx.Convey("Then err should be nil.has should not be nil.", func(ctx convey.C) { ctx.So(err, convey.ShouldBeNil) ctx.So(mm, convey.ShouldNotBeNil) }) }) }
explode_data.jsonl/3442
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 839 }
[ 2830, 3393, 1245, 7558, 19598, 1359, 51, 307, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 1444, 262, 284, 2266, 90988, 741, 197, 9859, 220, 1465, 198, 197, 3244, 307, 220, 284, 526, 16, 21, 7, 16, 21, 15, 340, 197, 197, 42339, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDefaultProcessors(t *testing.T) { allFactories, err := Components() require.NoError(t, err) procFactories := allFactories.Processors tests := []struct { processor config.Type getConfigFn getProcessorConfigFn skipLifecycle bool }{ { processor: "attributes", getConfigFn: func() config.Processor { cfg := procFactories["attributes"].CreateDefaultConfig().(*attributesprocessor.Config) cfg.Actions = []attraction.ActionKeyValue{ {Key: "attribute1", Action: attraction.INSERT, Value: 123}, } return cfg }, }, { processor: "batch", }, { processor: "deltatorate", }, { processor: "filter", }, { processor: "groupbyattrs", }, { processor: "groupbytrace", }, { processor: "k8sattributes", skipLifecycle: true, // Requires a k8s API to communicate with }, { processor: "memory_limiter", getConfigFn: func() config.Processor { cfg := procFactories["memory_limiter"].CreateDefaultConfig().(*memorylimiterprocessor.Config) cfg.CheckInterval = 100 * time.Millisecond cfg.MemoryLimitMiB = 1024 * 1024 return cfg }, }, { processor: "metricstransform", }, { processor: "experimental_metricsgeneration", }, { processor: "probabilistic_sampler", }, { processor: "resourcedetection", }, { processor: "resource", getConfigFn: func() config.Processor { cfg := procFactories["resource"].CreateDefaultConfig().(*resourceprocessor.Config) cfg.AttributesActions = []attraction.ActionKeyValue{ {Key: "attribute1", Action: attraction.INSERT, Value: 123}, } return cfg }, }, { processor: "routing", skipLifecycle: true, // Requires external exporters to be configured to route data }, { processor: "span", getConfigFn: func() config.Processor { cfg := procFactories["span"].CreateDefaultConfig().(*spanprocessor.Config) cfg.Rename.FromAttributes = []string{"test-key"} return cfg }, }, { processor: "spanmetrics", skipLifecycle: true, // Requires a running exporter to convert data to/from }, { processor: "cumulativetodelta", }, { processor: "tail_sampling", }, { processor: "transform", }, } assert.Len(t, tests, 
len(procFactories), "All processors MUST be added to lifecycle tests") for _, tt := range tests { t.Run(string(tt.processor), func(t *testing.T) { factory, ok := procFactories[tt.processor] require.True(t, ok) assert.Equal(t, tt.processor, factory.Type()) assert.EqualValues(t, config.NewComponentID(tt.processor), factory.CreateDefaultConfig().ID()) if tt.skipLifecycle { t.Skip("Skipping lifecycle processor check for:", tt.processor) return } verifyProcessorLifecycle(t, factory, tt.getConfigFn) }) } }
explode_data.jsonl/27260
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1127 }
[ 2830, 3393, 3675, 7423, 1087, 1155, 353, 8840, 836, 8, 341, 50960, 17417, 2433, 11, 1848, 1669, 34085, 741, 17957, 35699, 1155, 11, 1848, 692, 197, 15782, 17417, 2433, 1669, 678, 17417, 2433, 29012, 1087, 271, 78216, 1669, 3056, 1235, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMinAgg(t *testing.T) { agg := Min.AggFunc() assert.Equal(t, Min, agg.AggType()) assert.Equal(t, 1.0, agg.Aggregate(1, 99.0)) assert.Equal(t, 1.0, agg.Aggregate(99.0, 1)) }
explode_data.jsonl/69052
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 6217, 9042, 70, 1155, 353, 8840, 836, 8, 341, 197, 15718, 1669, 3386, 875, 14398, 9626, 741, 6948, 12808, 1155, 11, 3386, 11, 50020, 875, 14398, 929, 2398, 6948, 12808, 1155, 11, 220, 16, 13, 15, 11, 50020, 49850, 14240, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEventMultiValueWithArrayUnpack(t *testing.T) { definition := `[{"name": "test", "type": "event", "inputs": [{"indexed": false, "name":"value1", "type":"uint8[2]"},{"indexed": false, "name":"value2", "type":"uint8"}]}]` type testStruct struct { Value1 [2]uint8 Value2 uint8 } abi, err := JSON(strings.NewReader(definition)) require.NoError(t, err) var b bytes.Buffer var i uint8 = 1 for ; i <= 3; i++ { b.Write(packNum(reflect.ValueOf(i))) } var rst testStruct require.NoError(t, abi.Unpack(&rst, "test", b.Bytes())) require.Equal(t, [2]uint8{1, 2}, rst.Value1) require.Equal(t, uint8(3), rst.Value2) }
explode_data.jsonl/43925
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 260 }
[ 2830, 3393, 1556, 20358, 1130, 2354, 1857, 1806, 4748, 1155, 353, 8840, 836, 8, 341, 7452, 4054, 1669, 77644, 4913, 606, 788, 330, 1944, 497, 330, 1313, 788, 330, 3087, 497, 330, 24941, 788, 61753, 97473, 788, 895, 11, 330, 606, 3252,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMSSQLReconnect(t *testing.T) { params := newConnParams() address, err := params.getConnAddress() if err != nil { t.Skipf("Skipping test: %v", err) } ln, err := net.Listen("tcp", "127.0.0.1:0") if err != nil { t.Fatal(err) } defer ln.Close() err = params.updateConnAddress(ln.Addr().String()) if err != nil { t.Fatal(err) } proxy := new(tcpProxy) go proxy.run(ln, address) db, sc, err := mssqlConnectWithParams(params) if err != nil { t.Fatal(err) } defer closeDB(t, db, sc, sc) testConn := func() error { var n int64 err := db.QueryRow("select count(*) from dbo.temp").Scan(&n) if err != nil { return err } if n != 1 { return fmt.Errorf("unexpected return value: should=1, is=%v", n) } return nil } db.Exec("drop table dbo.temp") exec(t, db, `create table dbo.temp (name varchar(50))`) exec(t, db, `insert into dbo.temp (name) values ('alex')`) err = testConn() if err != nil { t.Fatal(err) } proxy.pause() time.Sleep(100 * time.Millisecond) err = testConn() if err == nil { t.Fatal("database IO should fail, but succeeded") } proxy.restart() err = testConn() if err != nil { t.Fatal(err) } exec(t, db, "drop table dbo.temp") }
explode_data.jsonl/33563
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 528 }
[ 2830, 3393, 44, 1220, 3588, 693, 6459, 1155, 353, 8840, 836, 8, 341, 25856, 1669, 501, 9701, 4870, 741, 63202, 11, 1848, 1669, 3628, 670, 9701, 4286, 741, 743, 1848, 961, 2092, 341, 197, 3244, 57776, 69, 445, 85945, 1273, 25, 1018, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidateConfigMapUpdate(t *testing.T) { newConfigMap := func(version, name, namespace string, data map[string]string) core.ConfigMap { return core.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: name, Namespace: namespace, ResourceVersion: version, }, Data: data, } } var ( validConfigMap = newConfigMap("1", "validname", "validns", map[string]string{"key": "value"}) noVersion = newConfigMap("", "validname", "validns", map[string]string{"key": "value"}) ) cases := []struct { name string newCfg core.ConfigMap oldCfg core.ConfigMap isValid bool }{ { name: "valid", newCfg: validConfigMap, oldCfg: validConfigMap, isValid: true, }, { name: "invalid", newCfg: noVersion, oldCfg: validConfigMap, isValid: false, }, } for _, tc := range cases { errs := ValidateConfigMapUpdate(&tc.newCfg, &tc.oldCfg) if tc.isValid && len(errs) > 0 { t.Errorf("%v: unexpected error: %v", tc.name, errs) } if !tc.isValid && len(errs) == 0 { t.Errorf("%v: unexpected non-error", tc.name) } } }
explode_data.jsonl/1060
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 495 }
[ 2830, 3393, 17926, 2648, 2227, 4289, 1155, 353, 8840, 836, 8, 341, 8638, 2648, 2227, 1669, 2915, 37770, 11, 829, 11, 4473, 914, 11, 821, 2415, 14032, 30953, 8, 6200, 10753, 2227, 341, 197, 853, 6200, 10753, 2227, 515, 298, 23816, 1217...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCallFunc(t *testing.T) { var err error var conv = &KConv method := KDbug.GetMethod(conv, "BaseConvert") _, err = CallFunc(method, 0, "12345", "10", 16) if err != nil { println(err.Error()) } }
explode_data.jsonl/74248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 88 }
[ 2830, 3393, 7220, 9626, 1155, 353, 8840, 836, 8, 341, 2405, 1848, 1465, 198, 2405, 5686, 284, 609, 42, 34892, 198, 42257, 1669, 62990, 2313, 2234, 3523, 54995, 11, 330, 3978, 12012, 5130, 197, 6878, 1848, 284, 7143, 9626, 17262, 11, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUserService_CreateUser(t *testing.T) { server := httptest.NewServer(http.HandlerFunc(func(writer http.ResponseWriter, request *http.Request) { apiName := "createUser" response, err := ReadData(apiName, "UserService") if err != nil { t.Errorf("Failed to read response data due to: %v", err) } fmt.Fprintf(writer, response[apiName]) })) defer server.Close() client := NewClient(server.URL, "APIKEY", "SECRETKEY", true) params := client.User.NewCreateUserParams("admin", "user.xyz.com", "firstname", "lastname", "password", "dummyUser") resp, err := client.User.CreateUser(params) if err != nil { t.Errorf("Failed to create user due to %v", err) return } if resp == nil || resp.Username != "dummyUser" { t.Errorf("Failed to create user") } }
explode_data.jsonl/60342
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 288 }
[ 2830, 3393, 60004, 34325, 1474, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 38356, 1758, 37508, 11, 1681, 353, 1254, 9659, 8, 341, 197, 54299, 675, 1669, 330, 3182, 1474, 698, 197, 21735, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestReportWorkflowResource_WorkflowCompleted_WorkflowNotFound(t *testing.T) { store, manager, run := initWithOneTimeRun(t) defer store.Close() workflow := util.NewWorkflow(&v1alpha1.Workflow{ ObjectMeta: v1.ObjectMeta{ Name: "non-existent-workflow", Namespace: "kubeflow", UID: types.UID(run.UUID), Labels: map[string]string{util.LabelKeyWorkflowRunId: run.UUID}, }, Status: v1alpha1.WorkflowStatus{Phase: v1alpha1.NodeFailed}, }) err := manager.ReportWorkflowResource(workflow) require.NotNil(t, err) assert.Equalf(t, codes.NotFound, err.(*util.UserError).ExternalStatusCode(), "Expected not found error, but got %s", err.Error()) assert.Contains(t, err.Error(), "Failed to add PersistedFinalState label") }
explode_data.jsonl/77059
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 287 }
[ 2830, 3393, 10361, 62768, 4783, 87471, 4965, 22724, 87471, 4965, 10372, 1155, 353, 8840, 836, 8, 341, 57279, 11, 6645, 11, 1598, 1669, 13864, 3966, 1462, 6727, 1155, 340, 16867, 3553, 10421, 741, 197, 56249, 1669, 4094, 7121, 62768, 2099,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNullState_Execute(t *testing.T) { _, followup, _, err := (&null{}).ExecuteInbound(&stateMachineMsg{}, "", &context{}) require.NoError(t, err) require.IsType(t, &noOp{}, followup) }
explode_data.jsonl/2094
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 77 }
[ 2830, 3393, 3280, 1397, 83453, 1155, 353, 8840, 836, 8, 341, 197, 6878, 1795, 454, 11, 8358, 1848, 1669, 15899, 2921, 6257, 568, 17174, 641, 10891, 2099, 2454, 21605, 6611, 22655, 7342, 609, 2147, 37790, 17957, 35699, 1155, 11, 1848, 34...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestNewRunCommandParseError(t *testing.T) { f := newFakeKoolRun([]builder.Command{}, errors.New("parse error")) cmd := NewRunCommand(f) cmd.SetArgs([]string{"script"}) if err := cmd.Execute(); err != nil { t.Errorf("unexpected error executing run command; error: %v", err) } if !f.out.(*shell.FakeOutputWriter).CalledError { t.Error("did not call Error for parse error") } expectedError := "parse error" if gotError := f.out.(*shell.FakeOutputWriter).Err.Error(); gotError != expectedError { t.Errorf("expecting error '%s', got '%s'", expectedError, gotError) } if !f.exiter.(*shell.FakeExiter).Exited() { t.Error("got an parse error, but command did not exit") } }
explode_data.jsonl/60854
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 248 }
[ 2830, 3393, 3564, 6727, 4062, 14463, 1454, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 52317, 42, 1749, 6727, 10556, 17850, 12714, 22655, 5975, 7121, 445, 6400, 1465, 5455, 25920, 1669, 1532, 6727, 4062, 955, 692, 25920, 4202, 4117, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestMissingOrInvalidOSArchRun(t *testing.T) { ctx := testutils.NewRktRunCtx() defer ctx.Cleanup() tests := getMissingOrInvalidTests(t, ctx) defer osArchTestRemoveImages(tests) for i, tt := range tests { t.Logf("Running test #%v: %v", i, tt.rktCmd) runRktAndCheckOutput(t, tt.rktCmd, tt.expectedLine, tt.expectError) } }
explode_data.jsonl/14979
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 142 }
[ 2830, 3393, 25080, 2195, 7928, 3126, 18727, 6727, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1273, 6031, 7121, 49, 5840, 6727, 23684, 741, 16867, 5635, 727, 60639, 741, 78216, 1669, 633, 25080, 2195, 7928, 18200, 1155, 11, 5635, 340, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRgba(t *testing.T) { validate := New() s := "rgba(0,31,255,0.5)" errs := validate.Var(s, "rgba") Equal(t, errs, nil) s = "rgba(0,31,255,0.12)" errs = validate.Var(s, "rgba") Equal(t, errs, nil) s = "rgba(12%,55%,100%,0.12)" errs = validate.Var(s, "rgba") Equal(t, errs, nil) s = "rgba( 0, 31, 255, 0.5)" errs = validate.Var(s, "rgba") Equal(t, errs, nil) s = "rgba(12%,55,100%,0.12)" errs = validate.Var(s, "rgba") NotEqual(t, errs, nil) AssertError(t, errs, "", "", "", "", "rgba") s = "rgb(0, 31, 255)" errs = validate.Var(s, "rgba") NotEqual(t, errs, nil) AssertError(t, errs, "", "", "", "", "rgba") s = "rgb(1,349,275,0.5)" errs = validate.Var(s, "rgba") NotEqual(t, errs, nil) AssertError(t, errs, "", "", "", "", "rgba") s = "rgb(01,31,255,0.5)" errs = validate.Var(s, "rgba") NotEqual(t, errs, nil) AssertError(t, errs, "", "", "", "", "rgba") i := 1 errs = validate.Var(i, "rgba") NotEqual(t, errs, nil) AssertError(t, errs, "", "", "", "", "rgba") }
explode_data.jsonl/77320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 532 }
[ 2830, 3393, 49, 56380, 1155, 353, 8840, 836, 8, 1476, 197, 7067, 1669, 1532, 2822, 1903, 1669, 330, 20400, 7, 15, 11, 18, 16, 11, 17, 20, 20, 11, 15, 13, 20, 12954, 9859, 82, 1669, 9593, 87968, 1141, 11, 330, 20400, 1138, 197, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSetImageCmdRolloutNotFound(t *testing.T) { tf, o := options.NewFakeArgoRolloutsOptions() defer tf.Cleanup() cmd := NewCmdSetImage(o) cmd.PersistentPreRunE = o.PersistentPreRunE cmd.SetArgs([]string{"does-not-exist", "guestbook=argoproj/rollouts-demo:yellow"}) err := cmd.Execute() assert.Error(t, err) stdout := o.Out.(*bytes.Buffer).String() stderr := o.ErrOut.(*bytes.Buffer).String() assert.Empty(t, stdout) assert.Equal(t, "Error: rollouts.argoproj.io \"does-not-exist\" not found\n", stderr) }
explode_data.jsonl/5008
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 209 }
[ 2830, 3393, 1649, 1906, 15613, 32355, 411, 10372, 1155, 353, 8840, 836, 8, 341, 3244, 69, 11, 297, 1669, 2606, 7121, 52317, 2735, 78, 32355, 11672, 3798, 741, 16867, 6409, 727, 60639, 2822, 25920, 1669, 1532, 15613, 1649, 1906, 10108, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetOwnedOrgsByUserIDDesc(t *testing.T) { assert.NoError(t, PrepareTestDatabase()) orgs, err := GetOwnedOrgsByUserIDDesc(5, "id") assert.NoError(t, err) if assert.Len(t, orgs, 2) { assert.EqualValues(t, 7, orgs[0].ID) assert.EqualValues(t, 6, orgs[1].ID) } orgs, err = GetOwnedOrgsByUserIDDesc(4, "id") assert.NoError(t, err) assert.Len(t, orgs, 0) }
explode_data.jsonl/71066
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 1949, 57641, 42437, 82, 1359, 36899, 11065, 1155, 353, 8840, 836, 8, 341, 6948, 35699, 1155, 11, 31166, 2271, 5988, 12367, 87625, 82, 11, 1848, 1669, 2126, 57641, 42437, 82, 1359, 36899, 11065, 7, 20, 11, 330, 307, 1138, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_NoopDelete_DeletesAfterServiceAccountDeleted(t *testing.T) { env := BuildEnv(t) logger := Logger{} kapp := Kapp{t, env.Namespace, logger} kubectl := Kubectl{t, env.Namespace, logger} sas := ServiceAccounts{env.Namespace} name := "instl-pkg-noop-delete" cfgMapName := "configmap" appYaml := fmt.Sprintf(`--- apiVersion: kappctrl.k14s.io/v1alpha1 kind: App metadata: name: %s spec: serviceAccountName: kappctrl-e2e-ns-sa noopDelete: true fetch: - inline: paths: file.yml: | apiVersion: v1 kind: ConfigMap metadata: name: %s data: key: value template: - ytt: {} deploy: - kapp: {}`, name, cfgMapName) + sas.ForNamespaceYAML() cleanUpApp := func() { kapp.Run([]string{"delete", "-a", name}) } cleanUpConfigMap := func() { kubectl.Run([]string{"delete", "configmap", cfgMapName}) } cleanUpApp() defer cleanUpApp() defer cleanUpConfigMap() logger.Section("deploy", func() { kapp.RunWithOpts([]string{"deploy", "-f", "-", "-a", name}, RunOpts{StdinReader: strings.NewReader(appYaml)}) }) kubectl.Run([]string{"wait", "--for=condition=ReconcileSucceeded", "apps/" + name, "--timeout", "1m"}) logger.Section("delete Service Account and App", func() { kubectl.Run([]string{"delete", "serviceaccount", "kappctrl-e2e-ns-sa"}) cleanUpApp() }) logger.Section("check ConfigMap still exists after delete", func() { kubectl.Run([]string{"get", "configmap/" + cfgMapName}) }) }
explode_data.jsonl/36219
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 644 }
[ 2830, 3393, 36989, 453, 6435, 24597, 13881, 6025, 1860, 7365, 26039, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 7854, 14359, 1155, 340, 17060, 1669, 9514, 16094, 16463, 676, 1669, 730, 676, 90, 83, 11, 6105, 46011, 11, 5925, 532, 16463,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestColorMConcat(t *testing.T) { var a, b ebiten.ColorM a.SetElement(1, 2, -1) a.Concat(b) if got, want := a.Element(1, 2), -1.0; got != want { t.Errorf("got: %f, want: %f", got, want) } }
explode_data.jsonl/48455
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 1636, 44, 78440, 1155, 353, 8840, 836, 8, 341, 2405, 264, 11, 293, 384, 4489, 268, 6669, 44, 198, 11323, 4202, 1691, 7, 16, 11, 220, 17, 11, 481, 16, 340, 11323, 67599, 1883, 340, 743, 2684, 11, 1366, 1669, 264, 20139,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestServer_Query_DropAndRecreateSeries(t *testing.T) { t.Parallel() s := OpenServer(NewConfig()) defer s.Close() test := tests.load(t, "drop_and_recreate_series") if err := s.CreateDatabaseAndRetentionPolicy(test.database(), NewRetentionPolicySpec(test.retentionPolicy(), 1, 0), true); err != nil { t.Fatal(err) } for i, query := range test.queries { t.Run(query.name, func(t *testing.T) { if i == 0 { if err := test.init(s); err != nil { t.Fatalf("test init failed: %s", err) } } if query.skip { t.Skipf("SKIP:: %s", query.name) } if err := query.Execute(s); err != nil { t.Error(query.Error(err)) } else if !query.success() { t.Error(query.failureMessage()) } }) } // Re-write data and test again. retest := tests.load(t, "drop_and_recreate_series_retest") for i, query := range retest.queries { t.Run(query.name, func(t *testing.T) { if i == 0 { if err := retest.init(s); err != nil { t.Fatalf("test init failed: %s", err) } } if query.skip { t.Skipf("SKIP:: %s", query.name) } if err := query.Execute(s); err != nil { t.Error(query.Error(err)) } else if !query.success() { t.Error(query.failureMessage()) } }) } }
explode_data.jsonl/61240
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 553 }
[ 2830, 3393, 5475, 48042, 1557, 887, 3036, 693, 3182, 25544, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 1903, 1669, 5264, 5475, 35063, 2648, 2398, 16867, 274, 10421, 2822, 18185, 1669, 7032, 5104, 1155, 11, 330, 6719, 8378, 12...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDelete(t *testing.T) { catalogStore, err := store.Open("catalog_delete", store.DefaultOptions()) require.NoError(t, err) defer os.RemoveAll("catalog_delete") dataStore, err := store.Open("sqldata_delete", store.DefaultOptions()) require.NoError(t, err) defer os.RemoveAll("sqldata_delete") engine, err := NewEngine(catalogStore, dataStore, DefaultOptions().WithPrefix(sqlPrefix)) require.NoError(t, err) _, err = engine.ExecStmt("CREATE DATABASE db1", nil, true) require.NoError(t, err) _, err = engine.ExecStmt("DELETE FROM table1", nil, true) require.ErrorIs(t, err, ErrNoDatabaseSelected) err = engine.UseDatabase("db1") require.NoError(t, err) _, err = engine.ExecStmt(`CREATE TABLE table1 ( id INTEGER, title VARCHAR[50], active BOOLEAN, PRIMARY KEY id )`, nil, true) require.NoError(t, err) _, err = engine.ExecStmt("CREATE UNIQUE INDEX ON table1(title)", nil, true) require.NoError(t, err) _, err = engine.ExecStmt("CREATE INDEX ON table1(active)", nil, true) require.NoError(t, err) params, err := engine.InferParameters("DELETE FROM table1 WHERE active = @active") require.NoError(t, err) require.NotNil(t, params) require.Len(t, params, 1) require.Equal(t, params["active"], BooleanType) _, err = engine.ExecStmt("DELETE FROM table2", nil, true) require.ErrorIs(t, err, ErrTableDoesNotExist) _, err = engine.ExecStmt("DELETE FROM table1 WHERE name = 'name1'", nil, true) require.ErrorIs(t, err, ErrColumnDoesNotExist) t.Run("delete on empty table should complete without issues", func(t *testing.T) { summary, err := engine.ExecStmt("DELETE FROM table1", nil, true) require.NoError(t, err) require.NotNil(t, summary) require.Zero(t, summary.UpdatedRows) }) rowCount := 10 for i := 0; i < rowCount; i++ { _, err = engine.ExecStmt(fmt.Sprintf(` INSERT INTO table1 (id, title, active) VALUES (%d, 'title%d', %v)`, i, i, i%2 == 0), nil, true) require.NoError(t, err) } t.Run("deleting with contradiction should not produce any change", func(t *testing.T) { summary, err := 
engine.ExecStmt("DELETE FROM table1 WHERE false", nil, true) require.NoError(t, err) require.NotNil(t, summary) require.Zero(t, summary.UpdatedRows) }) t.Run("deleting active rows should remove half of the rows", func(t *testing.T) { summary, err := engine.ExecStmt("DELETE FROM table1 WHERE active = @active", map[string]interface{}{"active": true}, true) require.NoError(t, err) require.NotNil(t, summary) require.Equal(t, rowCount/2, summary.UpdatedRows) r, err := engine.QueryStmt("SELECT COUNT() FROM table1", nil, true) require.NoError(t, err) row, err := r.Read() require.NoError(t, err) require.Equal(t, int64(rowCount/2), row.Values[EncodeSelector("", "db1", "table1", "col0")].Value()) err = r.Close() require.NoError(t, err) r, err = engine.QueryStmt("SELECT COUNT() FROM table1 WHERE active", nil, true) require.NoError(t, err) row, err = r.Read() require.NoError(t, err) require.Equal(t, int64(0), row.Values[EncodeSelector("", "db1", "table1", "col0")].Value()) err = r.Close() require.NoError(t, err) }) }
explode_data.jsonl/64061
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1207 }
[ 2830, 3393, 6435, 1155, 353, 8840, 836, 8, 341, 1444, 7750, 6093, 11, 1848, 1669, 3553, 12953, 445, 26539, 11353, 497, 3553, 13275, 3798, 2398, 17957, 35699, 1155, 11, 1848, 340, 16867, 2643, 84427, 445, 26539, 11353, 5130, 8924, 6093, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInitTable(t *testing.T) { ctx := cdcContext.NewBackendContext4Test(true) p, tester := initProcessor4Test(ctx, t) var err error // init tick _, err = p.Tick(ctx, p.changefeed) require.Nil(t, err) tester.MustApplyPatches() p.changefeed.PatchTaskStatus(p.captureInfo.ID, func(status *model.TaskStatus) (*model.TaskStatus, bool, error) { status.Tables[1] = &model.TableReplicaInfo{StartTs: 20} status.Tables[2] = &model.TableReplicaInfo{StartTs: 30} return status, true, nil }) tester.MustApplyPatches() _, err = p.Tick(ctx, p.changefeed) require.Nil(t, err) tester.MustApplyPatches() require.NotNil(t, p.tables[1]) require.NotNil(t, p.tables[2]) }
explode_data.jsonl/81939
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 3803, 2556, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 272, 7628, 1972, 7121, 29699, 1972, 19, 2271, 3715, 340, 3223, 11, 37111, 1669, 2930, 22946, 19, 2271, 7502, 11, 259, 340, 2405, 1848, 1465, 198, 197, 322, 2930, 9341,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPluginRunState(t *testing.T) { expect := ttesting.NewExpect(t) pluginState := NewPluginRunState() expect.Equal(PluginStateInitializing, pluginState.GetState()) pluginState.SetState(PluginStateWaiting) expect.Equal(PluginStateWaiting, pluginState.GetState()) pluginState.SetState(PluginStateActive) expect.Equal(PluginStateActive, pluginState.GetState()) pluginState.SetState(PluginStateStopping) expect.Equal(PluginStateStopping, pluginState.GetState()) var wg sync.WaitGroup pluginState.SetWorkerWaitGroup(&wg) pluginState.AddWorker() pluginState.AddWorker() done := new(int32) go func() { pluginState.WorkerDone() pluginState.WorkerDone() wg.Wait() atomic.StoreInt32(done, 1) }() // timeout for go routine. time.Sleep(500 * time.Millisecond) expect.Equal(atomic.LoadInt32(done), int32(1)) }
explode_data.jsonl/52119
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 297 }
[ 2830, 3393, 11546, 6727, 1397, 1155, 353, 8840, 836, 8, 341, 24952, 1669, 259, 8840, 7121, 17536, 1155, 340, 197, 9138, 1397, 1669, 1532, 11546, 6727, 1397, 2822, 24952, 12808, 7, 11546, 1397, 76775, 11, 9006, 1397, 2234, 1397, 12367, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetCertificateUser(t *testing.T) { defer leaktest.AfterTest(t)() // Nil TLS state. if _, err := security.GetCertificateUser(nil); err == nil { t.Error("unexpected success") } // No certificates. if _, err := security.GetCertificateUser(makeFakeTLSState(nil, nil)); err == nil { t.Error("unexpected success") } // Good request: single certificate. if name, err := security.GetCertificateUser(makeFakeTLSState([]string{"foo"}, []int{2})); err != nil { t.Error(err) } else if name != "foo" { t.Errorf("expected name: foo, got: %s", name) } // Request with multiple certs, but only one chain (eg: origin certs are client and CA). if name, err := security.GetCertificateUser(makeFakeTLSState([]string{"foo", "CA"}, []int{2})); err != nil { t.Error(err) } else if name != "foo" { t.Errorf("expected name: foo, got: %s", name) } // Always use the first certificate. if name, err := security.GetCertificateUser(makeFakeTLSState([]string{"foo", "bar"}, []int{2, 1})); err != nil { t.Error(err) } else if name != "foo" { t.Errorf("expected name: foo, got: %s", name) } }
explode_data.jsonl/23537
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 400 }
[ 2830, 3393, 1949, 33202, 1474, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 197, 322, 32274, 41654, 1584, 624, 743, 8358, 1848, 1669, 4763, 2234, 33202, 1474, 27907, 1215, 1848, 621, 2092, 341, 197, 3244,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestChangeStream_errorMissingResponseToken(t *testing.T) { t.Parallel() if testing.Short() { t.Skip() } skipIfBelow36(t) if os.Getenv("TOPOLOGY") != "replica_set" { t.Skip() } coll := createTestCollection(t, nil, nil) // Ensure the database is created. _, err := coll.InsertOne(context.Background(), bson.NewDocument(bson.EC.Int32("y", 1))) require.NoError(t, err) // Project out the response token changes, err := coll.Watch(context.Background(), []*bson.Document{ bson.NewDocument( bson.EC.SubDocumentFromElements("$project", bson.EC.Int32("_id", 0))), }) require.NoError(t, err) _, err = coll.InsertOne(context.Background(), bson.NewDocument(bson.EC.Int32("x", 1))) require.NoError(t, err) getNextChange(changes) require.Error(t, changes.Decode(bson.NewDocument())) }
explode_data.jsonl/68239
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 310 }
[ 2830, 3393, 4072, 3027, 4096, 25080, 2582, 3323, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 741, 197, 532, 1903, 13389, 2679, 38214, 18, 21, 1155, 692, 743, 2643, 64883, 445, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestStartAndWaitLaunchError(t *testing.T) { fakeRuntime := &mock.Runtime{} fakeRuntime.StartStub = func(_ *ccprovider.ChaincodeContainerInfo, _ []byte) error { return errors.New("Bad lunch; upset stomach") } code := getTarGZ(t, "src/dummy/dummy.go", []byte("code")) fakePackageProvider := &mock.PackageProvider{} fakePackageProvider.GetChaincodeCodePackageReturns(code, nil) launcher := &RuntimeLauncher{ Runtime: fakeRuntime, Registry: NewHandlerRegistry(false), StartupTimeout: 10 * time.Second, PackageProvider: fakePackageProvider, Metrics: NewLaunchMetrics(&disabled.Provider{}), } ccci := &ccprovider.ChaincodeContainerInfo{ Type: "GOLANG", Name: "testcc", Version: "0", ContainerType: "DOCKER", } //actual test - container launch gives error err := launcher.Launch(ccci) if err == nil { t.Fatalf("expected error but succeeded") } assert.EqualError(t, err, "error starting container: Bad lunch; upset stomach") }
explode_data.jsonl/74177
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 376 }
[ 2830, 3393, 3479, 92812, 32067, 1454, 1155, 353, 8840, 836, 8, 341, 1166, 726, 15123, 1669, 609, 16712, 16706, 16094, 1166, 726, 15123, 12101, 33838, 284, 2915, 2490, 353, 638, 19979, 98269, 1851, 4502, 1731, 11, 716, 3056, 3782, 8, 146...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApixu_CurrentWithQueryError(t *testing.T) { a := &apixu{ config: Config{}, } res, err := a.Current(" ") assert.Nil(t, res) assert.Error(t, err) }
explode_data.jsonl/14940
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 10611, 941, 84, 40735, 2354, 2859, 1454, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 609, 391, 941, 84, 515, 197, 25873, 25, 5532, 38837, 197, 630, 10202, 11, 1848, 1669, 264, 11517, 445, 14167, 6948, 59678, 1155, 11, 592, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSetHTTPClientTimeout(t *testing.T) { SetHTTPClientTimeout(50) if GetHTTPClientTimeout() != 50 { t.Errorf("Test 1: FAIL: bad value: %d", GetHTTPClientTimeout()) } SetHTTPClientTimeout(-1) if GetHTTPClientTimeout() != 50 { t.Errorf("Test 2: FAIL: bad value: %d", GetHTTPClientTimeout()) } }
explode_data.jsonl/48200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 112 }
[ 2830, 3393, 1649, 9230, 2959, 7636, 1155, 353, 8840, 836, 8, 341, 22212, 9230, 2959, 7636, 7, 20, 15, 340, 743, 2126, 9230, 2959, 7636, 368, 961, 220, 20, 15, 341, 197, 3244, 13080, 445, 2271, 220, 16, 25, 33107, 25, 3873, 897, 25...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestReadConfigFile(t *testing.T) { config, err := ReadConfigFile(telemetryConfig) if err != nil { t.Errorf("Read telemetry config failed with error %v", err) } if config.ReportToHostIntervalInSeconds != 30 { t.Errorf("ReportToHostIntervalInSeconds not expected value. Got %d", config.ReportToHostIntervalInSeconds) } config, err = ReadConfigFile("a.config") if err == nil { t.Errorf("[Telemetry] Didn't throw not found error: %v", err) } config, err = ReadConfigFile("telemetry.go") if err == nil { t.Errorf("[Telemetry] Didn't report invalid telemetry config: %v", err) } }
explode_data.jsonl/49200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 211 }
[ 2830, 3393, 4418, 2648, 1703, 1155, 353, 8840, 836, 8, 341, 25873, 11, 1848, 1669, 4457, 2648, 1703, 71847, 35958, 2648, 340, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 4418, 61037, 2193, 4641, 448, 1465, 1018, 85, 497, 1848, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestDeleteState(t *testing.T) { stateTestWrapper, state := createFreshDBAndConstructState(t) // Add keys state.TxBegin("txUuid") state.Set("chaincode1", "key1", []byte("value1")) state.Set("chaincode1", "key2", []byte("value2")) state.TxFinish("txUuid", true) state.getStateDelta() stateTestWrapper.persistAndClearInMemoryChanges(0) // confirm keys are present testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("value1")) testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key2", true), []byte("value2")) // Delete the State err := state.DeleteState() if err != nil { t.Fatalf("Error deleting the state: %s", err) } // confirm the values are empty testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", false)) testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", false)) testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key1", true)) testutil.AssertNil(t, stateTestWrapper.get("chaincode1", "key2", true)) // Confirm that we can now store new stuff in the state state.TxBegin("txUuid") state.Set("chaincode1", "key1", []byte("value1")) state.Set("chaincode1", "key2", []byte("value2")) state.TxFinish("txUuid", true) state.getStateDelta() stateTestWrapper.persistAndClearInMemoryChanges(1) // confirm keys are present testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key1", true), []byte("value1")) testutil.AssertEquals(t, stateTestWrapper.get("chaincode1", "key2", true), []byte("value2")) }
explode_data.jsonl/69019
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 551 }
[ 2830, 3393, 6435, 1397, 1155, 353, 8840, 836, 8, 1476, 24291, 2271, 11542, 11, 1584, 1669, 1855, 55653, 3506, 3036, 28468, 1397, 1155, 692, 197, 322, 2691, 6894, 198, 24291, 81362, 11135, 445, 3998, 38431, 1138, 24291, 4202, 445, 8819, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDifferentFeePreferences(t *testing.T) { ctx := createSweeperTestContext(t) // Throughout this test, we'll be attempting to sweep three inputs, two // with the higher fee preference, and the last with the lower. We do // this to ensure the sweeper can broadcast distinct transactions for // each sweep with a different fee preference. lowFeePref := FeePreference{ ConfTarget: 12, } ctx.estimator.blocksToFee[lowFeePref.ConfTarget] = 5000 highFeePref := FeePreference{ ConfTarget: 6, } ctx.estimator.blocksToFee[highFeePref.ConfTarget] = 10000 input1 := spendableInputs[0] resultChan1, err := ctx.sweeper.SweepInput(input1, highFeePref) if err != nil { t.Fatal(err) } input2 := spendableInputs[1] resultChan2, err := ctx.sweeper.SweepInput(input2, highFeePref) if err != nil { t.Fatal(err) } input3 := spendableInputs[2] resultChan3, err := ctx.sweeper.SweepInput(input3, lowFeePref) if err != nil { t.Fatal(err) } // Start the sweeper's batch ticker, which should cause the sweep // transactions to be broadcast in order of high to low fee preference. ctx.tick() // The first transaction broadcast should be the one spending the higher // fee rate inputs. sweepTx1 := ctx.receiveTx() assertTxSweepsInputs(t, &sweepTx1, input1, input2) // The second should be the one spending the lower fee rate inputs. sweepTx2 := ctx.receiveTx() assertTxSweepsInputs(t, &sweepTx2, input3) // With the transactions broadcast, we'll mine a block to so that the // result is delivered to each respective client. ctx.backend.mine() resultChans := []chan Result{resultChan1, resultChan2, resultChan3} for _, resultChan := range resultChans { ctx.expectResult(resultChan, nil) } ctx.finish(1) }
explode_data.jsonl/34233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 598 }
[ 2830, 3393, 69123, 41941, 14306, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 1855, 50, 896, 10436, 2271, 1972, 1155, 692, 197, 322, 45882, 419, 1273, 11, 582, 3278, 387, 19405, 311, 23146, 2326, 11127, 11, 1378, 198, 197, 322, 448, 279...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestGetResourceLimiter(t *testing.T) { gkeManagerMock := &gkeManagerMock{} resourceLimiter := cloudprovider.NewResourceLimiter( map[string]int64{cloudprovider.ResourceNameCores: 1, cloudprovider.ResourceNameMemory: 10000000}, map[string]int64{cloudprovider.ResourceNameCores: 10, cloudprovider.ResourceNameMemory: 100000000}) gke := &GkeCloudProvider{ gkeManager: gkeManagerMock, resourceLimiterFromFlags: resourceLimiter, } // Return default. gkeManagerMock.On("GetResourceLimiter").Return((*cloudprovider.ResourceLimiter)(nil), nil).Once() returnedResourceLimiter, err := gke.GetResourceLimiter() assert.NoError(t, err) assert.Equal(t, resourceLimiter, returnedResourceLimiter) // Return for GKE. resourceLimiterGKE := cloudprovider.NewResourceLimiter( map[string]int64{cloudprovider.ResourceNameCores: 2, cloudprovider.ResourceNameMemory: 20000000}, map[string]int64{cloudprovider.ResourceNameCores: 5, cloudprovider.ResourceNameMemory: 200000000}) gkeManagerMock.On("GetResourceLimiter").Return(resourceLimiterGKE, nil).Once() returnedResourceLimiterGKE, err := gke.GetResourceLimiter() assert.NoError(t, err) assert.Equal(t, returnedResourceLimiterGKE, resourceLimiterGKE) // Error in GceManager. gkeManagerMock.On("GetResourceLimiter").Return((*cloudprovider.ResourceLimiter)(nil), fmt.Errorf("some error")).Once() returnedResourceLimiter, err = gke.GetResourceLimiter() assert.Error(t, err) }
explode_data.jsonl/30479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 495 }
[ 2830, 3393, 1949, 4783, 43, 17700, 1155, 353, 8840, 836, 8, 341, 3174, 440, 2043, 11571, 1669, 609, 70, 440, 2043, 11571, 16094, 50346, 43, 17700, 1669, 9437, 19979, 7121, 4783, 43, 17700, 1006, 197, 19567, 14032, 63025, 21, 19, 90, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCanTriggerJob(t *testing.T) { t.Parallel() org := "org" trustedUser := "trusted" untrustedUser := "untrusted" pcfg := &plugins.Configuration{ Triggers: []plugins.Trigger{{Repos: []string{org}}}, } pcfgGetter := func() *plugins.Configuration { return pcfg } ghc := &fakegithub.FakeClient{ OrgMembers: map[string][]string{org: {trustedUser}}, } pj := prowapi.ProwJob{ Spec: prowapi.ProwJobSpec{ Refs: &prowapi.Refs{ Org: org, Repo: "repo", Pulls: []prowapi.Pull{{Author: trustedUser}}, }, Type: prowapi.PresubmitJob, }, } testCases := []struct { name string user string expectAllowed bool }{ { name: "Unauthorized user can not rerun", user: untrustedUser, expectAllowed: false, }, { name: "Authorized user can re-run", user: trustedUser, expectAllowed: true, }, } log := logrus.NewEntry(logrus.StandardLogger()) for _, tc := range testCases { result, err := canTriggerJob(tc.user, pj, nil, ghc, pcfgGetter, log) if err != nil { t.Fatalf("error: %v", err) } if result != tc.expectAllowed { t.Errorf("got result %t, expected %t", result, tc.expectAllowed) } } }
explode_data.jsonl/66274
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 6713, 17939, 12245, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 87625, 1669, 330, 1775, 698, 25583, 27145, 1474, 1669, 330, 83837, 698, 20479, 83837, 1474, 1669, 330, 359, 83837, 1837, 3223, 14072, 1669, 609, 18716, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRedis_Sscan(t *testing.T) { runOnRedis(t, func(client *Redis) { key := "list" var list []string for i := 0; i < 1550; i++ { list = append(list, randomStr(i)) } lens, err := client.Sadd(key, list) assert.Nil(t, err) assert.Equal(t, lens, 1550) var cursor uint64 = 0 sum := 0 for { _, _, err := NewRedis(client.Addr, "").Sscan(key, cursor, "", 100) assert.NotNil(t, err) keys, next, err := client.Sscan(key, cursor, "", 100) assert.Nil(t, err) sum += len(keys) if next == 0 { break } cursor = next } assert.Equal(t, sum, 1550) _, err = NewRedis(client.Addr, "").Del(key) assert.NotNil(t, err) _, err = client.Del(key) assert.Nil(t, err) }) }
explode_data.jsonl/39182
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 48137, 1098, 16405, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 23634, 1669, 330, 1607, 698, 197, 2405, 1140, 3056, 917, 198, 197, 2023, 600, 1669, 220, 15, 26, 600, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestExtractDeclarationSuccess(t *testing.T) { value := gidlir.Record{ Name: "ExampleStruct", Fields: []gidlir.Field{ {Key: gidlir.FieldKey{Name: "s"}, Value: "foo"}, }, } decl, err := testSchema(t).ExtractDeclaration(value, nil) if err != nil { t.Fatalf("ExtractDeclaration failed: %s", err) } checkStruct(t, decl, "ExampleStruct", false) }
explode_data.jsonl/21388
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 148 }
[ 2830, 3393, 28959, 24489, 7188, 1155, 353, 8840, 836, 8, 341, 16309, 1669, 45863, 75, 404, 49959, 515, 197, 21297, 25, 330, 13314, 9422, 756, 197, 197, 8941, 25, 3056, 34849, 75, 404, 17087, 515, 298, 197, 90, 1592, 25, 45863, 75, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestExchangeRequest_NonBasicAuth(t *testing.T) { tr := &mockTransport{ rt: func(r *http.Request) (w *http.Response, err error) { headerAuth := r.Header.Get("Authorization") if headerAuth != "" { t.Errorf("Unexpected authorization header, %v is found.", headerAuth) } return nil, errors.New("no response") }, } c := &http.Client{Transport: tr} conf := &Config{ ClientID: "CLIENT_ID", Endpoint: Endpoint{ AuthURL: "https://accounts.google.com/auth", TokenURL: "https://accounts.google.com/token", }, } ctx := context.WithValue(context.Background(), HTTPClient, c) conf.Exchange(ctx, "code") }
explode_data.jsonl/25890
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 246 }
[ 2830, 3393, 31564, 1900, 1604, 263, 15944, 5087, 1155, 353, 8840, 836, 8, 341, 25583, 1669, 609, 16712, 27560, 515, 197, 55060, 25, 2915, 2601, 353, 1254, 9659, 8, 320, 86, 353, 1254, 12574, 11, 1848, 1465, 8, 341, 298, 20883, 5087, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFnStringBincorengArg(t *testing.T) { c := gs.New() c.Provide(func(i *int) bool { fmt.Printf("i=%d\n", *i) return false }, "${key.name:=int}") i := 5 c.Object(&i) err := c.Refresh() assert.Nil(t, err) }
explode_data.jsonl/17428
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 24911, 703, 28794, 2153, 968, 2735, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 28081, 7121, 741, 1444, 7763, 19448, 18552, 1956, 353, 396, 8, 1807, 341, 197, 11009, 19367, 445, 72, 7846, 67, 1699, 497, 353, 72, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_SQLite_005(t *testing.T) { tmpdir, err := os.MkdirTemp("", "sqlite") if err != nil { t.Fatal(err) } defer os.RemoveAll(tmpdir) db, err := sqlite3.OpenPath(filepath.Join(tmpdir, "test.sqlite"), sqlite3.SQLITE_OPEN_CREATE, "") if err != nil { t.Error(err) } defer db.Close() if filename := db.Filename(""); filename == "" { t.Error("Unexpected return from Filename") } else { t.Log("Filename=", filename) } if statement, extra, err := db.Prepare("SELECT NULL; SELECT NULL"); err != nil { t.Error(err) } else { t.Log("Statement=", statement) t.Log("Extra=", extra) if err := statement.Finalize(); err != nil { t.Error(err) } } }
explode_data.jsonl/48729
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 279 }
[ 2830, 3393, 46625, 632, 62, 15, 15, 20, 1155, 353, 8840, 836, 8, 341, 20082, 3741, 11, 1848, 1669, 2643, 1321, 12438, 12151, 19814, 330, 37042, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 26133, 3964, 340, 197, 532, 16867, 2643, 84427...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDocDeleteLastUpdateTime(t *testing.T) { ctx := context.Background() c, srv := newMock(t) wantReq := &pb.CommitRequest{ Database: "projects/projectID/databases/(default)", Writes: []*pb.Write{ { Operation: &pb.Write_Delete{"projects/projectID/databases/(default)/documents/C/d"}, CurrentDocument: &pb.Precondition{ ConditionType: &pb.Precondition_UpdateTime{aTimestamp2}, }, }}, } srv.addRPC(wantReq, commitResponseForSet) wr, err := c.Collection("C").Doc("d").Delete(ctx, LastUpdateTime(aTime2)) if err != nil { t.Fatal(err) } if !testEqual(wr, writeResultForSet) { t.Errorf("got %+v, want %+v", wr, writeResultForSet) } }
explode_data.jsonl/15814
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 277 }
[ 2830, 3393, 9550, 6435, 5842, 64299, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 1444, 11, 43578, 1669, 501, 11571, 1155, 340, 50780, 27234, 1669, 609, 16650, 53036, 1900, 515, 197, 197, 5988, 25, 330, 17161, 40118, 915...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestDockerLogger_LoggingNotSupported(t *testing.T) { ctu.DockerCompatible(t) t.Parallel() require := require.New(t) containerImage, containerImageName, containerImageTag := testContainerDetails() client, err := docker.NewClientFromEnv() if err != nil { t.Skip("docker unavailable:", err) } if img, err := client.InspectImage(containerImage); err != nil || img == nil { t.Log("image not found locally, downloading...") err = client.PullImage(docker.PullImageOptions{ Repository: containerImageName, Tag: containerImageTag, }, docker.AuthConfiguration{}) require.NoError(err, "failed to pull image") } containerConf := docker.CreateContainerOptions{ Config: &docker.Config{ Cmd: []string{ "sh", "-c", "touch ~/docklog; tail -f ~/docklog", }, Image: containerImage, }, HostConfig: &docker.HostConfig{ LogConfig: docker.LogConfig{ Type: "gelf", Config: map[string]string{ "gelf-address": "udp://localhost:12201", "mode": "non-blocking", "max-buffer-size": "4m", }, }, }, Context: context.Background(), } container, err := client.CreateContainer(containerConf) require.NoError(err) defer client.RemoveContainer(docker.RemoveContainerOptions{ ID: container.ID, Force: true, }) err = client.StartContainer(container.ID, nil) require.NoError(err) testutil.WaitForResult(func() (bool, error) { container, err = client.InspectContainer(container.ID) if err != nil { return false, err } if !container.State.Running { return false, fmt.Errorf("container not running") } return true, nil }, func(err error) { require.NoError(err) }) stdout := &noopCloser{bytes.NewBuffer(nil)} stderr := &noopCloser{bytes.NewBuffer(nil)} dl := NewDockerLogger(testlog.HCLogger(t)).(*dockerLogger) dl.stdout = stdout dl.stderr = stderr require.NoError(dl.Start(&StartOpts{ ContainerID: container.ID, })) select { case <-dl.doneCh: case <-time.After(10 * time.Second): require.Fail("timedout while waiting for docker_logging to terminate") } }
explode_data.jsonl/76334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 790 }
[ 2830, 3393, 35, 13659, 7395, 62, 34575, 2623, 34636, 1155, 353, 8840, 836, 8, 341, 89216, 84, 909, 13659, 29161, 1155, 692, 3244, 41288, 7957, 741, 17957, 1669, 1373, 7121, 1155, 692, 53290, 1906, 11, 5476, 1906, 675, 11, 5476, 1906, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCheckError(t *testing.T) { testStderr(t, func() { checkError(nil) // should not exit }, "") err := errors.New("test") testStderr(t, func() { testExit(t, func() { checkError(err) }, 1) }, "git-last-modified: test\n") *flagVerbose = true defer func() { *flagVerbose = false }() testStderr(t, func() { testExit(t, func() { checkError(err) }, 1) }, fmt.Sprintf("git-last-modified: test%+v\n", err.(interface{ StackTrace() errors.StackTrace }).StackTrace())) }
explode_data.jsonl/7650
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 214 }
[ 2830, 3393, 3973, 1454, 1155, 353, 8840, 836, 8, 341, 18185, 22748, 615, 1155, 11, 2915, 368, 341, 197, 25157, 1454, 27907, 8, 442, 1265, 537, 4869, 198, 197, 2137, 85617, 9859, 1669, 5975, 7121, 445, 1944, 5130, 18185, 22748, 615, 11...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMarkerLen(t *testing.T) { assertLen(t, "text$0", 4, 0, 0) assertLen(t, "$1text$0", 0, 4, 0, 0) assertLen(t, "te$1xt$0", 2, 0, 2, 0, 0) assertLen(t, "errorContext: `${1:err}`, error: $0", 15, 0, 3, 10, 0, 0) assertLen(t, "errorContext: `${1:err}`, error: $1$0", 15, 0, 3, 10, 0, 3, 0, 0) assertLen(t, "$TM_SELECTED_TEXT$0", 0, 0, 0) assertLen(t, "${TM_SELECTED_TEXT:def}$0", 0, 3, 0, 0) }
explode_data.jsonl/60283
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 207 }
[ 2830, 3393, 20613, 11271, 1155, 353, 8840, 836, 8, 341, 6948, 11271, 1155, 11, 330, 1318, 3, 15, 497, 220, 19, 11, 220, 15, 11, 220, 15, 340, 6948, 11271, 1155, 11, 5201, 16, 1318, 3, 15, 497, 220, 15, 11, 220, 19, 11, 220, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInstructionIteratorValid(t *testing.T) { cnt := 0 script, _ := hex.DecodeString("61000000") it := NewInstructionIterator(script) for it.Next() { cnt++ } if err := it.Error(); err != nil { t.Errorf("Expected 2, but encountered error %v instead.", err) } if cnt != 2 { t.Errorf("Expected 2, but got %v instead.", cnt) } }
explode_data.jsonl/21934
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 134 }
[ 2830, 3393, 16664, 11951, 4088, 1155, 353, 8840, 836, 8, 341, 60553, 1669, 220, 15, 198, 86956, 11, 716, 1669, 12371, 56372, 703, 445, 21, 16, 15, 15, 15, 15, 15, 15, 5130, 23374, 1669, 1532, 16664, 11951, 42795, 340, 2023, 432, 185...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestInfo(t *testing.T) { setFlags() defer logging.swap(logging.newBuffers()) Info("test") if !contains(infoLog, "I", t) { t.Errorf("Info has wrong character: %q", contents(infoLog)) } if !contains(infoLog, "test", t) { t.Error("Info failed") } }
explode_data.jsonl/8424
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 109 }
[ 2830, 3393, 1731, 1155, 353, 8840, 836, 8, 341, 8196, 9195, 741, 16867, 8392, 54537, 51687, 4618, 36219, 2398, 197, 1731, 445, 1944, 1138, 743, 753, 13372, 14208, 2201, 11, 330, 40, 497, 259, 8, 341, 197, 3244, 13080, 445, 1731, 702, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUpdate(t *testing.T) { var ( messageInfo = schema.Message{} ) adminInfo, _ := tester.LoginAdmin() context := helper.Context{ Uid: adminInfo.Id, } userInfo, _ := tester.CreateUser() defer tester.DeleteUserByUserName(userInfo.Username) // 创建一个消息 { var ( title = "test" content = "test" ) r := message.Create(helper.Context{ Uid: adminInfo.Id, }, message.CreateMessageParams{ Uid: userInfo.Id, Title: title, Content: content, }) assert.Equal(t, schema.StatusSuccess, r.Status) assert.Equal(t, "", r.Message) n := model.Message{} assert.Nil(t, r.Decode(&n)) defer message.DeleteMessageById(n.Id) assert.Equal(t, title, n.Title) assert.Equal(t, content, n.Content) } // 更新这个刚添加的消息 { var ( newTitle = "new title" newContent = "new content" ) r := message.Update(context, messageInfo.Id, message.UpdateParams{ Title: &newTitle, Content: &newContent, }) assert.Equal(t, schema.StatusSuccess, r.Status) assert.Equal(t, "", r.Message) assert.Nil(t, r.Decode(&messageInfo)) assert.Equal(t, newTitle, messageInfo.Title) assert.Equal(t, newContent, messageInfo.Content) } }
explode_data.jsonl/58905
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 537 }
[ 2830, 3393, 4289, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 24753, 1731, 284, 10802, 8472, 16094, 197, 692, 64394, 1731, 11, 716, 1669, 37111, 32499, 7210, 2822, 28413, 1669, 13137, 9328, 515, 197, 15980, 307, 25, 3986, 1731, 6444,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestServiceGetProjects(t *testing.T) { ctx := context.TODO() t.Run("happy case", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnListProjectsMatch(ctx, mock.MatchedBy(func(req *admin.ProjectListRequest) bool { return req.Limit == 100 && req.Filters == "ne(state,1)" && req.SortBy.Key == "created_at" })).Return(&admin.Projects{ Projects: []*admin.Project{ { Id: "flytesnacks", }, { Id: "flyteexamples", }, }, }, nil) provider := serviceAdminProvider{ adminClient: &mockAdmin, } projects, err := provider.GetProjects(ctx) assert.NoError(t, err) assert.Len(t, projects.Projects, 2) }) t.Run("admin error", func(t *testing.T) { mockAdmin := mocks.AdminServiceClient{} mockAdmin.OnListProjectsMatch(ctx, mock.MatchedBy(func(req *admin.ProjectListRequest) bool { return req.Limit == 100 && req.Filters == "ne(state,1)" && req.SortBy.Key == "created_at" })).Return(nil, errFoo) provider := serviceAdminProvider{ adminClient: &mockAdmin, } _, err := provider.GetProjects(ctx) assert.EqualError(t, err, errFoo.Error()) }) }
explode_data.jsonl/67475
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 466 }
[ 2830, 3393, 1860, 1949, 29958, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 90988, 741, 3244, 16708, 445, 56521, 1142, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 77333, 7210, 1669, 68909, 39469, 1860, 2959, 16094, 197, 77333, 721...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestImageExport(t *testing.T) { testImageName := "coreos.com/rkt-image-export-test" expectManifest := strings.Replace(manifestExportTemplate, "IMG_NAME", testImageName, -1) tmpDir := createTempDirOrPanic("rkt-TestImageExport-") defer os.RemoveAll(tmpDir) tmpManifest, err := ioutil.TempFile(tmpDir, "manifest") if err != nil { panic(fmt.Sprintf("Cannot create temp manifest: %v", err)) } defer tmpManifest.Close() tmpManifestName := tmpManifest.Name() if err := ioutil.WriteFile(tmpManifestName, []byte(expectManifest), 0600); err != nil { panic(fmt.Sprintf("Cannot write to temp manifest: %v", err)) } defer os.Remove(tmpManifestName) testImage := patchTestACI("rkt-inspect-image-export.aci", "--manifest", tmpManifestName) defer os.Remove(testImage) ctx := testutils.NewRktRunCtx() defer ctx.Cleanup() testImageId := importImageAndFetchHash(t, ctx, testImage) testImageHash, err := getHash(testImage) if err != nil { panic(fmt.Sprintf("Error getting image hash: %v", err)) } tests := []struct { image string shouldFind bool expectedHash string }{ { testImageName, true, testImageHash, }, { testImageId, true, testImageHash, }, { "sha512-not-existed", false, "", }, { "some~random~aci~name", false, "", }, } for i, tt := range tests { outputAciPath := filepath.Join(tmpDir, fmt.Sprintf("exported-%d.aci", i)) runCmd := fmt.Sprintf("%s image export %s %s", ctx.Cmd(), tt.image, outputAciPath) t.Logf("Running 'image export' test #%v: %v", i, runCmd) spawnAndWaitOrFail(t, runCmd, tt.shouldFind) if !tt.shouldFind { continue } exportedHash, err := getHash(outputAciPath) if err != nil { t.Fatalf("Error getting exported image hash: %v", err) } if exportedHash != tt.expectedHash { t.Fatalf("Expected hash %q but got %s", tt.expectedHash, exportedHash) } } }
explode_data.jsonl/31456
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 782 }
[ 2830, 3393, 1906, 16894, 1155, 353, 8840, 836, 8, 341, 18185, 1906, 675, 1669, 330, 2153, 436, 905, 7382, 5840, 13746, 65827, 16839, 698, 24952, 38495, 1669, 9069, 20858, 60671, 6962, 16894, 7275, 11, 330, 30346, 4708, 497, 1273, 1906, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestCreateParams_Validate(t *testing.T) { tests := []struct { name string params CreateParams wantErr bool err error }{ { name: "validate should return all possible errors", params: CreateParams{ Email: "hi", }, err: &multierror.Error{ Errors: []error{ errors.New("api reference is required for command"), errors.New("user: create requires a username"), errors.New("user: create requires a password with a minimum of 8 characters"), errors.New("user: create requires at least 1 role"), errors.New("user: hi is not a valid email address format"), }, }, wantErr: true, }, { name: "validate should return an error when entered password is too short", params: CreateParams{ API: &api.API{}, UserName: "bob", Password: []byte("pass"), Roles: []string{platformAdminRole}, }, err: &multierror.Error{ Errors: []error{ errors.New("user: create requires a password with a minimum of 8 characters"), }, }, wantErr: true, }, { name: "validate should return an error when ece_platform_admin is used along other roles", params: CreateParams{ API: &api.API{}, UserName: "bob", Password: []byte("supersecretpass"), Roles: []string{platformAdminRole, platformViewerRole}, }, err: &multierror.Error{ Errors: []error{ errors.New("user: ece_platform_admin cannot be used in conjunction with other roles"), }, }, wantErr: true, }, { name: "validate should return an error when ece_platform_admin is used along other roles", params: CreateParams{ API: &api.API{}, UserName: "bob", Password: []byte("supersecretpass"), Roles: []string{deploymentsManagerRole, deploymentsViewerRole}, }, err: &multierror.Error{ Errors: []error{ errors.New("user: only one of ece_deployment_manager or ece_deployment_viewer can be chosen"), }, }, wantErr: true, }, { name: "validate should pass if all params are properly set", params: CreateParams{ API: &api.API{}, UserName: "bob", Email: "hi@example.com", Password: []byte("supersecretpass"), Roles: []string{platformViewerRole, deploymentsManagerRole}, }, wantErr: false, }, } for 
_, tt := range tests { t.Run(tt.name, func(t *testing.T) { err := tt.params.Validate() if (err != nil) != tt.wantErr { t.Errorf("Validate() error = %v, wantErr %v", err, tt.wantErr) return } if tt.wantErr && tt.err == nil { t.Errorf("Validate() expected errors = '%v' but no errors returned", tt.err) } if tt.wantErr && err.Error() != tt.err.Error() { t.Errorf("Validate() expected errors = '%v' but got %v", tt.err, err) } }) } }
explode_data.jsonl/39121
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1177 }
[ 2830, 3393, 4021, 4870, 62, 17926, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 25856, 220, 4230, 4870, 198, 197, 50780, 7747, 1807, 198, 197, 9859, 257, 1465, 198, 197, 59403, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestValidateInitiator_FluxMonitorHappy(t *testing.T) { t.Parallel() store, cleanup := cltest.NewStore(t) defer cleanup() job := cltest.NewJob() var initr models.Initiator require.NoError(t, json.Unmarshal([]byte(validInitiator), &initr)) err := services.ValidateInitiator(initr, job, store) require.NoError(t, err) }
explode_data.jsonl/75334
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 127 }
[ 2830, 3393, 17926, 3803, 36122, 1400, 62859, 30098, 32847, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 57279, 11, 21290, 1669, 1185, 1944, 7121, 6093, 1155, 340, 16867, 21290, 2822, 68577, 1669, 1185, 1944, 7121, 12245, 741, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBlkioSetThrottleWriteBpsDevice(t *testing.T) { helper := NewCgroupTestUtil("blkio", t) defer helper.cleanup() const ( throttleBefore = `8:0 1024` ) td := configs.NewThrottleDevice(8, 0, 2048) throttleAfter := td.String() helper.writeFileContents(map[string]string{ "blkio.throttle.write_bps_device": throttleBefore, }) helper.CgroupData.config.Resources.BlkioThrottleWriteBpsDevice = []*configs.ThrottleDevice{td} blkio := &BlkioGroup{} if err := blkio.Set(helper.CgroupPath, helper.CgroupData.config.Resources); err != nil { t.Fatal(err) } value, err := fscommon.GetCgroupParamString(helper.CgroupPath, "blkio.throttle.write_bps_device") if err != nil { t.Fatalf("Failed to parse blkio.throttle.write_bps_device - %s", err) } if value != throttleAfter { t.Fatal("Got the wrong value, set blkio.throttle.write_bps_device failed.") } }
explode_data.jsonl/45847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 346 }
[ 2830, 3393, 4923, 74, 815, 1649, 1001, 27535, 7985, 33, 1690, 6985, 1155, 353, 8840, 836, 8, 341, 9598, 2947, 1669, 1532, 34, 4074, 2271, 2742, 445, 34989, 815, 497, 259, 340, 16867, 13137, 87689, 2822, 4777, 2399, 197, 70479, 27535, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGenerator_GenerateFile(t *testing.T) { type args struct { f model.File } tests := []struct { name string g *Generator args args wantErr bool }{ {"EmptyFile", &Generator{}, args{model.File{Package: "something"}}, false}, {"SimpleDataType", &Generator{}, args{ model.File{ Package: "simple", DataTypes: []model.DataType{ { Name: "MyDT", FQDTN: "org.ystia.datatypes.MyDT", Fields: []model.Field{ {Name: "F1", Type: "string"}, {Name: "F2", Type: "int"}, }, }, }, }, }, false}, {"FieldsTags", &Generator{}, args{ model.File{ Package: "simple", DataTypes: []model.DataType{ { Name: "MyDT", FQDTN: "org.ystia.datatypes.MyDT", Fields: []model.Field{ {Name: "F1", OriginalName: "f1", Type: "string"}, {Name: "F2", OriginalName: "my_f2", Type: "int"}, }, }, }, }, }, false}, {"WithImports", &Generator{}, args{ model.File{ Package: "simple", Imports: []string{"fmt", "time"}, DataTypes: []model.DataType{ { Name: "MyDT", FQDTN: "org.ystia.datatypes.MyDT", Fields: []model.Field{ {Name: "F1", OriginalName: "f1", Type: "string"}, {Name: "F2", OriginalName: "my_f2", Type: "time.Date"}, }, }, }, }, }, false}, {"DerivedDataType", &Generator{}, args{ model.File{ Package: "simple", DataTypes: []model.DataType{ { Name: "MyDT", FQDTN: "org.ystia.datatypes.MyDT", Fields: []model.Field{ {Name: "F1", Type: "string"}, {Name: "F2", Type: "int"}, }, }, { Name: "MyDerivedDT", FQDTN: "org.ystia.datatypes.MyDerivedDT", DerivedFrom: "MyDT", Fields: []model.Field{ {Name: "F3", Type: "[]string"}, }, }, }, }, }, false}, {"DerivedFromBuildtin", &Generator{}, args{ model.File{ Package: "simple", DataTypes: []model.DataType{ { Name: "MyDerivedDT", FQDTN: "org.ystia.datatypes.MyDerivedDT", DerivedFrom: "string", }, }, }, }, false}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { g := &Generator{} got, err := g.GenerateFile(tt.args.f) if (err != nil) != tt.wantErr { t.Errorf("Generator.GenerateFile() error = %v, wantErr %v", err, tt.wantErr) return } if err == nil { assert.Assert(t, 
golden.String(string(got), "golden/"+tt.name)) } }) } }
explode_data.jsonl/82463
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1342 }
[ 2830, 3393, 12561, 2646, 13220, 1703, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 1166, 1614, 8576, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 3174, 981, 353, 12561, 198, 197, 31215, 26...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestConfig_GetString(t *testing.T) { expectedConnectString := TestDefaultManagerConnectString() actualConnectString := configObj.GetString("state.manager.connect_string", "") assert.Equal(t, expectedConnectString, actualConnectString, "String fetched from config object matches expectations.") }
explode_data.jsonl/50528
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 78 }
[ 2830, 3393, 2648, 13614, 703, 1155, 353, 8840, 836, 8, 341, 42400, 14611, 703, 1669, 3393, 3675, 2043, 14611, 703, 2822, 88814, 14611, 703, 1669, 2193, 5261, 21166, 445, 2454, 32815, 10800, 3904, 497, 85617, 6948, 12808, 1155, 11, 3601, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestFirstLastNextPrev(t *testing.T) { t.Parallel() f := NewEtcdTestFixture(t) defer f.Cleanup() testKeyValues := []KV{ {"kb", "1"}, {"kc", "2"}, {"kda", "3"}, {"ke", "4"}, {"w", "w"}, } for _, kv := range testKeyValues { f.Put(kv.key, kv.val) } db, err := newEtcdBackend(f.BackendConfig()) require.NoError(t, err) apply := func(stm STM) error { // First/Last on valid multi item interval. kv, err := stm.First("k") require.NoError(t, err) require.Equal(t, &KV{"kb", "1"}, kv) kv, err = stm.Last("k") require.NoError(t, err) require.Equal(t, &KV{"ke", "4"}, kv) // First/Last on single item interval. kv, err = stm.First("w") require.NoError(t, err) require.Equal(t, &KV{"w", "w"}, kv) kv, err = stm.Last("w") require.NoError(t, err) require.Equal(t, &KV{"w", "w"}, kv) // Next/Prev on start/end. kv, err = stm.Next("k", "ke") require.NoError(t, err) require.Nil(t, kv) kv, err = stm.Prev("k", "kb") require.NoError(t, err) require.Nil(t, kv) // Next/Prev in the middle. kv, err = stm.Next("k", "kc") require.NoError(t, err) require.Equal(t, &KV{"kda", "3"}, kv) kv, err = stm.Prev("k", "ke") require.NoError(t, err) require.Equal(t, &KV{"kda", "3"}, kv) // Delete first item, then add an item before the // deleted one. Check that First/Next will "jump" // over the deleted item and return the new first. stm.Del("kb") stm.Put("ka", "0") kv, err = stm.First("k") require.NoError(t, err) require.Equal(t, &KV{"ka", "0"}, kv) kv, err = stm.Prev("k", "kc") require.NoError(t, err) require.Equal(t, &KV{"ka", "0"}, kv) // Similarly test that a new end is returned if // the old end is deleted first. stm.Del("ke") stm.Put("kf", "5") kv, err = stm.Last("k") require.NoError(t, err) require.Equal(t, &KV{"kf", "5"}, kv) kv, err = stm.Next("k", "kda") require.NoError(t, err) require.Equal(t, &KV{"kf", "5"}, kv) // Overwrite one in the middle. stm.Put("kda", "6") kv, err = stm.Next("k", "kc") require.NoError(t, err) require.Equal(t, &KV{"kda", "6"}, kv) // Add three in the middle, then delete one. 
stm.Put("kdb", "7") stm.Put("kdc", "8") stm.Put("kdd", "9") stm.Del("kdc") // Check that stepping from first to last returns // the expected sequence. var kvs []KV curr, err := stm.First("k") require.NoError(t, err) for curr != nil { kvs = append(kvs, *curr) curr, err = stm.Next("k", curr.key) require.NoError(t, err) } expected := []KV{ {"ka", "0"}, {"kc", "2"}, {"kda", "6"}, {"kdb", "7"}, {"kdd", "9"}, {"kf", "5"}, } require.Equal(t, expected, kvs) // Similarly check that stepping from last to first // returns the expected sequence. kvs = []KV{} curr, err = stm.Last("k") require.NoError(t, err) for curr != nil { kvs = append(kvs, *curr) curr, err = stm.Prev("k", curr.key) require.NoError(t, err) } expected = reverseKVs(expected) require.Equal(t, expected, kvs) return nil } err = RunSTM(db.cli, apply) require.NoError(t, err) require.Equal(t, "0", f.Get("ka")) require.Equal(t, "2", f.Get("kc")) require.Equal(t, "6", f.Get("kda")) require.Equal(t, "7", f.Get("kdb")) require.Equal(t, "9", f.Get("kdd")) require.Equal(t, "5", f.Get("kf")) require.Equal(t, "w", f.Get("w")) }
explode_data.jsonl/24938
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1610 }
[ 2830, 3393, 5338, 5842, 5847, 33528, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1166, 1669, 1532, 31860, 4385, 69356, 1155, 340, 16867, 282, 727, 60639, 2822, 18185, 1592, 6227, 1669, 3056, 82707, 515, 197, 197, 4913, 21310, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuzzerDriverTone(t *testing.T) { d := initTestBuzzerDriver(newGpioTestAdaptor()) gobottest.Assert(t, d.Tone(100, 0.01), nil) }
explode_data.jsonl/5909
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 58 }
[ 2830, 3393, 33, 91447, 11349, 51, 603, 1155, 353, 8840, 836, 8, 341, 2698, 1669, 2930, 2271, 33, 91447, 11349, 1755, 38, 11917, 2271, 2589, 32657, 2398, 3174, 674, 1716, 477, 11711, 1155, 11, 294, 836, 603, 7, 16, 15, 15, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestIsBlank(t *testing.T) { a := String.IsBlank("") b := String.IsBlank(" ") c := String.IsBlank(" ") d := String.IsBlank(" aaa ") e := String.IsBlank(" aaa") f := String.IsBlank("aaa") assert.Equal(t, true, a, "The two item should be the same.") assert.Equal(t, true, b, "The two item should be the same.") assert.Equal(t, true, c, "The two item should be the same.") assert.Equal(t, false, d, "The two item should be the same.") assert.Equal(t, false, e, "The two item should be the same.") assert.Equal(t, false, f, "The two item should be the same.") }
explode_data.jsonl/46919
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 233 }
[ 2830, 3393, 3872, 22770, 1155, 353, 8840, 836, 8, 1476, 11323, 1669, 923, 4506, 22770, 31764, 2233, 1669, 923, 4506, 22770, 445, 14167, 1444, 1669, 923, 4506, 22770, 445, 257, 14167, 2698, 1669, 923, 4506, 22770, 445, 220, 83465, 256, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReplicateRepository_BadRepository(t *testing.T) { t.Parallel() for _, tc := range []struct { desc string invalidSource bool invalidTarget bool error func(testing.TB, error) }{ { desc: "target invalid", invalidTarget: true, }, { desc: "source invalid", invalidSource: true, error: func(t testing.TB, actual error) { testhelper.RequireGrpcError(t, actual, codes.NotFound) require.Contains(t, actual.Error(), "rpc error: code = NotFound desc = GetRepoPath: not a git repository:") }, }, { desc: "both invalid", invalidSource: true, invalidTarget: true, error: func(t testing.TB, actual error) { require.Equal(t, ErrInvalidSourceRepository, actual) }, }, } { t.Run(tc.desc, func(t *testing.T) { cfgBuilder := testcfg.NewGitalyCfgBuilder(testcfg.WithStorages("default", "target")) cfg := cfgBuilder.Build(t) testhelper.BuildGitalyHooks(t, cfg) testhelper.BuildGitalySSH(t, cfg) serverSocketPath := runRepositoryServerWithConfig(t, cfg, nil, testserver.WithDisablePraefect()) cfg.SocketPath = serverSocketPath client := newRepositoryClient(t, cfg, serverSocketPath) sourceRepo, _ := gittest.CloneRepo(t, cfg, cfg.Storages[0]) targetRepo, targetRepoPath := gittest.CloneRepo(t, cfg, cfg.Storages[1], gittest.CloneRepoOpts{ RelativePath: sourceRepo.RelativePath, }) var invalidRepos []*gitalypb.Repository if tc.invalidSource { invalidRepos = append(invalidRepos, sourceRepo) } if tc.invalidTarget { invalidRepos = append(invalidRepos, targetRepo) } locator := config.NewLocator(cfg) for _, invalidRepo := range invalidRepos { invalidRepoPath, err := locator.GetPath(invalidRepo) require.NoError(t, err) // delete git data so make the repo invalid for _, path := range []string{"refs", "objects", "HEAD"} { require.NoError(t, os.RemoveAll(filepath.Join(invalidRepoPath, path))) } } ctx, cancel := testhelper.Context() defer cancel() md := testhelper.GitalyServersMetadataFromCfg(t, cfg) injectedCtx := metadata.NewOutgoingContext(ctx, md) _, err := client.ReplicateRepository(injectedCtx, 
&gitalypb.ReplicateRepositoryRequest{ Repository: targetRepo, Source: sourceRepo, }) if tc.error != nil { tc.error(t, err) return } require.NoError(t, err) gittest.Exec(t, cfg, "-C", targetRepoPath, "fsck") }) } }
explode_data.jsonl/64813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1065 }
[ 2830, 3393, 18327, 48795, 4624, 1668, 329, 4624, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 41653, 688, 914, 198, 197, 197, 11808, 3608, 1807, 198, 197, 197, 11808, 6397, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewGitResource(t *testing.T) { testGitHandler, err := handler.NewGitHandler("private-git-repo", handler.GitAuthConfig{ PrivateSSHKey: "private-ssh-key", SkipHostKeyChecking: true, Username: "admin", Password: "pwd", }, gitresource.CheckoutOptions{ Branch: "master", CommitID: "", }, bm) assert.NoError(t, err) tests := []struct { sourceInfo string gitResource *GitResource err error }{ { `{ "repository": "private-git-repo, }`, nil, errors.New("SourceInfo could not be unmarshalled for source type Git: " + "invalid character '\\n' in string literal"), }, { `{ "repository": "private-git-repo", "getOptions": "test" }`, nil, errors.New("SourceInfo could not be unmarshalled for source type Git: " + "getOptions is not specified in the right format"), }, { `{ "repository": "private-git-repo", "getOptions": "test" }`, nil, errors.New("SourceInfo could not be unmarshalled for source type Git: " + "getOptions is not specified in the right format"), }, { `{ "repository": "http:// test" }`, nil, errors.New("Invalid repository url format: parse \"http:// test\": invalid character \" \" in host name"), }, { `{ "repository": "private-git-repo", "privateSSHKey": "private-ssh-key", "skipHostKeyChecking": true, "username": "admin", "password": "pwd", "getOptions": "branch:master" }`, &GitResource{ context: contextMock, Handler: testGitHandler, }, nil, }, } for _, test := range tests { gitResource, err := NewGitResource(contextMock, test.sourceInfo, bm) if test.err != nil { assert.Nil(t, gitResource) assert.Error(t, err, getString(test)) assert.EqualError(t, err, test.err.Error()) } else { assert.NoError(t, err, getString(test)) assert.Equal(t, test.gitResource, gitResource, getString(test)) } } }
explode_data.jsonl/5080
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 859 }
[ 2830, 3393, 3564, 46562, 4783, 1155, 353, 8840, 836, 8, 341, 18185, 46562, 3050, 11, 1848, 1669, 7013, 7121, 46562, 3050, 445, 1996, 81749, 5504, 5368, 497, 7013, 1224, 275, 5087, 2648, 515, 197, 197, 16787, 62419, 1592, 25, 981, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestTrustedIPs checks how the proxy decides whether a request comes from a
// trusted IP. Two modes are covered: direct connections, where the decision is
// based on req.RemoteAddr, and reverse-proxy mode, where it is based on the
// configured real-client-IP header. Cases span IPv4/IPv6 literals, CIDR
// ranges, the wrong header, and garbage input. A trusted request reaches the
// static upstream (HTTP 200); an untrusted one is rejected (HTTP 403).
func TestTrustedIPs(t *testing.T) {
	tests := []struct {
		name               string
		trustedIPs         []string
		reverseProxy       bool
		realClientIPHeader string
		req                *http.Request
		expectTrusted      bool
	}{
		// Check unconfigured behavior.
		{
			name:               "Default",
			trustedIPs:         nil,
			reverseProxy:       false,
			realClientIPHeader: "X-Real-IP", // Default value
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				return req
			}(),
			expectTrusted: false,
		},
		// Check using req.RemoteAddr (Options.ReverseProxy == false).
		{
			name:               "WithRemoteAddr",
			trustedIPs:         []string{"127.0.0.1"},
			reverseProxy:       false,
			realClientIPHeader: "X-Real-IP", // Default value
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.RemoteAddr = "127.0.0.1:43670"
				return req
			}(),
			expectTrusted: true,
		},
		// Check ignores req.RemoteAddr match when behind a reverse proxy / missing header.
		{
			name:               "IgnoresRemoteAddrInReverseProxyMode",
			trustedIPs:         []string{"127.0.0.1"},
			reverseProxy:       true,
			realClientIPHeader: "X-Real-IP", // Default value
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.RemoteAddr = "127.0.0.1:44324"
				return req
			}(),
			expectTrusted: false,
		},
		// Check successful trusting of localhost in IPv4.
		{
			name:               "TrustsLocalhostInReverseProxyMode",
			trustedIPs:         []string{"127.0.0.0/8", "::1"},
			reverseProxy:       true,
			realClientIPHeader: "X-Forwarded-For",
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.Header.Add("X-Forwarded-For", "127.0.0.1")
				return req
			}(),
			expectTrusted: true,
		},
		// Check successful trusting of localhost in IPv6.
		{
			name:               "TrustsIP6LocalostInReverseProxyMode",
			trustedIPs:         []string{"127.0.0.0/8", "::1"},
			reverseProxy:       true,
			realClientIPHeader: "X-Forwarded-For",
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.Header.Add("X-Forwarded-For", "::1")
				return req
			}(),
			expectTrusted: true,
		},
		// Check does not trust random IPv4 address.
		{
			name:               "DoesNotTrustRandomIP4Address",
			trustedIPs:         []string{"127.0.0.0/8", "::1"},
			reverseProxy:       true,
			realClientIPHeader: "X-Forwarded-For",
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.Header.Add("X-Forwarded-For", "12.34.56.78")
				return req
			}(),
			expectTrusted: false,
		},
		// Check does not trust random IPv6 address.
		{
			name:               "DoesNotTrustRandomIP6Address",
			trustedIPs:         []string{"127.0.0.0/8", "::1"},
			reverseProxy:       true,
			realClientIPHeader: "X-Forwarded-For",
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.Header.Add("X-Forwarded-For", "::2")
				return req
			}(),
			expectTrusted: false,
		},
		// Check respects correct header: a value in X-Real-IP must be ignored
		// when the configured header is X-Forwarded-For.
		{
			name:               "RespectsCorrectHeaderInReverseProxyMode",
			trustedIPs:         []string{"127.0.0.0/8", "::1"},
			reverseProxy:       true,
			realClientIPHeader: "X-Forwarded-For",
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.Header.Add("X-Real-IP", "::1")
				return req
			}(),
			expectTrusted: false,
		},
		// Check doesn't trust if garbage is provided.
		{
			name:               "DoesNotTrustGarbageInReverseProxyMode",
			trustedIPs:         []string{"127.0.0.0/8", "::1"},
			reverseProxy:       true,
			realClientIPHeader: "X-Forwarded-For",
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.Header.Add("X-Forwarded-For", "adsfljk29242as!!")
				return req
			}(),
			expectTrusted: false,
		},
		// Check doesn't trust if garbage is provided (no reverse-proxy).
		{
			name:               "DoesNotTrustGarbage",
			trustedIPs:         []string{"127.0.0.0/8", "::1"},
			reverseProxy:       false,
			realClientIPHeader: "X-Real-IP",
			req: func() *http.Request {
				req, _ := http.NewRequest("GET", "/", nil)
				req.RemoteAddr = "adsfljk29242as!!"
				return req
			}(),
			expectTrusted: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// A static upstream so a trusted request has something to hit.
			opts := baseTestOptions()
			opts.UpstreamServers = options.UpstreamConfig{
				Upstreams: []options.Upstream{
					{
						ID:     "static",
						Path:   "/",
						Static: true,
					},
				},
			}
			opts.TrustedIPs = tt.trustedIPs
			opts.ReverseProxy = tt.reverseProxy
			opts.RealClientIPHeader = tt.realClientIPHeader
			err := validation.Validate(opts)
			assert.NoError(t, err)
			proxy, err := NewOAuthProxy(opts, func(string) bool { return true })
			assert.NoError(t, err)
			rw := httptest.NewRecorder()
			proxy.ServeHTTP(rw, tt.req)
			// Trusted -> static upstream answers 200; untrusted -> 403.
			if tt.expectTrusted {
				assert.Equal(t, 200, rw.Code)
			} else {
				assert.Equal(t, 403, rw.Code)
			}
		})
	}
}
explode_data.jsonl/36423
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2427 }
[ 2830, 3393, 1282, 27145, 3298, 82, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 2290, 914, 198, 197, 25583, 27145, 3298, 82, 260, 3056, 917, 198, 197, 17200, 4450, 16219, 981, 1807, 198, 197, 91874, 2959, 32...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMaxWithArrayNumericInput(t *testing.T) { //Test Data d1 := []int{8, 3, 4, 44, 0} n1 := []int{} d2 := []int8{3, 3, 5, 9, 1} n2 := []int8{} d3 := []int16{4, 5, 4, 33, 2} n3 := []int16{} d4 := []int32{5, 3, 21, 15, 3} n4 := []int32{} d5 := []int64{9, 3, 9, 1, 2} n5 := []int64{} //Calls r1 := MaxInt(d1) c1 := MaxInt(n1) r2 := MaxInt8(d2) c2 := MaxInt8(n2) r3 := MaxInt16(d3) c3 := MaxInt16(n3) r4 := MaxInt32(d4) c4 := MaxInt32(n4) r5 := MaxInt64(d5) c5 := MaxInt64(n5) // Assertions assert.Equal(t, int(44), r1, "It should return the max value in array") assert.Equal(t, nil, c1, "It should return nil") assert.Equal(t, int8(9), r2, "It should return the max value in array") assert.Equal(t, nil, c2, "It should return nil") assert.Equal(t, int16(33), r3, "It should return the max value in array") assert.Equal(t, nil, c3, "It should return nil") assert.Equal(t, int32(21), r4, "It should return the max value in array") assert.Equal(t, nil, c4, "It should return nil") assert.Equal(t, int64(9), r5, "It should return the max value in array") assert.Equal(t, nil, c5, "It should return nil") }
explode_data.jsonl/5181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 530 }
[ 2830, 3393, 5974, 2354, 1857, 36296, 2505, 1155, 353, 8840, 836, 8, 341, 197, 322, 2271, 2885, 198, 2698, 16, 1669, 3056, 396, 90, 23, 11, 220, 18, 11, 220, 19, 11, 220, 19, 19, 11, 220, 15, 532, 9038, 16, 1669, 3056, 396, 16094...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTransportKeepAlives(t *testing.T) { defer afterTest(t) ts := httptest.NewServer(hostPortHandler) defer ts.Close() c := ts.Client() for _, disableKeepAlive := range []bool{false, true} { c.Transport.(*Transport).DisableKeepAlives = disableKeepAlive fetch := func(n int) string { res, err := c.Get(ts.URL) if err != nil { t.Fatalf("error in disableKeepAlive=%v, req #%d, GET: %v", disableKeepAlive, n, err) } body, err := ioutil.ReadAll(res.Body) if err != nil { t.Fatalf("error in disableKeepAlive=%v, req #%d, ReadAll: %v", disableKeepAlive, n, err) } return string(body) } body1 := fetch(1) body2 := fetch(2) bodiesDiffer := body1 != body2 if bodiesDiffer != disableKeepAlive { t.Errorf("error in disableKeepAlive=%v. unexpected bodiesDiffer=%v; body1=%q; body2=%q", disableKeepAlive, bodiesDiffer, body1, body2) } } }
explode_data.jsonl/14071
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 377 }
[ 2830, 3393, 27560, 19434, 2101, 1886, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 57441, 1669, 54320, 70334, 7121, 5475, 19973, 7084, 3050, 340, 16867, 10591, 10421, 2822, 1444, 1669, 10591, 11716, 741, 2023, 8358, 11156, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestBuildConfigWithDifferentTriggerType(t *testing.T) { // this buildconfig has different (than ImageChangeTrigger) trigger defined buildcfg := mockBuildConfig("registry.com/namespace/imagename1", "", "", "") buildcfg.Triggers[0].Type = buildapi.GenericWebHookBuildTriggerType imageStream := mockImageStream("testImageRepo2", "registry.com/namespace/imagename2", map[string]string{"testTag2": "newImageID123"}) image := mockImage("testImage@id", "registry.com/namespace/imagename@id") controller := mockImageChangeController(buildcfg, imageStream, image) bcInstantiator := controller.BuildConfigInstantiator.(*buildConfigInstantiator) bcUpdater := bcInstantiator.buildConfigUpdater err := controller.HandleImageRepo(imageStream) if err != nil { t.Errorf("Unexpected error %v from HandleImageRepo", err) } if len(bcInstantiator.name) != 0 { t.Error("New build generated when a different repository was updated!") } if bcUpdater.buildcfg != nil { t.Error("BuildConfig was updated when a different trigger was defined!") } }
explode_data.jsonl/69175
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 11066, 2648, 2354, 69123, 17939, 929, 1155, 353, 8840, 836, 8, 341, 197, 322, 419, 1936, 1676, 702, 2155, 320, 53795, 4654, 4072, 17939, 8, 8183, 4512, 198, 69371, 14072, 1669, 7860, 11066, 2648, 445, 29172, 905, 14, 2231, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestVerifyChart(t *testing.T) { v, err := VerifyChart("testdata/signtest-0.1.0.tgz", "testdata/helm-test-key.pub") if err != nil { t.Fatal(err) } // The verification is tested at length in the provenance package. Here, // we just want a quick sanity check that the v is not empty. if len(v.FileHash) == 0 { t.Error("Digest missing") } }
explode_data.jsonl/1369
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 32627, 14488, 1155, 353, 8840, 836, 8, 341, 5195, 11, 1848, 1669, 25429, 14488, 445, 92425, 2687, 343, 406, 477, 12, 15, 13, 16, 13, 15, 734, 46589, 497, 330, 92425, 7530, 23162, 16839, 16173, 47773, 1138, 743, 1848, 961, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMarshalTestingAssist(t *testing.T) { a := new(messages.Bounce) a.Timestamp = primitives.NewTimestampNow() b := new(messages.Bounce) TestMarshaling(a, b, 0, t) TestMarshaling(a, b, 10, t) }
explode_data.jsonl/2724
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 84 }
[ 2830, 3393, 55438, 16451, 5615, 380, 1155, 353, 8840, 836, 8, 341, 11323, 1669, 501, 55705, 1785, 9734, 340, 11323, 49024, 284, 71194, 7121, 20812, 7039, 741, 2233, 1669, 501, 55705, 1785, 9734, 340, 73866, 79712, 6132, 2877, 11, 293, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestConcurrentConns(t *testing.T) { listener := Statsd{ Log: testutil.Logger{}, Protocol: "tcp", ServiceAddress: "localhost:8125", AllowedPendingMessages: 10000, MaxTCPConnections: 2, } acc := &testutil.Accumulator{} require.NoError(t, listener.Start(acc)) defer listener.Stop() time.Sleep(time.Millisecond * 250) _, err := net.Dial("tcp", "127.0.0.1:8125") assert.NoError(t, err) _, err = net.Dial("tcp", "127.0.0.1:8125") assert.NoError(t, err) // Connection over the limit: conn, err := net.Dial("tcp", "127.0.0.1:8125") assert.NoError(t, err) net.Dial("tcp", "127.0.0.1:8125") assert.NoError(t, err) _, err = conn.Write([]byte(testMsg)) assert.NoError(t, err) time.Sleep(time.Millisecond * 100) assert.Zero(t, acc.NFields()) }
explode_data.jsonl/14359
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 383 }
[ 2830, 3393, 1109, 3231, 1109, 4412, 1155, 353, 8840, 836, 8, 341, 14440, 798, 1669, 29927, 67, 515, 197, 24201, 25, 503, 1273, 1314, 12750, 38837, 197, 197, 20689, 25, 2290, 330, 27161, 756, 197, 91619, 4286, 25, 260, 330, 8301, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTokenExpire(t *testing.T) { testKey := "testKey" claims := jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.RegisteredClaims{ ExpiresAt: jwt.NewNumericDate(time.Now().Add(time.Millisecond)), }) token, err := claims.SignedString([]byte(testKey)) if err != nil { panic(err) } token = fmt.Sprintf(bearerFormat, token) time.Sleep(time.Second) next := func(ctx context.Context, req interface{}) (interface{}, error) { t.Log(req) return "reply", nil } ctx := transport.NewServerContext(context.Background(), &Transport{reqHeader: newTokenHeader(authorizationKey, token)}) server := Server(func(token *jwt.Token) (interface{}, error) { return []byte(testKey), nil }, WithSigningMethod(jwt.SigningMethodHS256))(next) _, err2 := server(ctx, "test expire token") if !errors.Is(ErrTokenExpired, err2) { t.Errorf("except error %v, but got %v", ErrTokenExpired, err2) } }
explode_data.jsonl/47109
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 333 }
[ 2830, 3393, 3323, 8033, 554, 1155, 353, 8840, 836, 8, 341, 18185, 1592, 1669, 330, 1944, 1592, 698, 197, 48561, 1669, 24589, 7121, 2354, 51133, 3325, 9306, 41152, 287, 3523, 11961, 17, 20, 21, 11, 24589, 19983, 291, 51133, 515, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestS2 verifies the swagger model generated for a struct with a []string
// field: the slice maps to an "array" of string items and the field appears
// in the model's required list.
// NOTE(review): the expected-JSON raw string is compared by
// testJsonFromStruct and may be whitespace-sensitive — its original
// line-break layout appears flattened here; confirm before reformatting.
func TestS2(t *testing.T) { type S2 struct { Ids []string } testJsonFromStruct(t, S2{}, `{  "swagger.S2": {   "id": "swagger.S2",   "required": [    "Ids"   ],   "properties": {    "Ids": {     "type": "array",     "description": "",     "items": {      "$ref": "string"     },     "format": ""    }   }  } }`) }
explode_data.jsonl/39921
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 178 }
[ 2830, 3393, 50, 17, 1155, 353, 8840, 836, 8, 341, 13158, 328, 17, 2036, 341, 197, 197, 12701, 3056, 917, 198, 197, 532, 18185, 5014, 3830, 9422, 1155, 11, 328, 17, 22655, 1565, 515, 220, 330, 74755, 808, 17, 788, 341, 256, 330, 30...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestConfigReading exercises ReadFromFile/ReadFromString across the fixture
// configs: a missing file, unparseable content, an empty config (which must
// still succeed with default-enabled services), and a fully populated static
// config that is compared field-for-field against the expected FileConfig.
func TestConfigReading(t *testing.T) {
	// non-existing file:
	conf, err := ReadFromFile("/heaven/trees/apple.ymL")
	require.Error(t, err)
	require.Contains(t, err.Error(), "failed to open file")
	require.Nil(t, conf)
	// bad content:
	_, err = ReadFromFile(testConfigs.configFileBadContent)
	require.Error(t, err)
	// empty config (must not fail) — Auth/Proxy/SSH default to enabled,
	// Kube defaults to disabled.
	conf, err = ReadFromFile(testConfigs.configFileNoContent)
	require.NoError(t, err)
	require.NotNil(t, conf)
	require.True(t, conf.Auth.Enabled())
	require.True(t, conf.Proxy.Enabled())
	require.True(t, conf.SSH.Enabled())
	require.False(t, conf.Kube.Enabled())
	// static config — every parsed field must match the literal below exactly
	// (cmp.Diff returns "" on equality; unexported Service fields included).
	conf, err = ReadFromFile(testConfigs.configFile)
	require.NoError(t, err)
	require.Empty(t, cmp.Diff(conf, &FileConfig{
		Version: defaults.TeleportConfigVersionV1,
		Global: Global{
			NodeName:    NodeName,
			AuthServers: []string{"auth0.server.example.org:3024", "auth1.server.example.org:3024"},
			Limits: ConnectionLimits{
				MaxConnections: 100,
				MaxUsers:       5,
				Rates:          ConnectionRates,
			},
			Logger: Log{
				Output:   "stderr",
				Severity: "INFO",
				Format: LogFormat{
					Output: "text",
				},
			},
			Storage: backend.Config{
				Type: "bolt",
			},
			DataDir: "/path/to/data",
			CAPin:   apiutils.Strings([]string{"rsa256:123", "rsa256:456"}),
		},
		Auth: Auth{
			Service: Service{
				defaultEnabled: true,
				EnabledFlag:    "Yeah",
				ListenAddress:  "tcp://auth",
			},
			LicenseFile:           "lic.pem",
			DisconnectExpiredCert: types.NewBoolOption(true),
			ClientIdleTimeout:     types.Duration(17 * time.Second),
			WebIdleTimeout:        types.Duration(19 * time.Second),
			RoutingStrategy:       types.RoutingStrategy_MOST_RECENT,
		},
		SSH: SSH{
			Service: Service{
				defaultEnabled: true,
				EnabledFlag:    "true",
				ListenAddress:  "tcp://ssh",
			},
			Labels:   Labels,
			Commands: CommandLabels,
		},
		Proxy: Proxy{
			Service: Service{
				defaultEnabled: true,
				EnabledFlag:    "yes",
				ListenAddress:  "tcp://proxy_ssh_addr",
			},
			KeyFile:  "/etc/teleport/proxy.key",
			CertFile: "/etc/teleport/proxy.crt",
			KeyPairs: []KeyPair{
				{
					PrivateKey:  "/etc/teleport/proxy.key",
					Certificate: "/etc/teleport/proxy.crt",
				},
			},
			WebAddr: "tcp://web_addr",
			TunAddr: "reverse_tunnel_address:3311",
		},
		Kube: Kube{
			Service: Service{
				EnabledFlag:   "yes",
				ListenAddress: "tcp://kube",
			},
			KubeClusterName: "kube-cluster",
			PublicAddr:      apiutils.Strings([]string{"kube-host:1234"}),
		},
		Apps: Apps{
			Service: Service{
				EnabledFlag: "yes",
			},
			Apps: []*App{
				{
					Name:          "foo",
					URI:           "http://127.0.0.1:8080",
					PublicAddr:    "foo.example.com",
					StaticLabels:  Labels,
					DynamicLabels: CommandLabels,
				},
			},
			ResourceMatchers: []ResourceMatcher{
				{
					Labels: map[string]apiutils.Strings{
						"*": {"*"},
					},
				},
			},
		},
		Databases: Databases{
			Service: Service{
				EnabledFlag: "yes",
			},
			Databases: []*Database{
				{
					Name:          "postgres",
					Protocol:      defaults.ProtocolPostgres,
					URI:           "localhost:5432",
					StaticLabels:  Labels,
					DynamicLabels: CommandLabels,
				},
			},
			ResourceMatchers: []ResourceMatcher{
				{
					Labels: map[string]apiutils.Strings{
						"*": {"*"},
					},
				},
			},
			AWSMatchers: []AWSMatcher{
				{
					Types:   []string{"rds"},
					Regions: []string{"us-west-1", "us-east-1"},
					Tags: map[string]apiutils.Strings{
						"a": {"b"},
					},
				},
				{
					Types:   []string{"rds"},
					Regions: []string{"us-central-1"},
					Tags: map[string]apiutils.Strings{
						"c": {"d"},
					},
				},
			},
		},
		Metrics: Metrics{
			Service: Service{
				ListenAddress: "tcp://metrics",
				EnabledFlag:   "yes",
			},
			KeyPairs: []KeyPair{
				{
					PrivateKey:  "/etc/teleport/proxy.key",
					Certificate: "/etc/teleport/proxy.crt",
				},
			},
			CACerts:           []string{"/etc/teleport/ca.crt"},
			GRPCServerLatency: true,
			GRPCClientLatency: true,
		},
		WindowsDesktop: WindowsDesktopService{
			Service: Service{
				EnabledFlag:   "yes",
				ListenAddress: "tcp://windows_desktop",
			},
			PublicAddr: apiutils.Strings([]string{"winsrv.example.com:3028", "no-port.winsrv.example.com"}),
			Hosts:      apiutils.Strings([]string{"win.example.com:3389", "no-port.win.example.com"}),
		},
	}, cmp.AllowUnexported(Service{})))
	// Every service in the static config must report both configured and
	// enabled.
	require.True(t, conf.Auth.Configured())
	require.True(t, conf.Auth.Enabled())
	require.True(t, conf.Proxy.Configured())
	require.True(t, conf.Proxy.Enabled())
	require.True(t, conf.SSH.Configured())
	require.True(t, conf.SSH.Enabled())
	require.True(t, conf.Kube.Configured())
	require.True(t, conf.Kube.Enabled())
	require.True(t, conf.Apps.Configured())
	require.True(t, conf.Apps.Enabled())
	require.True(t, conf.Databases.Configured())
	require.True(t, conf.Databases.Enabled())
	require.True(t, conf.Metrics.Configured())
	require.True(t, conf.Metrics.Enabled())
	require.True(t, conf.WindowsDesktop.Configured())
	require.True(t, conf.WindowsDesktop.Enabled())
	// good config from file
	conf, err = ReadFromFile(testConfigs.configFileStatic)
	require.NoError(t, err)
	require.NotNil(t, conf)
	checkStaticConfig(t, conf)
	// good config from base64 encoded string — must parse identically to the
	// file-based variant above.
	conf, err = ReadFromString(base64.StdEncoding.EncodeToString([]byte(StaticConfigString)))
	require.NoError(t, err)
	require.NotNil(t, conf)
	checkStaticConfig(t, conf)
}
explode_data.jsonl/47160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2501 }
[ 2830, 3393, 2648, 31899, 1155, 353, 8840, 836, 8, 341, 197, 322, 2477, 49357, 1034, 510, 67850, 11, 1848, 1669, 4457, 43633, 4283, 383, 5276, 5523, 7858, 75603, 13, 1600, 43, 1138, 17957, 6141, 1155, 11, 1848, 340, 17957, 11545, 1155, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRevokeLease(t *testing.T) { f := newFixture(t) defer f.Close() attrs := &subnet.LeaseAttrs{ PublicIP: mustParseIP4("1.1.1.1"), } l, err := f.sm.AcquireLease(f.ctx, "_", attrs) if err != nil { t.Fatalf("AcquireLease failed: %v", err) } if err := f.sm.RevokeLease(f.ctx, "_", l.Subnet); err != nil { t.Fatalf("RevokeLease failed: %v", err) } _, err = f.sm.WatchLease(f.ctx, "_", l.Subnet, nil) if err == nil { t.Fatalf("Revoked lease found") } }
explode_data.jsonl/69659
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 693, 7621, 2304, 519, 1155, 353, 8840, 836, 8, 341, 1166, 1669, 501, 18930, 1155, 340, 16867, 282, 10421, 2822, 197, 20468, 1669, 609, 88636, 11824, 519, 53671, 515, 197, 73146, 3298, 25, 1969, 14463, 3298, 19, 445, 16, 13...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestParseListEnv(t *testing.T) { const envKey = "ELASTIC_APM_TEST_LIST" os.Unsetenv(envKey) defer os.Unsetenv(envKey) defaultList := []string{"foo", "bar"} list := apmconfig.ParseListEnv(envKey, ",", defaultList) assert.Equal(t, defaultList, list) os.Setenv(envKey, "a") list = apmconfig.ParseListEnv(envKey, ",", defaultList) assert.Equal(t, []string{"a"}, list) os.Setenv(envKey, "a,b") list = apmconfig.ParseListEnv(envKey, ",", defaultList) assert.Equal(t, []string{"a", "b"}, list) os.Setenv(envKey, ",a , b,") list = apmconfig.ParseListEnv(envKey, ",", defaultList) assert.Equal(t, []string{"a", "b"}, list) os.Setenv(envKey, ",") list = apmconfig.ParseListEnv(envKey, ",", defaultList) assert.Len(t, list, 0) os.Setenv(envKey, "a| b") list = apmconfig.ParseListEnv(envKey, "|", defaultList) assert.Equal(t, []string{"a", "b"}, list) os.Setenv(envKey, "a b") list = apmconfig.ParseListEnv(envKey, ",", defaultList) assert.Equal(t, []string{"a b"}, list) }
explode_data.jsonl/63045
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 436 }
[ 2830, 3393, 14463, 852, 14359, 1155, 353, 8840, 836, 8, 341, 4777, 6105, 1592, 284, 330, 2749, 6349, 1317, 1566, 8795, 11641, 11899, 698, 25078, 10616, 746, 3160, 16978, 1592, 340, 16867, 2643, 10616, 746, 3160, 16978, 1592, 692, 11940, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestPushTypeFor checks which xDS push types (CDS/EDS/LDS/RDS) are scheduled
// for a proxy (sidecar vs gateway/router) when configs of a given
// GroupVersionKind change. An empty/nil configTypes set and unknown kinds
// both fall back to a full push of all four types.
func TestPushTypeFor(t *testing.T) {
	sidecar := &model.Proxy{Type: model.SidecarProxy}
	gateway := &model.Proxy{Type: model.Router}
	tests := []struct {
		name        string
		proxy       *model.Proxy
		configTypes []resource.GroupVersionKind
		expect      map[XdsType]bool
	}{
		{
			name:        "configTypes is empty",
			proxy:       sidecar,
			configTypes: nil,
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true, RDS: true},
		},
		{
			name:        "configTypes is empty",
			proxy:       gateway,
			configTypes: nil,
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true, RDS: true},
		},
		{
			name:        "sidecar updated for sidecar proxy",
			proxy:       sidecar,
			configTypes: []resource.GroupVersionKind{gvk.Sidecar},
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true, RDS: true},
		},
		{
			name:        "sidecar updated for gateway proxy",
			proxy:       gateway,
			configTypes: []resource.GroupVersionKind{gvk.Sidecar},
			expect:      map[XdsType]bool{},
		},
		{
			name:        "quotaSpec updated for sidecar proxy",
			proxy:       sidecar,
			configTypes: []resource.GroupVersionKind{gvk.QuotaSpec},
			expect:      map[XdsType]bool{LDS: true, RDS: true},
		},
		{
			name:        "quotaSpec updated for gateway",
			proxy:       gateway,
			configTypes: []resource.GroupVersionKind{gvk.QuotaSpec},
			expect:      map[XdsType]bool{},
		},
		{
			name:        "authorizationpolicy updated",
			proxy:       sidecar,
			configTypes: []resource.GroupVersionKind{gvk.AuthorizationPolicy},
			expect:      map[XdsType]bool{LDS: true},
		},
		{
			name:        "authorizationpolicy updated",
			proxy:       gateway,
			configTypes: []resource.GroupVersionKind{gvk.AuthorizationPolicy},
			expect:      map[XdsType]bool{LDS: true},
		},
		{
			name:        "unknown type updated",
			proxy:       sidecar,
			configTypes: []resource.GroupVersionKind{{Kind: "unknown"}},
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true, RDS: true},
		},
		{
			name:        "unknown type updated",
			proxy:       gateway,
			configTypes: []resource.GroupVersionKind{},
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true, RDS: true},
		},
		{
			name:        "gateway and virtualservice updated for gateway proxy",
			proxy:       gateway,
			configTypes: []resource.GroupVersionKind{gvk.Gateway, gvk.VirtualService},
			expect:      map[XdsType]bool{LDS: true, RDS: true},
		},
		{
			name:        "virtualservice and destinationrule updated",
			proxy:       sidecar,
			configTypes: []resource.GroupVersionKind{gvk.DestinationRule, gvk.VirtualService},
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true, RDS: true},
		},
		{
			name:        "requestauthentication updated",
			proxy:       sidecar,
			configTypes: []resource.GroupVersionKind{gvk.RequestAuthentication},
			expect:      map[XdsType]bool{LDS: true},
		},
		{
			name:        "requestauthentication updated",
			proxy:       gateway,
			configTypes: []resource.GroupVersionKind{gvk.RequestAuthentication},
			expect:      map[XdsType]bool{LDS: true},
		},
		{
			name:        "peerauthentication updated",
			proxy:       sidecar,
			configTypes: []resource.GroupVersionKind{gvk.PeerAuthentication},
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true},
		},
		{
			name:        "peerauthentication updated",
			proxy:       gateway,
			configTypes: []resource.GroupVersionKind{gvk.PeerAuthentication},
			expect:      map[XdsType]bool{CDS: true, EDS: true, LDS: true},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Translate the kinds under test into the updated-config set the
			// push event carries; name/namespace values are arbitrary.
			cfgs := map[model.ConfigKey]struct{}{}
			for _, kind := range tt.configTypes {
				cfgs[model.ConfigKey{
					Kind:      kind,
					Name:      "name",
					Namespace: "ns",
				}] = struct{}{}
			}
			pushEv := &XdsEvent{configsUpdated: cfgs}
			out := PushTypeFor(tt.proxy, pushEv)
			if !reflect.DeepEqual(out, tt.expect) {
				t.Errorf("expected: %v, but got %v", tt.expect, out)
			}
		})
	}
}
explode_data.jsonl/29797
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1842 }
[ 2830, 3393, 16644, 929, 2461, 1155, 353, 8840, 836, 8, 341, 93764, 6918, 1669, 609, 2528, 75200, 90, 929, 25, 1614, 93135, 6918, 16219, 532, 3174, 12043, 1669, 609, 2528, 75200, 90, 929, 25, 1614, 31413, 630, 78216, 1669, 3056, 1235, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRequestInfo(t *testing.T) { rtParams := &NewRuntimeParameters{ RuntimeID: "bb", ConcurrentMode: false, StreamMode: false, WaitRuntimeAliveTimeout: 3, Resource: &api.Resource{}, } rt := NewRuntimeInfo(rtParams) req := NewRequestInfo("aa", rt) req.SetInitTime(1, 2) if req.InitStartTimeMS != 1 || req.InitDoneTimeMS != 2 { t.Errorf("init time error: [%d-%d]", req.InitStartTimeMS, req.InitDoneTimeMS) } req.InvokeResult(StatusSuccess, "result") }
explode_data.jsonl/54733
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 239 }
[ 2830, 3393, 1900, 1731, 1155, 353, 8840, 836, 8, 341, 55060, 4870, 1669, 609, 3564, 15123, 9706, 515, 197, 68321, 915, 25, 2290, 330, 6066, 756, 197, 93070, 3231, 3636, 25, 688, 895, 345, 197, 197, 3027, 3636, 25, 1060, 895, 345, 19...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSpan_EndTimestamp(t *testing.T) { ms := NewSpan() assert.EqualValues(t, Timestamp(0), ms.EndTimestamp()) testValEndTimestamp := Timestamp(1234567890) ms.SetEndTimestamp(testValEndTimestamp) assert.EqualValues(t, testValEndTimestamp, ms.EndTimestamp()) }
explode_data.jsonl/63270
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 12485, 49953, 20812, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 12485, 741, 6948, 12808, 6227, 1155, 11, 32758, 7, 15, 701, 9829, 18569, 20812, 2398, 18185, 2208, 3727, 20812, 1669, 32758, 7, 16, 17, 18, 19, 20, 21, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestDefaulting verifies the Default() behavior of the *Info request types:
// zero-valued fields are filled with their documented defaults, and fields
// that are already non-nil are left alone — whether they hold the default
// value or a non-default one.
func TestDefaulting(t *testing.T) {
	tests := []struct {
		name       string
		structName string // only used in the failure message
		object     DefaultedInfoRequest
		expected   DefaultedInfoRequest
	}{
		{
			name:       "DeployKey: empty",
			structName: "DeployKey",
			object:     &DeployKeyInfo{},
			expected: &DeployKeyInfo{
				ReadOnly: BoolVar(true),
			},
		},
		{
			name:       "DeployKey: don't set if non-nil (default)",
			structName: "DeployKey",
			object: &DeployKeyInfo{
				ReadOnly: BoolVar(true),
			},
			expected: &DeployKeyInfo{
				ReadOnly: BoolVar(true),
			},
		},
		{
			name:       "DeployKey: don't set if non-nil (non-default)",
			structName: "DeployKey",
			object: &DeployKeyInfo{
				ReadOnly: BoolVar(false),
			},
			expected: &DeployKeyInfo{
				ReadOnly: BoolVar(false),
			},
		},
		{
			name:       "Repository: empty",
			structName: "Repository",
			object:     &RepositoryInfo{},
			expected: &RepositoryInfo{
				Visibility:    RepositoryVisibilityVar(RepositoryVisibilityPrivate),
				DefaultBranch: StringVar("main"),
			},
		},
		{
			name:       "Repository: don't set if non-nil (default)",
			structName: "Repository",
			object: &RepositoryInfo{
				Visibility:    RepositoryVisibilityVar(RepositoryVisibilityPrivate),
				DefaultBranch: StringVar("main"),
			},
			expected: &RepositoryInfo{
				Visibility:    RepositoryVisibilityVar(RepositoryVisibilityPrivate),
				DefaultBranch: StringVar("main"),
			},
		},
		{
			name:       "Repository: don't set if non-nil (non-default)",
			structName: "Repository",
			object: &RepositoryInfo{
				Visibility:    RepositoryVisibilityVar(RepositoryVisibilityInternal),
				DefaultBranch: StringVar("main"),
			},
			expected: &RepositoryInfo{
				Visibility:    RepositoryVisibilityVar(RepositoryVisibilityInternal),
				DefaultBranch: StringVar("main"),
			},
		},
		{
			name:       "TeamAccess: empty",
			structName: "TeamAccess",
			object:     &TeamAccessInfo{},
			expected: &TeamAccessInfo{
				Permission: RepositoryPermissionVar(RepositoryPermissionPull),
			},
		},
		{
			// NOTE(review): structName "Repository" here looks like a
			// copy-paste slip ("TeamAccess" expected); it only affects the
			// failure message, not the assertion. Confirm and fix upstream.
			name:       "TeamAccess: don't set if non-nil (default)",
			structName: "Repository",
			object: &TeamAccessInfo{
				Permission: RepositoryPermissionVar(RepositoryPermissionPull),
			},
			expected: &TeamAccessInfo{
				Permission: RepositoryPermissionVar(RepositoryPermissionPull),
			},
		},
		{
			name:       "TeamAccess: don't set if non-nil (non-default)",
			structName: "TeamAccess",
			object: &TeamAccessInfo{
				Permission: RepositoryPermissionVar(RepositoryPermissionPush),
			},
			expected: &TeamAccessInfo{
				Permission: RepositoryPermissionVar(RepositoryPermissionPush),
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Default() mutates the object in place; compare the result
			// against the expected literal.
			tt.object.Default()
			if !reflect.DeepEqual(tt.object, tt.expected) {
				t.Errorf("%s.Default(): got %v, expected %v", tt.structName, tt.object, tt.expected)
			}
		})
	}
}
explode_data.jsonl/26985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1210 }
[ 2830, 3393, 3675, 287, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 981, 914, 198, 197, 6472, 675, 914, 198, 197, 35798, 257, 7899, 291, 1731, 1900, 198, 197, 42400, 256, 7899, 291, 1731, 1900, 198, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMaybeRemove(t *testing.T) { defer leaktest.AfterTest(t)() cfg := TestStoreConfig(nil) stopper := stop.NewStopper() defer stopper.Stop(context.Background()) store := createTestStoreWithoutStart(t, stopper, testStoreOpts{createSystemRanges: true}, &cfg) // Add a queue to the scanner before starting the store and running the scanner. // This is necessary to avoid data race. fq := &fakeRangeQueue{ maybeRemovedRngs: make(chan roachpb.RangeID), } store.scanner.AddQueues(fq) if err := store.Start(context.Background(), stopper); err != nil { t.Fatal(err) } store.WaitForInit() repl, err := store.GetReplica(1) if err != nil { t.Error(err) } if err := store.RemoveReplica(context.Background(), repl, repl.Desc().NextReplicaID, RemoveOptions{ DestroyData: true, }); err != nil { t.Error(err) } // MaybeRemove is called. removedRng := <-fq.maybeRemovedRngs if removedRng != repl.RangeID { t.Errorf("Unexpected removed range %v", removedRng) } }
explode_data.jsonl/111
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 21390, 13021, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 50286, 1669, 3393, 6093, 2648, 27907, 340, 62644, 712, 1669, 2936, 7121, 10674, 712, 741, 16867, 2936, 712, 30213, 5378, 19047, 2398, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestUnmarshalCamelCaseKey(t *testing.T) { var x struct { FooBar int B int } if err := Unmarshal(testCamelCaseKeyToml, &x); err != nil { t.Fatal(err) } if x.FooBar != 10 { t.Fatal("Did not set camelCase'd key") } }
explode_data.jsonl/46347
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 1806, 27121, 25406, 301, 4207, 1592, 1155, 353, 8840, 836, 8, 341, 2405, 856, 2036, 341, 197, 12727, 2624, 3428, 526, 198, 197, 12791, 414, 526, 198, 197, 630, 743, 1848, 1669, 1230, 27121, 8623, 25406, 301, 4207, 1592, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestLoadPrometheus(t *testing.T) { tests := []struct { name string Input *Config expectedResult string err string }{ { name: "basic valid configuration", Input: &Config{ Prometheus: &PrometheusCfg{ Enabled: true, Level: "full", ListenAddr: "127.0.0.1", ListenPort: 6060, }, Cscli: &CscliCfg{}, }, expectedResult: "http://127.0.0.1:6060", }, } for idx, test := range tests { err := test.Input.LoadPrometheus() if err == nil && test.err != "" { fmt.Printf("TEST '%s': NOK\n", test.name) t.Fatalf("%d/%d expected error, didn't get it", idx, len(tests)) } else if test.err != "" { if !strings.HasPrefix(fmt.Sprintf("%s", err), test.err) { fmt.Printf("TEST '%s': NOK\n", test.name) t.Fatalf("%d/%d expected '%s' got '%s'", idx, len(tests), test.err, fmt.Sprintf("%s", err)) } } isOk := assert.Equal(t, test.expectedResult, test.Input.Cscli.PrometheusUrl) if !isOk { t.Fatalf("test '%s' failed\n", test.name) } else { fmt.Printf("TEST '%s': OK\n", test.name) } } }
explode_data.jsonl/23339
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 552 }
[ 2830, 3393, 5879, 35186, 39705, 1155, 353, 8840, 836, 8, 1476, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 66588, 688, 353, 2648, 198, 197, 42400, 2077, 914, 198, 197, 9859, 310, 914, 198, 197, 59403, 197, 197, 515,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestBindTemplate(t *testing.T) { env := environment(map[string]interface{}{ "a": StringType, "b": NumberType, "c": BoolType, "d": NewListType(StringType), "e": NewOutputType(StringType), "f": NewOutputType(NumberType), "g": NewOutputType(BoolType), "h": NewOutputType(NewListType(StringType)), "i": NewPromiseType(StringType), "j": NewPromiseType(NumberType), "k": NewPromiseType(BoolType), "l": NewPromiseType(NewListType(StringType)), }) scope := env.scope() cases := []exprTestCase{ // Unwrapped interpolations {x: `"${0}"`, t: NumberType, xt: &LiteralValueExpression{}}, {x: `"${true}"`, t: BoolType, xt: &LiteralValueExpression{}}, {x: `"${d}"`, t: NewListType(StringType), xt: &ScopeTraversalExpression{}}, {x: `"${e}"`, t: NewOutputType(StringType), xt: &ScopeTraversalExpression{}}, {x: `"${i}"`, t: NewPromiseType(StringType), xt: &ScopeTraversalExpression{}}, // Simple interpolations {x: `"v: ${a}"`, t: StringType}, {x: `"v: ${b}"`, t: StringType}, {x: `"v: ${c}"`, t: StringType}, {x: `"v: ${d}"`, t: StringType}, // Template control expressions {x: `"%{if c} v: ${a} %{endif}"`, t: StringType}, {x: `"%{for v in d} v: ${v} %{endfor}"`, t: StringType}, // Lifted operations {x: `"v: ${e}"`, t: NewOutputType(StringType)}, {x: `"v: ${f}"`, t: NewOutputType(StringType)}, {x: `"v: ${g}"`, t: NewOutputType(StringType)}, {x: `"v: ${h}"`, t: NewOutputType(StringType)}, {x: `"%{if g} v: ${a} %{endif}"`, t: NewOutputType(StringType)}, {x: `"%{for v in h} v: ${v} %{endfor}"`, t: NewOutputType(StringType)}, {x: `"v: ${i}"`, t: NewPromiseType(StringType)}, {x: `"v: ${j}"`, t: NewPromiseType(StringType)}, {x: `"v: ${k}"`, t: NewPromiseType(StringType)}, {x: `"v: ${l}"`, t: NewPromiseType(StringType)}, {x: `"%{if k} v: ${a} %{endif}"`, t: NewPromiseType(StringType)}, {x: `"%{for v in l} v: ${v} %{endfor}"`, t: NewPromiseType(StringType)}, } for _, c := range cases { t.Run(c.x, func(t *testing.T) { expr, diags := BindExpressionText(c.x, scope, hcl.Pos{}) assert.Len(t, diags, 0) 
assert.Equal(t, c.t, expr.Type()) var ok bool switch c.xt.(type) { case *LiteralValueExpression: _, ok = expr.(*LiteralValueExpression) case *ScopeTraversalExpression: _, ok = expr.(*ScopeTraversalExpression) default: _, ok = expr.(*TemplateExpression) assert.Equal(t, c.x, fmt.Sprintf("%v", expr)) } assert.True(t, ok) }) } }
explode_data.jsonl/42572
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1119 }
[ 2830, 3393, 9950, 7275, 1155, 353, 8840, 836, 8, 341, 57538, 1669, 4573, 9147, 14032, 31344, 67066, 197, 197, 56693, 788, 93635, 345, 197, 197, 1, 65, 788, 5624, 929, 345, 197, 197, 96946, 788, 12608, 929, 345, 197, 197, 44917, 788, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestEtcdUpdateEndpoints(t *testing.T) { ctx := api.NewDefaultContext() fakeClient := tools.NewFakeEtcdClient(t) fakeClient.TestIndex = true registry := NewTestEtcdRegistry(fakeClient) endpoints := api.Endpoints{ TypeMeta: api.TypeMeta{ID: "foo"}, Endpoints: []string{"baz", "bar"}, } key, _ := makeServiceEndpointsKey(ctx, "foo") fakeClient.Set(key, runtime.EncodeOrDie(latest.Codec, &api.Endpoints{}), 0) err := registry.UpdateEndpoints(ctx, &endpoints) if err != nil { t.Errorf("unexpected error: %v", err) } response, err := fakeClient.Get(key, false, false) if err != nil { t.Fatalf("Unexpected error %v", err) } var endpointsOut api.Endpoints err = latest.Codec.DecodeInto([]byte(response.Node.Value), &endpointsOut) if !reflect.DeepEqual(endpoints, endpointsOut) { t.Errorf("Unexpected endpoints: %#v, expected %#v", endpointsOut, endpoints) } }
explode_data.jsonl/8174
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 336 }
[ 2830, 3393, 31860, 4385, 4289, 80786, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 6330, 7121, 3675, 1972, 741, 1166, 726, 2959, 1669, 7375, 7121, 52317, 31860, 4385, 2959, 1155, 340, 1166, 726, 2959, 8787, 1552, 284, 830, 198, 197, 29172...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4