text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestNewHTTPClientCert(t *testing.T) { server := httptest.NewUnstartedServer( http.HandlerFunc( func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", `text/plain; version=0.0.4`) w.Write([]byte{}) }, ), ) tlsConfig := newTLSConfig("server", t) tlsConfig.ClientAuth = tls.RequireAndVerifyClientCert tlsConfig.ClientCAs = tlsConfig.RootCAs server.TLS = tlsConfig server.StartTLS() defer server.Close() cfg := config_util.HTTPClientConfig{ TLSConfig: config_util.TLSConfig{ CAFile: caCertPath, CertFile: "testdata/client.cer", KeyFile: "testdata/client.key", }, } c, err := config_util.NewClientFromConfig(cfg, "test") if err != nil { t.Fatal(err) } _, err = c.Get(server.URL) if err != nil { t.Fatal(err) } }
explode_data.jsonl/16315
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 3564, 9230, 2959, 36934, 1155, 353, 8840, 836, 8, 341, 41057, 1669, 54320, 70334, 7121, 1806, 46723, 5475, 1006, 197, 28080, 89164, 1006, 298, 29244, 3622, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 571, 6692, 15753, 100...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestImagePullSecret(t *testing.T) { cases := []struct { name string secret string valid bool }{ { name: "single entry with auth", secret: `{"auths":{"example.com":{"auth":"authorization value"}}}`, valid: true, }, { name: "single entry with credsStore", secret: `{"auths":{"example.com":{"credsStore":"creds store value"}}}`, valid: true, }, { name: "empty", secret: `{}`, valid: false, }, { name: "no auths", secret: `{"not-auths":{"example.com":{"auth":"authorization value"}}}`, valid: false, }, { name: "no auth or credsStore", secret: `{"auths":{"example.com":{"unrequired-field":"value"}}}`, valid: false, }, { name: "additional fields", secret: `{"auths":{"example.com":{"auth":"authorization value","other-field":"other field value"}}}`, valid: true, }, { name: "no entries", secret: `{"auths":{}}`, valid: false, }, { name: "multiple valid entries", secret: `{"auths":{"example.com":{"auth":"authorization value"},"other-example.com":{"auth":"other auth value"}}}`, valid: true, }, { name: "mix of valid and invalid entries", secret: `{"auths":{"example.com":{"auth":"authorization value"},"other-example.com":{"unrequired-field":"value"}}}`, valid: false, }, } for _, tc := range cases { t.Run(tc.name, func(t *testing.T) { err := ImagePullSecret(tc.secret) if tc.valid { assert.NoError(t, err) } else { assert.Error(t, err) } }) } }
explode_data.jsonl/56440
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 648 }
[ 2830, 3393, 1906, 36068, 19773, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 11609, 256, 914, 198, 197, 197, 20474, 914, 198, 197, 56322, 220, 1807, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 256, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestWebSocketReverseProxyFromWSSClient(t *testing.T) { wsEcho := newTLSServer(websocket.Handler(func(ws *websocket.Conn) { io.Copy(ws, ws) })) defer wsEcho.Close() p := newWebSocketTestProxy(wsEcho.URL, true, 30*time.Second) echoProxy := newTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { p.ServeHTTP(w, r) })) defer echoProxy.Close() // Set up WebSocket client url := strings.Replace(echoProxy.URL, "https://", "wss://", 1) wsCfg, err := websocket.NewConfig(url, echoProxy.URL) if err != nil { t.Fatal(err) } wsCfg.TlsConfig = &tls.Config{InsecureSkipVerify: true} ws, err := websocket.DialConfig(wsCfg) if err != nil { t.Fatal(err) } defer ws.Close() // Send test message trialMsg := "Is it working?" if sendErr := websocket.Message.Send(ws, trialMsg); sendErr != nil { t.Fatal(sendErr) } // It should be echoed back to us var actualMsg string if rcvErr := websocket.Message.Receive(ws, &actualMsg); rcvErr != nil { t.Fatal(rcvErr) } if actualMsg != trialMsg { t.Errorf("Expected '%s' but got '%s' instead", trialMsg, actualMsg) } }
explode_data.jsonl/64233
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 458 }
[ 2830, 3393, 61238, 45695, 16219, 3830, 54, 1220, 2959, 1155, 353, 8840, 836, 8, 341, 6692, 82, 74994, 1669, 501, 13470, 1220, 2836, 39769, 9556, 31010, 18552, 57786, 353, 83208, 50422, 8, 341, 197, 53112, 31770, 57786, 11, 17624, 340, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRunInitMasterChecks(t *testing.T) { var tests = []struct { cfg *kubeadmapi.MasterConfiguration expected bool }{ { cfg: &kubeadmapi.MasterConfiguration{ API: kubeadm.API{AdvertiseAddress: "foo"}, }, expected: false, }, } for _, rt := range tests { actual := RunInitMasterChecks(rt.cfg) if (actual == nil) != rt.expected { t.Errorf( "failed RunInitMasterChecks:\n\texpected: %t\n\t actual: %t", rt.expected, (actual != nil), ) } } }
explode_data.jsonl/20506
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 6727, 3803, 18041, 49820, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 50286, 414, 353, 74, 392, 3149, 76, 2068, 71202, 7688, 198, 197, 42400, 1807, 198, 197, 59403, 197, 197, 515, 298, 50286, 25, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidateImportVolumeSanBackend(t *testing.T) { const ( backendName = "backend01" scName = "sc01" volumeName = "volume01" originalName = "origVolume01" backendProtocol = config.Block ) orchestrator, volumeConfig := importVolumeSetup(t, backendName, scName, volumeName, originalName, backendProtocol) _, err := orchestrator.AddVolume(ctx(), volumeConfig) if err != nil { t.Fatal("Unable to add volume: ", err) } // The volume exists on the backend with the original name since we added it above. // Set volumeConfig.InternalName to the expected volumeName post import. volumeConfig.InternalName = volumeName // Create VolumeConfig objects for the remaining error conditions protocolVolConfig := volumeConfig.ConstructClone() protocolVolConfig.Protocol = config.File ext4RawBlockFSVolConfig := volumeConfig.ConstructClone() ext4RawBlockFSVolConfig.VolumeMode = config.RawBlock ext4RawBlockFSVolConfig.Protocol = config.Block ext4RawBlockFSVolConfig.FileSystem = "ext4" for _, c := range []struct { name string volumeConfig *storage.VolumeConfig valid bool error string }{ {name: "protocol", volumeConfig: protocolVolConfig, valid: false, error: "incompatible with the backend"}, { name: "invalidFS", volumeConfig: ext4RawBlockFSVolConfig, valid: false, error: "cannot create raw-block volume", }, } { // The test code err = orchestrator.validateImportVolume(ctx(), c.volumeConfig) if err != nil { if c.valid { t.Errorf("%s: unexpected error %v", c.name, err) } else { if !strings.Contains(err.Error(), c.error) { t.Errorf("%s: expected %s but received error %v", c.name, c.error, err) } } } else if !c.valid { t.Errorf("%s: expected error but passed test", c.name) } } cleanup(t, orchestrator) }
explode_data.jsonl/62738
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 693 }
[ 2830, 3393, 17926, 11511, 18902, 23729, 29699, 1155, 353, 8840, 836, 8, 341, 4777, 2399, 197, 197, 20942, 675, 257, 284, 330, 20942, 15, 16, 698, 197, 29928, 675, 688, 284, 330, 2388, 15, 16, 698, 197, 5195, 4661, 675, 414, 284, 330...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestLaunchIDFromVerifiedClientIDQueryPaginated(t *testing.T) { keeper, ctx := keepertest.Monitoringc(t) wctx := sdk.WrapSDKContext(ctx) msgs := createNLaunchIDFromVerifiedClientID(keeper, ctx, 5) request := func(next []byte, offset, limit uint64, total bool) *types.QueryAllLaunchIDFromVerifiedClientIDRequest { return &types.QueryAllLaunchIDFromVerifiedClientIDRequest{ Pagination: &query.PageRequest{ Key: next, Offset: offset, Limit: limit, CountTotal: total, }, } } t.Run("ByOffset", func(t *testing.T) { step := 2 for i := 0; i < len(msgs); i += step { resp, err := keeper.LaunchIDFromVerifiedClientIDAll(wctx, request(nil, uint64(i), uint64(step), false)) require.NoError(t, err) require.LessOrEqual(t, len(resp.LaunchIDFromVerifiedClientID), step) require.Subset(t, nullify.Fill(msgs), nullify.Fill(resp.LaunchIDFromVerifiedClientID), ) } }) t.Run("ByKey", func(t *testing.T) { step := 2 var next []byte for i := 0; i < len(msgs); i += step { resp, err := keeper.LaunchIDFromVerifiedClientIDAll(wctx, request(next, 0, uint64(step), false)) require.NoError(t, err) require.LessOrEqual(t, len(resp.LaunchIDFromVerifiedClientID), step) require.Subset(t, nullify.Fill(msgs), nullify.Fill(resp.LaunchIDFromVerifiedClientID), ) next = resp.Pagination.NextKey } }) t.Run("Total", func(t *testing.T) { resp, err := keeper.LaunchIDFromVerifiedClientIDAll(wctx, request(nil, 0, 0, true)) require.NoError(t, err) require.Equal(t, len(msgs), int(resp.Pagination.Total)) require.ElementsMatch(t, nullify.Fill(msgs), nullify.Fill(resp.LaunchIDFromVerifiedClientID), ) }) t.Run("InvalidRequest", func(t *testing.T) { _, err := keeper.LaunchIDFromVerifiedClientIDAll(wctx, nil) require.ErrorIs(t, err, status.Error(codes.InvalidArgument, "invalid request")) }) }
explode_data.jsonl/22540
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 784 }
[ 2830, 3393, 32067, 915, 3830, 54558, 2959, 915, 2859, 47712, 15479, 1155, 353, 8840, 836, 8, 341, 197, 18861, 11, 5635, 1669, 2506, 83386, 1321, 30314, 287, 66, 1155, 340, 6692, 3773, 1669, 45402, 38968, 31534, 1972, 7502, 340, 21169, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTxnDurations(t *testing.T) { defer leaktest.AfterTest(t)() defer log.Scope(t).Close(t) s, metrics, cleanupFn := setupMetricsTest(t) manual := s.Manual defer cleanupFn() const puts = 10 const incr int64 = 1000 for i := 0; i < puts; i++ { key := roachpb.Key(fmt.Sprintf("key-txn-durations-%d", i)) if err := s.DB.Txn(context.Background(), func(ctx context.Context, txn *kv.Txn) error { if err := txn.Put(ctx, key, []byte("val")); err != nil { return err } manual.Increment(incr) return nil }); err != nil { t.Fatal(err) } } checkTxnMetrics(t, metrics, "txn durations", puts, 0, 0, 0) hist := metrics.Durations // The clock is a bit odd in these tests, so I can't test the mean without // introducing spurious errors or being overly lax. // // TODO(cdo): look into cause of variance. if a, e := hist.TotalCount(), int64(puts); a != e { t.Fatalf("durations %d != expected %d", a, e) } // Metrics lose fidelity, so we can't compare incr directly. if min, thresh := hist.Min(), incr-10; min < thresh { t.Fatalf("min %d < %d", min, thresh) } }
explode_data.jsonl/76897
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 446 }
[ 2830, 3393, 31584, 77, 35, 21449, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 16867, 1487, 77940, 1155, 568, 7925, 1155, 340, 1903, 11, 16734, 11, 21290, 24911, 1669, 6505, 27328, 2271, 1155, 340, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestOrphanReject(t *testing.T) { t.Parallel() harness, outputs, err := newPoolHarness(&chaincfg.MainNetParams) if err != nil { t.Fatalf("unable to create test pool: %v", err) } tc := &testContext{t, harness} // Create a chain of transactions rooted with the first spendable output // provided by the harness. maxOrphans := uint32(harness.txPool.cfg.Policy.MaxOrphanTxs) chainedTxns, err := harness.CreateTxChain(outputs[0], maxOrphans+1) if err != nil { t.Fatalf("unable to create transaction chain: %v", err) } // Ensure orphans are rejected when the allow orphans flag is not set. for _, tx := range chainedTxns[1:] { acceptedTxns, err := harness.txPool.ProcessTransaction(tx, false, false, 0) if err == nil { t.Fatalf("ProcessTransaction: did not fail on orphan "+ "%v when allow orphans flag is false", tx.Hash()) } expectedErr := RuleError{} if reflect.TypeOf(err) != reflect.TypeOf(expectedErr) { t.Fatalf("ProcessTransaction: wrong error got: <%T> %v, "+ "want: <%T>", err, err, expectedErr) } code, extracted := extractRejectCode(err) if !extracted { t.Fatalf("ProcessTransaction: failed to extract reject "+ "code from error %q", err) } if code != protos.RejectDuplicate { t.Fatalf("ProcessTransaction: unexpected reject code "+ "-- got %v, want %v", code, protos.RejectDuplicate) } // Ensure no transactions were reported as accepted. if len(acceptedTxns) != 0 { t.Fatal("ProcessTransaction: reported %d accepted "+ "transactions from failed orphan attempt", len(acceptedTxns)) } // Ensure the transaction is not in the orphan pool, not in the // transaction pool, and not reported as available testPoolMembership(tc, tx, false, false) } }
explode_data.jsonl/53796
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 621 }
[ 2830, 3393, 2195, 9943, 78413, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 9598, 23518, 11, 16275, 11, 1848, 1669, 501, 10551, 74248, 2099, 8819, 14072, 23873, 6954, 4870, 340, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestGocloak_GetUserGroups(t *testing.T) { t.Parallel() cfg := GetConfig(t) client := NewClientWithDebug(t) token := GetAdminToken(t, client) tearDownUser, userID := CreateUser(t, client) defer tearDownUser() tearDownGroup, groupID := CreateGroup(t, client) defer tearDownGroup() err := client.AddUserToGroup( token.AccessToken, cfg.GoCloak.Realm, userID, groupID, ) FailIfErr(t, err, "AddUserToGroup failed") groups, err := client.GetUserGroups( token.AccessToken, cfg.GoCloak.Realm, userID) FailIfErr(t, err, "GetUserGroups failed") FailIf( t, len(groups) == 0, "User is not in the Group") AssertEquals( t, groupID, groups[0].ID) }
explode_data.jsonl/79560
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 289 }
[ 2830, 3393, 38, 509, 385, 585, 13614, 1474, 22173, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 50286, 1669, 2126, 2648, 1155, 340, 25291, 1669, 1532, 2959, 2354, 7939, 1155, 340, 43947, 1669, 2126, 7210, 3323, 1155, 11, 2943, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHostQueueWriteBatches(t *testing.T) { for _, opts := range []Options{ newHostQueueTestOptions().SetUseV2BatchAPIs(false), newHostQueueTestOptions().SetUseV2BatchAPIs(true), } { t.Run(fmt.Sprintf("useV2: %v", opts.UseV2BatchAPIs()), func(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() mockConnPool := NewMockconnectionPool(ctrl) opts := newHostQueueTestOptions() queue := newTestHostQueue(opts) queue.connPool = mockConnPool // Open mockConnPool.EXPECT().Open() queue.Open() assert.Equal(t, statusOpen, queue.status) // Prepare callback for writes var ( results []hostQueueResult wg sync.WaitGroup ) callback := func(r interface{}, err error) { results = append(results, hostQueueResult{r, err}) wg.Done() } // Prepare writes writes := []*writeOperation{ testWriteOp("testNs", "foo", 1.0, 1000, rpc.TimeType_UNIX_SECONDS, callback), testWriteOp("testNs", "bar", 2.0, 2000, rpc.TimeType_UNIX_SECONDS, callback), testWriteOp("testNs", "baz", 3.0, 3000, rpc.TimeType_UNIX_SECONDS, callback), testWriteOp("testNs", "qux", 4.0, 4000, rpc.TimeType_UNIX_SECONDS, callback), } wg.Add(len(writes)) for i, write := range writes[:3] { assert.NoError(t, queue.Enqueue(write)) assert.Equal(t, i+1, queue.Len()) // Sleep some so that we can ensure flushing is not happening until queue is full time.Sleep(20 * time.Millisecond) } // Prepare mocks for flush mockClient := rpc.NewMockTChanNode(ctrl) if opts.UseV2BatchAPIs() { writeBatch := func(ctx thrift.Context, req *rpc.WriteBatchRawV2Request) { for i, write := range writes { assert.Equal(t, req.Elements[i].NameSpace, 0) assert.Equal(t, req.Elements[i].ID, write.request.ID) assert.Equal(t, req.Elements[i].Datapoint, write.request.Datapoint) } } mockClient.EXPECT().WriteBatchRawV2(gomock.Any(), gomock.Any()).Do(writeBatch).Return(nil) } else { writeBatch := func(ctx thrift.Context, req *rpc.WriteBatchRawRequest) { for i, write := range writes { assert.Equal(t, req.Elements[i].ID, write.request.ID) assert.Equal(t, 
req.Elements[i].Datapoint, write.request.Datapoint) } } mockClient.EXPECT().WriteBatchRaw(gomock.Any(), gomock.Any()).Do(writeBatch).Return(nil) } mockConnPool.EXPECT().NextClient().Return(mockClient, &noopPooledChannel{}, nil) // Final write will flush assert.NoError(t, queue.Enqueue(writes[3])) assert.Equal(t, 0, queue.Len()) // Wait for all writes wg.Wait() // Assert writes successful assert.Equal(t, len(writes), len(results)) for _, result := range results { assert.Nil(t, result.err) } // Close var closeWg sync.WaitGroup closeWg.Add(1) mockConnPool.EXPECT().Close().Do(func() { closeWg.Done() }) queue.Close() closeWg.Wait() }) } }
explode_data.jsonl/54463
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1273 }
[ 2830, 3393, 9296, 7554, 7985, 33, 9118, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 12185, 1669, 2088, 3056, 3798, 515, 197, 8638, 9296, 7554, 2271, 3798, 1005, 1649, 10253, 53, 17, 21074, 7082, 82, 3576, 1326, 197, 8638, 9296, 7554, 22...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAppCalls(t *testing.T) { assert, require := assert.New(t), require.New(t) for i, c := range GetClients() { // get an offset of height to avoid racing and guessing s, err := c.Status() require.Nil(err, "%d: %+v", i, err) // sh is start height or status height sh := s.SyncInfo.LatestBlockHeight // look for the future h := sh + 2 _, err = c.Block(&h) assert.NotNil(err) // no block yet // write something k, v, tx := MakeTxKV() bres, err := c.BroadcastTxCommit(tx) require.Nil(err, "%d: %+v", i, err) require.True(bres.DeliverTx.IsOK()) txh := bres.Height apph := txh + 1 // this is where the tx will be applied to the state // wait before querying if err := client.WaitForHeight(c, apph, nil); err != nil { t.Error(err) } _qres, err := c.ABCIQueryWithOptions("/key", k, client.ABCIQueryOptions{Trusted: true}) qres := _qres.Response if assert.Nil(err) && assert.True(qres.IsOK()) { // assert.Equal(k, data.GetKey()) // only returned for proofs assert.EqualValues(v, qres.Value) } // make sure we can lookup the tx with proof ptx, err := c.Tx(bres.Hash, true) require.Nil(err, "%d: %+v", i, err) assert.EqualValues(txh, ptx.Height) assert.EqualValues(tx, ptx.Tx) // and we can even check the block is added block, err := c.Block(&apph) require.Nil(err, "%d: %+v", i, err) appHash := block.BlockMeta.Header.LastAppHash assert.True(len(appHash) > 0) assert.EqualValues(apph, block.BlockMeta.Header.Height) // now check the results blockResults, err := c.BlockResults(&txh) require.Nil(err, "%d: %+v", i, err) assert.Equal(txh, blockResults.Height) if assert.Equal(1, len(blockResults.Results.DeliverTx)) { // check success code assert.EqualValues(0, blockResults.Results.DeliverTx[0].Code) } // check blockchain info, now that we know there is info info, err := c.BlockchainInfo(apph, apph) require.Nil(err, "%d: %+v", i, err) assert.True(info.LastHeight >= apph) if assert.Equal(1, len(info.BlockMetas)) { lastMeta := info.BlockMetas[0] assert.EqualValues(apph, lastMeta.Header.Height) bMeta 
:= block.BlockMeta assert.Equal(bMeta.Header.LastAppHash, lastMeta.Header.LastAppHash) assert.Equal(bMeta.BlockID, lastMeta.BlockID) } // and get the corresponding commit with the same apphash commit, err := c.Commit(&apph) require.Nil(err, "%d: %+v", i, err) cappHash := commit.Header.LastAppHash assert.Equal(appHash, cappHash) assert.NotNil(commit.Commit) // compare the commits (note Commit(2) has commit from Block(3)) h = apph - 1 commit2, err := c.Commit(&h) require.Nil(err, "%d: %+v", i, err) assert.Equal(block.Block.LastCommit, commit2.Commit) // and we got a proof that works! _pres, err := c.ABCIQueryWithOptions("/key", k, client.ABCIQueryOptions{Trusted: false}) pres := _pres.Response assert.Nil(err) assert.True(pres.IsOK()) } }
explode_data.jsonl/48947
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1179 }
[ 2830, 3393, 2164, 55292, 1155, 353, 8840, 836, 8, 341, 6948, 11, 1373, 1669, 2060, 7121, 1155, 701, 1373, 7121, 1155, 340, 2023, 600, 11, 272, 1669, 2088, 2126, 47174, 368, 1476, 197, 197, 322, 633, 458, 4347, 315, 2608, 311, 5648, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestWriteWithUnitializedStdWriter(t *testing.T) { writer := StdWriter{ Writer: nil, prefix: Stdout, sizeBuf: make([]byte, 4), } n, err := writer.Write([]byte("Something here")) if n != 0 || err == nil { t.Fatalf("Should fail when given an uncomplete or uninitialized StdWriter") } }
explode_data.jsonl/52153
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 114 }
[ 2830, 3393, 7985, 2354, 1806, 1559, 1506, 22748, 6492, 1155, 353, 8840, 836, 8, 341, 38959, 1669, 42517, 6492, 515, 197, 197, 6492, 25, 220, 2092, 345, 197, 3223, 5060, 25, 220, 42517, 411, 345, 197, 13832, 15064, 25, 1281, 10556, 378...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNormalizeError(t *testing.T) { expected := "Typical Error" if s := normalizeErr("-ERR '" + expected + "'"); s != expected { t.Fatalf("Expected '%s', got '%s'", expected, s) } expected = "Trim Surrounding Spaces" if s := normalizeErr("-ERR '" + expected + "' "); s != expected { t.Fatalf("Expected '%s', got '%s'", expected, s) } expected = "Trim Surrounding Spaces Without Quotes" if s := normalizeErr("-ERR " + expected + " "); s != expected { t.Fatalf("Expected '%s', got '%s'", expected, s) } expected = "Error Without Quotes" if s := normalizeErr("-ERR " + expected); s != expected { t.Fatalf("Expected '%s', got '%s'", expected, s) } expected = "Error With Quote Only On Left" if s := normalizeErr("-ERR '" + expected); s != expected { t.Fatalf("Expected '%s', got '%s'", expected, s) } expected = "Error With Quote Only On Right" if s := normalizeErr("-ERR " + expected + "'"); s != expected { t.Fatalf("Expected '%s', got '%s'", expected, s) } }
explode_data.jsonl/44917
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 396 }
[ 2830, 3393, 87824, 1454, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 330, 12834, 938, 4600, 698, 743, 274, 1669, 21694, 7747, 13645, 2650, 7127, 488, 3601, 488, 7178, 5038, 274, 961, 3601, 341, 197, 3244, 30762, 445, 18896, 7677, 82, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetConfigTx(t *testing.T) { rl := NewRAMLedger(10) for i := 0; i < 5; i++ { rl.Append(ordererledger.CreateNextBlock(rl, []*cb.Envelope{makeNormalTx(provisional.TestChainID, i)})) } rl.Append(ordererledger.CreateNextBlock(rl, []*cb.Envelope{makeConfigTx(provisional.TestChainID, 5)})) ctx := makeConfigTx(provisional.TestChainID, 6) rl.Append(ordererledger.CreateNextBlock(rl, []*cb.Envelope{ctx})) block := ordererledger.CreateNextBlock(rl, []*cb.Envelope{makeNormalTx(provisional.TestChainID, 7)}) block.Metadata.Metadata[cb.BlockMetadataIndex_LAST_CONFIGURATION] = utils.MarshalOrPanic(&cb.Metadata{Value: utils.MarshalOrPanic(&cb.LastConfig{Index: 7})}) rl.Append(block) pctx := getConfigTx(rl) if !reflect.DeepEqual(ctx, pctx) { t.Fatalf("Did not select most recent config transaction") } }
explode_data.jsonl/32473
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 1949, 2648, 31584, 1155, 353, 8840, 836, 8, 341, 197, 2381, 1669, 1532, 49, 31102, 291, 1389, 7, 16, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 20, 26, 600, 1027, 341, 197, 197, 2381, 8982, 19385, 261, 50704...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetAccountWithdrawalFee(t *testing.T) { resp, err := o.GetAccountWithdrawalFee("") if areTestAPIKeysSet() { if err != nil { t.Error(err) } if len(resp) == 0 { t.Error("Expected fees") } } else if !areTestAPIKeysSet() && err == nil { t.Error("Expecting an error when no keys are set") } }
explode_data.jsonl/30143
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 1949, 7365, 92261, 278, 41941, 1155, 353, 8840, 836, 8, 341, 34653, 11, 1848, 1669, 297, 2234, 7365, 92261, 278, 41941, 31764, 743, 525, 2271, 7082, 8850, 1649, 368, 341, 197, 743, 1848, 961, 2092, 341, 298, 3244, 6141, 39...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestDockerCoordinator_ConcurrentPulls(t *testing.T) { t.Parallel() image := "foo" imageID := uuid.Generate() mapping := map[string]string{imageID: image} // Add a delay so we can get multiple queued up mock := newMockImageClient(mapping, 10*time.Millisecond) config := &dockerCoordinatorConfig{ logger: testlog.Logger(t), cleanup: true, client: mock, removeDelay: 100 * time.Millisecond, } // Create a coordinator coordinator := NewDockerCoordinator(config) id := "" for i := 0; i < 10; i++ { go func() { id, _ = coordinator.PullImage(image, nil, uuid.Generate(), nil) }() } testutil.WaitForResult(func() (bool, error) { p := mock.pulled[image] if p >= 10 { return false, fmt.Errorf("Wrong number of pulls: %d", p) } // Check the reference count if references := coordinator.imageRefCount[id]; len(references) != 10 { return false, fmt.Errorf("Got reference count %d; want %d", len(references), 10) } // Ensure there is no pull future if len(coordinator.pullFutures) != 0 { return false, fmt.Errorf("Pull future exists after pull finished") } return true, nil }, func(err error) { t.Fatalf("err: %v", err) }) }
explode_data.jsonl/63470
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 461 }
[ 2830, 3393, 35, 13659, 64304, 15100, 3231, 36068, 82, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 31426, 1669, 330, 7975, 698, 31426, 915, 1669, 16040, 57582, 741, 2109, 3629, 1669, 2415, 14032, 30953, 90, 1805, 915, 25, 2168,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestMethodHelp(t *testing.T) { t.Parallel() tests := []struct { name string method string reflectType reflect.Type defaults map[int]reflect.Value resultTypes []interface{} help string }{ { name: "command with no args or results", method: "test", reflectType: func() reflect.Type { type s struct{} return reflect.TypeOf((*s)(nil)) }(), help: "test\n\ntest--synopsis\n\n" + "help-arguments:\nhelp-arguments-none\n\n" + "help-result:\nhelp-result-nothing\n", }, { name: "command with no args and one primitive result", method: "test", reflectType: func() reflect.Type { type s struct{} return reflect.TypeOf((*s)(nil)) }(), resultTypes: []interface{}{(*int64)(nil)}, help: "test\n\ntest--synopsis\n\n" + "help-arguments:\nhelp-arguments-none\n\n" + "help-result:\nn (json-type-numeric) test--result0\n", }, { name: "command with no args and two results", method: "test", reflectType: func() reflect.Type { type s struct{} return reflect.TypeOf((*s)(nil)) }(), resultTypes: []interface{}{(*int64)(nil), nil}, help: "test\n\ntest--synopsis\n\n" + "help-arguments:\nhelp-arguments-none\n\n" + "help-result (test--condition0):\nn (json-type-numeric) test--result0\n\n" + "help-result (test--condition1):\nhelp-result-nothing\n", }, { name: "command with primitive arg and no results", method: "test", reflectType: func() reflect.Type { type s struct { Field bool } return reflect.TypeOf((*s)(nil)) }(), help: "test field\n\ntest--synopsis\n\n" + "help-arguments:\n1. field (json-type-bool, help-required) test-field\n\n" + "help-result:\nhelp-result-nothing\n", }, { name: "command with primitive optional and no results", method: "test", reflectType: func() reflect.Type { type s struct { Field *bool } return reflect.TypeOf((*s)(nil)) }(), help: "test (field)\n\ntest--synopsis\n\n" + "help-arguments:\n1. 
field (json-type-bool, help-optional) test-field\n\n" + "help-result:\nhelp-result-nothing\n", }, } xT := func(key string) string { return key } t.Logf("Running %d tests", len(tests)) for i, test := range tests { help := btcjson.TestMethodHelp(xT, test.reflectType, test.defaults, test.method, test.resultTypes) if help != test.help { t.Errorf("Test #%d (%s) unexpected help - got:\n%v\n"+ "want:\n%v", i, test.name, help, test.help) continue } } }
explode_data.jsonl/14413
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1116 }
[ 2830, 3393, 3523, 12689, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 286, 914, 198, 197, 42257, 414, 914, 198, 197, 197, 34913, 929, 8708, 10184, 198, 197, 11940, 82, 262, 2415, 186...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAsyncGetRequest(t *testing.T) { t.Log("Sending GET async request... (expected http code: 200)") req := NewRequest() ch := make(chan *AsyncResponse) for i := 0; i <= 100; i++ { req.AsyncGet("http://httpbin.org/get", ch) } for i := 0; i <= 100; i++ { aRes := <-ch if aRes.Err != nil { t.Error(aRes.Err) } if aRes.Resp.GetStatusCode() != 200 { t.Error( "For", "GET http://httpbin.org/get", "expected", 200, "got", aRes.Resp.GetStatusCode(), ) } } }
explode_data.jsonl/74145
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 224 }
[ 2830, 3393, 6525, 1949, 1900, 1155, 353, 8840, 836, 8, 341, 3244, 5247, 445, 49282, 7890, 3312, 1681, 1112, 320, 7325, 1758, 2038, 25, 220, 17, 15, 15, 95377, 24395, 1669, 1532, 1900, 741, 23049, 1669, 1281, 35190, 353, 6525, 2582, 69...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestAltKeyring_SaveMultisig(t *testing.T) { keyring, err := New(t.Name(), BackendTest, t.TempDir(), nil) require.NoError(t, err) mnemonic1, _, err := keyring.NewMnemonic("key1", English, sdk.FullFundraiserPath, hd.Secp256k1) require.NoError(t, err) mnemonic2, _, err := keyring.NewMnemonic("key2", English, sdk.FullFundraiserPath, hd.Secp256k1) require.NoError(t, err) key := "multi" pub := multisig.NewLegacyAminoPubKey( 2, []types.PubKey{ &secp256k1.PubKey{Key: mnemonic1.GetPubKey().Bytes()}, &secp256k1.PubKey{Key: mnemonic2.GetPubKey().Bytes()}, }, ) info, err := keyring.SaveMultisig(key, pub) require.Nil(t, err) require.Equal(t, pub, info.GetPubKey()) require.Equal(t, key, info.GetName()) list, err := keyring.List() require.NoError(t, err) require.Len(t, list, 3) }
explode_data.jsonl/73459
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 356 }
[ 2830, 3393, 26017, 1592, 12640, 78746, 40404, 285, 343, 1155, 353, 8840, 836, 8, 341, 23634, 12640, 11, 1848, 1669, 1532, 1155, 2967, 1507, 55260, 2271, 11, 259, 65009, 6184, 1507, 2092, 340, 17957, 35699, 1155, 11, 1848, 692, 2109, 707...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewMutation(t *testing.T) { mutation := NewMutation("column", "+=", 1) mutationStr, _ := json.Marshal(mutation) expected := `["column","+=",1]` if string(mutationStr) != expected { t.Error("mutation is not correctly formatted") } }
explode_data.jsonl/37850
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 3564, 53998, 1155, 353, 8840, 836, 8, 341, 2109, 22705, 1669, 1532, 53998, 445, 6229, 497, 6630, 21369, 220, 16, 340, 2109, 22705, 2580, 11, 716, 1669, 2951, 37271, 1255, 22705, 340, 42400, 1669, 1565, 1183, 6229, 57126, 213...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSnapshotDeleteTypeCheck(t *testing.T) { t.Parallel() runner := testenv.NewInProcRunner(t) e := testenv.NewCLITest(t, testenv.RepoFormatNotImportant, runner) defer e.RunAndExpectSuccess(t, "repo", "disconnect") e.RunAndExpectSuccess(t, "repo", "create", "filesystem", "--path", e.RepoDir) lines := e.RunAndExpectSuccess(t, "manifest", "ls") if len(lines) != 2 { t.Fatalf("Expected 2 line global policy + maintenance config output for manifest ls") } for _, line := range lines { fields := strings.Fields(line) manifestID := fields[0] typeField := fields[5] typeVal := strings.TrimPrefix(typeField, "type:") if typeVal == "maintenance" { continue } if typeVal != "policy" { t.Fatalf("Expected global policy manifest on a fresh repo") } e.RunAndExpectFailure(t, "snapshot", "delete", manifestID) } }
explode_data.jsonl/61158
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 319 }
[ 2830, 3393, 15009, 6435, 929, 3973, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 41736, 1669, 1273, 3160, 7121, 641, 24508, 19486, 1155, 340, 7727, 1669, 1273, 3160, 7121, 3140, 952, 477, 1155, 11, 1273, 3160, 2817, 5368,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPaymentsService_Cancel(t *testing.T) { setup() defer teardown() id := "tr_WDqYK6vllg" _ = tClient.WithAuthenticationValue("test_token") tMux.HandleFunc("/v2/payments/"+id, func(w http.ResponseWriter, r *http.Request) { testHeader(t, r, AuthHeader, "Bearer test_token") testMethod(t, r, "DELETE") if _, ok := r.Header[AuthHeader]; !ok { w.WriteHeader(http.StatusUnauthorized) } w.WriteHeader(http.StatusOK) _, _ = w.Write([]byte(testdata.CancelPaymentResponse)) }) res, err := tClient.Payments.Cancel(id) if err != nil { t.Fatalf("%+v", err) } if res.ID != id { t.Errorf("mismatching info. want %v, got %v", id, res.ID) } }
explode_data.jsonl/16984
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 290 }
[ 2830, 3393, 87646, 1860, 97485, 1155, 353, 8840, 836, 8, 341, 84571, 741, 16867, 49304, 741, 15710, 1669, 330, 376, 2763, 35, 80, 56, 42, 21, 85, 654, 70, 698, 197, 62, 284, 259, 2959, 26124, 19297, 1130, 445, 1944, 6458, 1138, 3244...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPrepareIdempotency(t *testing.T) { t.Parallel() conn := mustConnectString(t, os.Getenv("PGX_TEST_DATABASE")) defer closeConn(t, conn) for i := 0; i < 2; i++ { _, err := conn.Prepare(context.Background(), "test", "select 42::integer") if err != nil { t.Fatalf("%d. Unable to prepare statement: %v", i, err) } var n int32 err = conn.QueryRow(context.Background(), "test").Scan(&n) if err != nil { t.Errorf("%d. Executing prepared statement failed: %v", i, err) } if n != int32(42) { t.Errorf("%d. Prepared statement did not return expected value: %v", i, n) } } _, err := conn.Prepare(context.Background(), "test", "select 'fail'::varchar") if err == nil { t.Fatalf("Prepare statement with same name but different SQL should have failed but it didn't") return } }
explode_data.jsonl/40015
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 316 }
[ 2830, 3393, 50590, 764, 3262, 354, 2251, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 32917, 1669, 1969, 14611, 703, 1155, 11, 2643, 64883, 445, 11383, 55, 11641, 45510, 5455, 16867, 3265, 9701, 1155, 11, 4534, 692, 2023, 600,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestRejectsUnreferencedImagePullSecrets(t *testing.T) { ns := "myns" admit := NewServiceAccount() informerFactory := informers.NewSharedInformerFactory(nil, controller.NoResyncPeriodFunc()) admit.SetExternalKubeInformerFactory(informerFactory) admit.LimitSecretReferences = true admit.RequireAPIToken = false // Add the default service account for the ns into the cache informerFactory.Core().V1().ServiceAccounts().Informer().GetStore().Add(&corev1.ServiceAccount{ ObjectMeta: metav1.ObjectMeta{ Name: DefaultServiceAccountName, Namespace: ns, }, }) pod := &api.Pod{ Spec: api.PodSpec{ ImagePullSecrets: []api.LocalObjectReference{{Name: "foo"}}, }, } attrs := admission.NewAttributesRecord(pod, nil, api.Kind("Pod").WithVersion("version"), ns, "myname", api.Resource("pods").WithVersion("version"), "", admission.Create, false, nil) err := admit.Admit(attrs) if err == nil { t.Errorf("Expected rejection for using a secret the service account does not reference") } }
explode_data.jsonl/61353
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 342 }
[ 2830, 3393, 78413, 82, 1806, 41160, 5767, 1906, 36068, 19773, 82, 1155, 353, 8840, 836, 8, 341, 84041, 1669, 330, 76, 1872, 82, 1837, 98780, 1763, 1669, 1532, 1860, 7365, 741, 17430, 34527, 4153, 1669, 6051, 388, 7121, 16997, 641, 34527...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestAssert_String(t *testing.T) { ins := &vm.Assert{Left: "0", Right: "1", Final: "2", Op: "==", Pos: "pos"} assert.Equal(t, "assert($0 == $1)", ins.String()) }
explode_data.jsonl/9147
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 8534, 31777, 1155, 353, 8840, 836, 8, 341, 197, 1330, 1669, 609, 7338, 11711, 90, 5415, 25, 330, 15, 497, 10083, 25, 330, 16, 497, 13023, 25, 330, 17, 497, 10672, 25, 98651, 497, 18876, 25, 330, 966, 16707, 6948, 12808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestUpdateL7Policy(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() HandleL7PolicyUpdateSuccessfully(t) client := fake.ServiceClient() newName := "NewL7PolicyName" redirectURL := "http://www.new-example.com" actual, err := l7policies.Update(client, "8a1412f0-4c32-4257-8b07-af4770b604fd", l7policies.UpdateOpts{ Name: &newName, Action: l7policies.ActionRedirectToURL, RedirectURL: &redirectURL, }).Extract() if err != nil { t.Fatalf("Unexpected Update error: %v", err) } th.CheckDeepEquals(t, L7PolicyUpdated, *actual) }
explode_data.jsonl/79637
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 247 }
[ 2830, 3393, 4289, 43, 22, 13825, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 741, 197, 6999, 43, 22, 13825, 4289, 35959, 1155, 692, 25291, 1669, 12418, 13860, 2959, 741, 8638, 675, 1669, 330, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNewMetric(t *testing.T) { name := "test.metric" ts := uint64(1e9) value := 2.0 tags := []string{"tag:value"} metric := newMetric(name, ts, value, tags) assert.Equal(t, "test.metric", *metric.Metric) // Assert timestamp conversion from uint64 ns to float64 s assert.Equal(t, 1.0, *metric.Points[0][0]) // Assert value assert.Equal(t, 2.0, *metric.Points[0][1]) // Assert tags assert.Equal(t, []string{"tag:value"}, metric.Tags) }
explode_data.jsonl/46795
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 3564, 54310, 1155, 353, 8840, 836, 8, 341, 11609, 1669, 330, 1944, 85816, 698, 57441, 1669, 2622, 21, 19, 7, 16, 68, 24, 340, 16309, 1669, 220, 17, 13, 15, 198, 3244, 2032, 1669, 3056, 917, 4913, 4578, 62856, 63159, 2109...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestHostStyleBucketGetBucketLocation(t *testing.T) { s := s3.New(unit.Session) req, _ := s.GetBucketLocationRequest(&s3.GetBucketLocationInput{ Bucket: aws.String("bucket"), }) req.Build() require.NoError(t, req.Error) u, _ := url.Parse(req.HTTPRequest.URL.String()) assert.NotContains(t, u.Host, "bucket") assert.Contains(t, u.Path, "bucket") }
explode_data.jsonl/9972
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 147 }
[ 2830, 3393, 9296, 2323, 36018, 1949, 36018, 4707, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 274, 18, 7121, 24144, 20674, 340, 24395, 11, 716, 1669, 274, 2234, 36018, 4707, 1900, 2099, 82, 18, 2234, 36018, 4707, 2505, 515, 197, 12791, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthGet(t *testing.T) { t.Parallel() ab := authboss.New() responder := &mocks.Responder{} ab.Config.Core.Responder = responder a := &Auth{ab} r := mocks.Request("GET") r.URL.RawQuery = "redir=/redirectpage" if err := a.LoginGet(nil, r); err != nil { t.Error(err) } if responder.Page != PageLogin { t.Error("wanted login page, got:", responder.Page) } if responder.Status != http.StatusOK { t.Error("wanted ok status, got:", responder.Status) } if got := responder.Data[authboss.FormValueRedirect]; got != "/redirectpage" { t.Error("redirect page was wrong:", got) } }
explode_data.jsonl/3465
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 238 }
[ 2830, 3393, 5087, 1949, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 197, 370, 1669, 4166, 33314, 7121, 741, 10202, 20328, 1669, 609, 16712, 82, 8377, 20328, 16094, 197, 370, 10753, 12777, 8377, 20328, 284, 64034, 271, 11323, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestNewResourceFromFilenameSubPathInBaseURL(t *testing.T) { c := qt.New(t) spec := newTestResourceSpec(specDescriptor{c: c, baseURL: "https://example.com/docs"}) writeSource(t, spec.Fs, "content/a/b/logo.png", "image") bfs := afero.NewBasePathFs(spec.Fs.Source, "content") fmt.Println() r, err := spec.New(ResourceSourceDescriptor{Fs: bfs, SourceFilename: filepath.FromSlash("a/b/logo.png")}) c.Assert(err, qt.IsNil) c.Assert(r, qt.Not(qt.IsNil)) c.Assert(r.ResourceType(), qt.Equals, "image") c.Assert(r.RelPermalink(), qt.Equals, "/docs/a/b/logo.png") c.Assert(r.Permalink(), qt.Equals, "https://example.com/docs/a/b/logo.png") }
explode_data.jsonl/75611
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 285 }
[ 2830, 3393, 3564, 4783, 3830, 20759, 3136, 1820, 641, 3978, 3144, 1155, 353, 8840, 836, 8, 341, 1444, 1669, 38949, 7121, 1155, 340, 98100, 1669, 501, 2271, 4783, 8327, 38209, 11709, 90, 66, 25, 272, 11, 56741, 25, 330, 2428, 1110, 868...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSearch__fedachRoutingNumberOnlySearchRequest(t *testing.T) { u, _ := url.Parse("https://moov.io/fed/ach/search?routingNumber=044112187") req := readFEDSearchRequest(u) if req.RoutingNumber != "044112187" { t.Errorf("req.RoutingNUmber=%s", req.RoutingNumber) } if !req.routingNumberOnly() { t.Errorf("req is not routing number only") } }
explode_data.jsonl/71086
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 139 }
[ 2830, 3393, 5890, 563, 51123, 610, 24701, 2833, 7308, 5890, 1900, 1155, 353, 8840, 836, 8, 341, 10676, 11, 716, 1669, 2515, 8937, 445, 2428, 1110, 6355, 859, 4245, 6663, 291, 14, 610, 23167, 30, 73320, 2833, 28, 15, 19, 19, 16, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestMigrationType_String(t *testing.T) { if Upgrade.String() != UpgradeString { t.Error("expected upgrade type") } if Downgrade.String() != DowngradeString { t.Error("expected downgrade type") } }
explode_data.jsonl/41237
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 20168, 929, 31777, 1155, 353, 8840, 836, 8, 1476, 743, 40713, 6431, 368, 961, 40713, 703, 341, 197, 3244, 6141, 445, 7325, 13910, 943, 1138, 197, 630, 743, 6285, 6937, 6431, 368, 961, 6285, 6937, 703, 341, 197, 3244, 6141,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestFindRegistryRoot(t *testing.T) { tests := []struct { name string fs afero.Fs dir string want string }{ { name: "EmptyFilesystem", fs: afero.NewMemMapFs(), dir: "/", want: "/", }, { name: "NoRegistrySubDir", fs: func() afero.Fs { fs := afero.NewMemMapFs() fs.MkdirAll("ext-dir", 0755) afero.WriteFile(fs, "ext-dir/b.txt", []byte("file b"), 0644) return fs }(), dir: "ext-dir", want: "ext-dir", }, { // registry root actually exists as a subdir underneath the requested dir, this subdir // should be returned. This is a common case for when the registry root has been copied // to the requested location. name: "RegistrySubDir", fs: func() afero.Fs { fs := afero.NewMemMapFs() fs.MkdirAll("/ext-dir/.registry", 0755) return fs }(), dir: "/ext-dir", want: "/ext-dir/.registry", }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got := findRegistryRoot(tt.fs, tt.dir) if diff := cmp.Diff(got, tt.want); diff != "" { t.Errorf("findRegistryRoot() = %v, want %v\n%s", got, tt.want, diff) } }) } }
explode_data.jsonl/74160
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 534 }
[ 2830, 3393, 9885, 15603, 8439, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, 53584, 256, 264, 802, 78, 991, 82, 198, 197, 48532, 220, 914, 198, 197, 50780, 914, 198, 197, 59403, 197, 197, 515...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestReadAndUnmarshalNonStructBody(t *testing.T) { gateway, err := benchGateway.CreateGateway( defaultTestConfig, &testGateway.Options{ LogWhitelist: map[string]bool{ "Could not read response body": true, }, KnownHTTPBackends: []string{"bar", "contacts", "google-now"}, KnownTChannelBackends: []string{"baz"}, ConfigFiles: util.DefaultConfigFiles("example-gateway"), }, exampleGateway.CreateGateway, ) if !assert.NoError(t, err) { return } defer gateway.Close() bgateway := gateway.(*benchGateway.BenchGateway) fakeEcho := func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(200) _, err := w.Write([]byte(`"foo"`)) assert.NoError(t, err) } bgateway.HTTPBackends()["bar"].HandleFunc("POST", "/bar/echo", fakeEcho) addr := bgateway.HTTPBackends()["bar"].RealAddr baseURL := "http://" + addr client := zanzibar.NewHTTPClientContext( bgateway.ActualGateway.Logger, bgateway.ActualGateway.ContextMetrics, jsonwrapper.NewDefaultJSONWrapper(), "bar", map[string]string{ "echo": "bar::echo", }, baseURL, map[string]string{}, time.Second, true, ) ctx := context.Background() req := zanzibar.NewClientHTTPRequest(ctx, "bar", "echo", "bar::echo", client) err = req.WriteJSON("POST", baseURL+"/bar/echo", nil, myJson{}) assert.NoError(t, err) res, err := req.Do() assert.NoError(t, err) var resp string assert.NoError(t, res.ReadAndUnmarshalBody(&resp)) assert.Equal(t, "foo", resp) logs := bgateway.AllLogs() assert.Len(t, logs["Finished an outgoing client HTTP request"], 1) }
explode_data.jsonl/58105
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 634 }
[ 2830, 3393, 4418, 3036, 1806, 27121, 8121, 9422, 5444, 1155, 353, 8840, 836, 8, 341, 3174, 12043, 11, 1848, 1669, 13425, 40709, 7251, 40709, 1006, 197, 11940, 2271, 2648, 345, 197, 197, 5, 1944, 40709, 22179, 515, 298, 24201, 1639, 5764...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSensorReload(t *testing.T) { sensor := setupSensor() go sensor.Start(rulesPreReload) waitStartSensor(t, sensor, rulesPreReload, 10) for ruleID := range rulesPreReload { if _, ok := sensor.ruleInformers[ruleID]; !ok { t.Errorf("Rule %s should be added", ruleID) } } rule1StartTime := sensor.ruleInformers["test-rule-0"].InformerStartTime sensor.ReloadRules(rulesReload) waitStartSensor(t, sensor, rulesReload, 10) if len(rulesReload) != len(sensor.ruleInformers) { t.Error("Rules not reloaded properly") } if _, ok := sensor.ruleInformers["test-rule-2"]; ok { t.Error("test-rule-2 should be removed") } if rule1Inf, ok := sensor.ruleInformers["test-rule-1"]; !ok { t.Error("test-rule-1 should be added") } else { if rule1Inf.Rule.EventTypes[0] != rules.ADDED { t.Error("test-rule-1 has not been properly updated") } if rule1Inf.Rule.EventTypes[1] != rules.MODIFIED { t.Error("test-rule-1 has not been properly updated") } } if rule1StartTime != sensor.ruleInformers["test-rule-0"].InformerStartTime { t.Error("test-rule-0 should not be touched") } time.Sleep(1 * time.Second) configMap := &v1.ConfigMap{ ObjectMeta: metav1.ObjectMeta{ Name: "test-configmap", Namespace: "default", }, } _, err := kubernetes.NewForConfigOrDie(sensor.KubeConfig).CoreV1().ConfigMaps("default").Create(context.Background(), configMap, metav1.CreateOptions{}) if err != nil { t.Fatalf("Failed to create configmap: %s", err) } defer func() { kubernetes.NewForConfigOrDie(sensor.KubeConfig).CoreV1().ConfigMaps("default").Delete(context.Background(), configMap.Name, metav1.DeleteOptions{}) }() switch err := checkIfObjectExistsInQueue(15, sensor, configMap, rules.ADDED); err { case errNotFound: t.Error("Configmap should be added to queue") case errTimeout: t.Error("Timeout waiting for configmap to be added to queue") } pod := &v1.Pod{ ObjectMeta: metav1.ObjectMeta{ Name: "test-pod", Namespace: "default", }, Spec: v1.PodSpec{ Containers: []v1.Container{ { Name: "test-container", Image: "test-image", }, }, 
}, } _, err = kubernetes.NewForConfigOrDie(sensor.KubeConfig).CoreV1().Pods("default").Create(context.Background(), pod, metav1.CreateOptions{}) if err != nil { t.Fatalf("Failed to create pod: %s", err) } defer func() { kubernetes.NewForConfigOrDie(sensor.KubeConfig).CoreV1().Pods("default").Delete(context.Background(), pod.Name, metav1.DeleteOptions{}) }() switch err := checkIfObjectExistsInQueue(15, sensor, pod, rules.ADDED); err { case errNotFound: t.Error("Pod should be added to queue") case errTimeout: t.Error("Timeout waiting for pod to be added to queue") } }
explode_data.jsonl/66713
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1054 }
[ 2830, 3393, 30752, 50035, 1155, 353, 8840, 836, 8, 341, 1903, 3805, 1669, 6505, 30752, 741, 30680, 12002, 12101, 90094, 4703, 50035, 340, 48750, 3479, 30752, 1155, 11, 12002, 11, 5601, 4703, 50035, 11, 220, 16, 15, 692, 2023, 5912, 915,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPKIOperation(t *testing.T) { server, _, teardown := newServer(t) defer teardown() pkcsreq := loadTestFile(t, "../scep/testdata/PKCSReq.der") body := bytes.NewReader(pkcsreq) url := server.URL + "/scep?operation=PKIOperation" resp, err := http.Post(url, "", body) if err != nil { t.Fatal(err) } if resp.StatusCode != http.StatusOK { t.Error("expected", http.StatusOK, "got", resp.StatusCode) } }
explode_data.jsonl/70655
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 176 }
[ 2830, 3393, 22242, 3810, 91774, 1155, 353, 8840, 836, 8, 341, 41057, 11, 8358, 49304, 1669, 501, 5475, 1155, 340, 16867, 49304, 741, 3223, 74, 4837, 2958, 1669, 2795, 2271, 1703, 1155, 11, 7005, 82, 31652, 12697, 691, 16341, 42, 6412, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCreateLogger(t *testing.T) { tests := []struct { name string options options expectedLevel Level }{ { name: "Production", options: options{ name: "my-service", version: "0.1.0", tags: map[string]string{ "environment": "testing", }, loggerLevel: "warn", }, expectedLevel: LevelWarn, }, { name: "LogLevelDebug", options: options{ name: "my-service", loggerLevel: "debug", }, expectedLevel: LevelDebug, }, { name: "LogLevelInfo", options: options{ name: "my-service", loggerLevel: "info", }, expectedLevel: LevelInfo, }, { name: "LogLevelWarn", options: options{ name: "my-service", loggerLevel: "warn", }, expectedLevel: LevelWarn, }, { name: "LogLevelError", options: options{ name: "my-service", loggerLevel: "error", }, expectedLevel: LevelError, }, { name: "LogLevelNone", options: options{ name: "my-service", loggerLevel: "none", }, expectedLevel: LevelNone, }, { name: "InvalidLogLevel", options: options{ name: "my-service", loggerEnabled: true, loggerLevel: "invalid", }, expectedLevel: LevelNone, }, } for _, tc := range tests { t.Run(tc.name, func(T *testing.T) { logger, close := createLogger(tc.options) defer close(context.Background()) assert.NotNil(t, logger) assert.NotNil(t, close) assert.Equal(t, tc.expectedLevel, logger.Level()) }) } }
explode_data.jsonl/15682
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 742 }
[ 2830, 3393, 4021, 7395, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 688, 914, 198, 197, 35500, 981, 2606, 198, 197, 42400, 4449, 9395, 198, 197, 59403, 197, 197, 515, 298, 11609, 25, 330, 44967, 756, 298, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestAuthConfigsToAuthFile(t *testing.T) { for _, tc := range []struct { name string server string shouldErr bool expectedContains string }{ { name: "empty auth configs", server: "", shouldErr: false, expectedContains: "{}", }, { name: "registry with a namespace prefix", server: "my-registry.local/username", shouldErr: false, expectedContains: `"my-registry.local/username":`, }, { name: "URLs are interpreted as full registries", server: "http://my-registry.local/username", shouldErr: false, expectedContains: `"my-registry.local":`, }, { name: "the old-style docker registry URL is normalized", server: "http://index.docker.io/v1/", shouldErr: false, expectedContains: `"docker.io":`, }, { name: "docker.io vendor namespace", server: "docker.io/vendor", shouldErr: false, expectedContains: `"docker.io/vendor":`, }, } { configs := map[string]types.DockerAuthConfig{} if tc.server != "" { configs[tc.server] = types.DockerAuthConfig{} } filePath, err := authConfigsToAuthFile(configs) if tc.shouldErr { assert.Error(t, err) assert.Empty(t, filePath) } else { assert.NoError(t, err) content, err := ioutil.ReadFile(filePath) require.NoError(t, err) assert.Contains(t, string(content), tc.expectedContains) os.Remove(filePath) } } }
explode_data.jsonl/33434
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 747 }
[ 2830, 3393, 5087, 84905, 1249, 5087, 1703, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 17130, 1669, 2088, 3056, 1235, 341, 197, 11609, 1797, 914, 198, 197, 41057, 1843, 914, 198, 197, 197, 5445, 7747, 286, 1807, 198, 197, 42400, 23805, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIsValidIPv6Address(t *testing.T) { goodValues := []string{ "2001:4860:4860::8888", "2a00:79e0:2:0:f1c3:e797:93c1:df80", "2001:0db8:85a3:0000:0000:8a2e:0370:7334", "::fff:1.1.1.1", "::1", "::", } for _, val := range goodValues { if msgs := IsValidIPv6Address(field.NewPath(""), val); len(msgs) != 0 { t.Errorf("expected %q to be valid IPv6 address: %v", val, msgs) } } badValues := []string{ "1.1.1.1", "1.1.1.01", "255.0.0.1", "1.0.0.0", "0.0.0.0", "[2001:db8:0:1]:80", "myhost.mydomain", "2001:0db8:85a3:0000:0000:8a2e:0370:7334:2001:0db8:85a3:0000:0000:8a2e:0370:7334", "-1.0.0.0", "[2001:db8:0:1]", "a", } for _, val := range badValues { if msgs := IsValidIPv6Address(field.NewPath(""), val); len(msgs) == 0 { t.Errorf("expected %q to be invalid IPv6 address", val) } } }
explode_data.jsonl/11827
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 458 }
[ 2830, 3393, 55470, 58056, 21, 4286, 1155, 353, 8840, 836, 8, 341, 3174, 1386, 6227, 1669, 3056, 917, 515, 197, 197, 1, 17, 15, 15, 16, 25, 19, 23, 21, 15, 25, 19, 23, 21, 15, 486, 23, 23, 23, 23, 756, 197, 197, 1, 17, 64, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestOverlap(t *testing.T) { for _, c := range testVectors { s, _ := NewUnauthenticatedCipher(hexDecode(c.key), hexDecode(c.nonce)) data := hexDecode(c.input) s.XORKeyStream(data, data) got := hex.EncodeToString(data) if got != c.output { t.Errorf("length=%v: got %#v, want %#v", len(data), got, c.output) } } }
explode_data.jsonl/80522
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 150 }
[ 2830, 3393, 82171, 1155, 353, 8840, 836, 8, 341, 2023, 8358, 272, 1669, 2088, 1273, 84744, 341, 197, 1903, 11, 716, 1669, 1532, 1806, 57707, 79460, 44660, 32564, 1337, 4735, 701, 12371, 32564, 1337, 1253, 13184, 1171, 197, 8924, 1669, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestUpload(t *testing.T) { th.SetupHTTP() defer th.TeardownHTTP() HandlePutImageDataSuccessfully(t) err := imagedata.Upload( fakeclient.ServiceClient(), "da3b75d9-3f4a-40e7-8a2c-bfab23927dea", readSeekerOfBytes([]byte{5, 3, 7, 24})).ExtractErr() th.AssertNoErr(t, err) }
explode_data.jsonl/7108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 13844, 1155, 353, 8840, 836, 8, 341, 70479, 39820, 9230, 741, 16867, 270, 94849, 37496, 9230, 2822, 197, 6999, 19103, 90583, 35959, 1155, 692, 9859, 1669, 732, 3279, 459, 86597, 1006, 197, 1166, 726, 2972, 13860, 2959, 3148, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNonexistentFile(t *testing.T) { const nonFile = "nonexistentFile.xyz" output := runCommand(nonFile, 1000) if !strings.Contains(output, "does not exist") || !notPossible(output) { t.Errorf("Incorrect outptut for nonexistent file") } }
explode_data.jsonl/66002
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 8121, 64085, 1703, 1155, 353, 8840, 836, 8, 341, 262, 733, 2477, 1703, 284, 330, 6280, 64085, 1703, 55699, 698, 262, 2550, 1669, 1598, 4062, 29191, 1703, 11, 220, 16, 15, 15, 15, 340, 262, 421, 753, 18594, 11545, 11057, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewDNSProviderConfig(t *testing.T) { testCases := []struct { desc string secretAPIKey string apiKey string expected string }{ { desc: "success", secretAPIKey: "secret", apiKey: "key", }, { desc: "missing secret API key", apiKey: "key", expected: "porkbun: some credentials information are missing", }, { desc: "missing API key", secretAPIKey: "secret", expected: "porkbun: some credentials information are missing", }, { desc: "missing all credentials", expected: "porkbun: some credentials information are missing", }, } for _, test := range testCases { t.Run(test.desc, func(t *testing.T) { config := NewDefaultConfig() config.SecretAPIKey = test.secretAPIKey config.APIKey = test.apiKey p, err := NewDNSProviderConfig(config) if test.expected == "" { require.NoError(t, err) require.NotNil(t, p) require.NotNil(t, p.config) } else { require.EqualError(t, err, test.expected) } }) } }
explode_data.jsonl/69082
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 461 }
[ 2830, 3393, 3564, 61088, 5179, 2648, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 41653, 260, 914, 198, 197, 197, 20474, 7082, 1592, 914, 198, 197, 54299, 1592, 981, 914, 198, 197, 42400, 257, 914, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestExportNestedResourceValueFromScript(t *testing.T) { t.Parallel() barResourceType := &cadence.ResourceType{ TypeID: "S.test.Bar", Identifier: "Bar", Fields: []cadence.Field{ { Identifier: "uuid", Type: cadence.UInt64Type{}, }, { Identifier: "x", Type: cadence.IntType{}, }, }, } fooResourceType := &cadence.ResourceType{ TypeID: "S.test.Foo", Identifier: "Foo", Fields: []cadence.Field{ { Identifier: "uuid", Type: cadence.UInt64Type{}, }, { Identifier: "bar", Type: barResourceType, }, }, } script := ` access(all) resource Bar { access(all) let x: Int init(x: Int) { self.x = x } } access(all) resource Foo { access(all) let bar: @Bar init(bar: @Bar) { self.bar <- bar } destroy() { destroy self.bar } } access(all) fun main(): @Foo { return <- create Foo(bar: <- create Bar(x: 42)) } ` actual := exportValueFromScript(t, script) expected := cadence.NewResource([]cadence.Value{ cadence.NewUInt64(0), cadence.NewResource([]cadence.Value{ cadence.NewUInt64(0), cadence.NewInt(42), }).WithType(barResourceType), }).WithType(fooResourceType) assert.Equal(t, expected, actual) }
explode_data.jsonl/4629
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 749 }
[ 2830, 3393, 16894, 71986, 4783, 1130, 3830, 5910, 1155, 353, 8840, 836, 8, 1476, 3244, 41288, 7957, 2822, 90709, 4783, 929, 1669, 609, 34455, 763, 20766, 929, 515, 197, 27725, 915, 25, 257, 330, 50, 5958, 40520, 756, 197, 197, 8714, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBuildMultiStageParentConfig(t *testing.T) { skip.If(t, versions.LessThan(testEnv.DaemonAPIVersion(), "1.35"), "broken in earlier versions") skip.If(t, testEnv.DaemonInfo.OSType == "windows", "FIXME") dockerfile := ` FROM busybox AS stage0 ENV WHO=parent WORKDIR /foo FROM stage0 ENV WHO=sibling1 WORKDIR sub1 FROM stage0 WORKDIR sub2 ` ctx := context.Background() source := fakecontext.New(t, "", fakecontext.WithDockerfile(dockerfile)) defer source.Close() apiclient := testEnv.APIClient() resp, err := apiclient.ImageBuild(ctx, source.AsTarReader(t), types.ImageBuildOptions{ Remove: true, ForceRemove: true, Tags: []string{"build1"}, }) assert.NilError(t, err) _, err = io.Copy(ioutil.Discard, resp.Body) resp.Body.Close() assert.NilError(t, err) image, _, err := apiclient.ImageInspectWithRaw(ctx, "build1") assert.NilError(t, err) expected := "/foo/sub2" if testEnv.DaemonInfo.OSType == "windows" { expected = `C:\foo\sub2` } assert.Check(t, is.Equal(expected, image.Config.WorkingDir)) assert.Check(t, is.Contains(image.Config.Env, "WHO=parent")) }
explode_data.jsonl/82581
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 468 }
[ 2830, 3393, 11066, 20358, 19398, 8387, 2648, 1155, 353, 8840, 836, 8, 341, 1903, 13389, 32901, 1155, 11, 10795, 1214, 433, 26067, 8623, 14359, 909, 64, 7291, 7082, 5637, 1507, 330, 16, 13, 18, 20, 3975, 330, 48909, 304, 6788, 10795, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestUserCount(t *testing.T) { controller := gomock.NewController(t) // restore the default prometheus registerer // when the unit test is complete. snapshot := prometheus.DefaultRegisterer defer func() { prometheus.DefaultRegisterer = snapshot controller.Finish() }() // creates a blank registry registry := prometheus.NewRegistry() prometheus.DefaultRegisterer = registry // x2 repository count count := int64(5) store := mock.NewMockUserStore(controller) store.EXPECT().Count(gomock.Any()).Return(count, nil) UserCount(store) metrics, err := registry.Gather() if err != nil { t.Error(err) return } if want, got := len(metrics), 1; want != got { t.Errorf("Expect registered metric") return } metric := metrics[0] if want, got := metric.GetName(), "drone_user_count"; want != got { t.Errorf("Expect metric name %s, got %s", want, got) } if want, got := metric.Metric[0].Gauge.GetValue(), float64(count); want != got { t.Errorf("Expect metric value %f, got %f", want, got) } }
explode_data.jsonl/9820
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 365 }
[ 2830, 3393, 1474, 2507, 1155, 353, 8840, 836, 8, 341, 61615, 1669, 342, 316, 1176, 7121, 2051, 1155, 692, 197, 322, 14952, 279, 1638, 2706, 39705, 4161, 261, 198, 197, 322, 979, 279, 4982, 1273, 374, 4583, 624, 1903, 9601, 1669, 2706,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClhCreateVMWithInitrd(t *testing.T) { assert := assert.New(t) clhConfig, err := newClhConfig() assert.NoError(err) clhConfig.ImagePath = "" clhConfig.InitrdPath = testClhInitrdPath store, err := persist.GetDriver() assert.NoError(err) clhConfig.VMStorePath = store.RunVMStoragePath() clhConfig.RunStorePath = store.RunStoragePath() network, err := NewNetwork() assert.NoError(err) clh := &cloudHypervisor{ config: clhConfig, } sandbox := &Sandbox{ ctx: context.Background(), id: "testSandbox", config: &SandboxConfig{ HypervisorConfig: clhConfig, }, } err = clh.CreateVM(context.Background(), sandbox.id, network, &sandbox.config.HypervisorConfig) assert.NoError(err) assert.Exactly(clhConfig, clh.config) }
explode_data.jsonl/68500
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 5066, 71, 4021, 11187, 2354, 3803, 6498, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 7121, 1155, 692, 39407, 71, 2648, 11, 1848, 1669, 501, 5066, 71, 2648, 741, 6948, 35699, 3964, 340, 39407, 71, 2648, 7528, 1820, 284,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPassthroughConn(t *testing.T) { SetLogger(testLogger{t}) connector, err := NewConnector(makeConnStr(t).String()) if err != nil { t.Error(err) } ctx, cancel := context.WithTimeout(context.Background(), time.Second*30) defer cancel() toconn, err := dialConnection(ctx, connector, connector.params) if err != nil { t.Error(err) } outbuf := newTdsBuffer(connector.params.packetSize, toconn) handshakeConn := tlsHandshakeConn{buf: outbuf} passthrough := passthroughConn{c: &handshakeConn} t.Run(`set deadline`, func(t *testing.T) { defer assertPanic(t, false) deadline := time.Now().Add(time.Millisecond * 100) err := passthrough.SetDeadline(deadline) if err != nil { t.Fatalf(`SetDeadline should return nil`) } }) t.Run(`set read deadline`, func(t *testing.T) { defer assertPanic(t, false) deadline := time.Now().Add(time.Minute) err := passthrough.SetReadDeadline(deadline) if err != nil { t.Fatalf(`SetReadDeadline should return nil`) } }) t.Run(`set write deadline`, func(t *testing.T) { defer assertPanic(t, false) deadline := time.Now().Add(time.Minute) err := passthrough.SetWriteDeadline(deadline) if err != nil { t.Fatalf(`SetWriteDeadline should return nil`) } }) t.Run(`get remote addr`, func(t *testing.T) { addr := passthrough.RemoteAddr() if addr != nil { t.Fatalf(`RemoteAddr should return nil`) } }) }
explode_data.jsonl/67692
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 557 }
[ 2830, 3393, 70911, 86901, 9701, 1155, 353, 8840, 836, 8, 341, 22212, 7395, 8623, 7395, 90, 83, 8824, 32917, 1256, 11, 1848, 1669, 1532, 35954, 36944, 9701, 2580, 1155, 568, 703, 2398, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreateAppProtectLogConfEx(t *testing.T) { t.Parallel() tests := []struct { logConf *unstructured.Unstructured expectedLogConfEx *LogConfEx wantErr bool msg string }{ { logConf: &unstructured.Unstructured{ Object: map[string]interface{}{ "spec": map[string]interface{}{ "content": map[string]interface{}{}, "filter": map[string]interface{}{}, }, }, }, expectedLogConfEx: &LogConfEx{ IsValid: true, ErrorMsg: "", }, wantErr: false, msg: "Valid LogConf", }, { logConf: &unstructured.Unstructured{ Object: map[string]interface{}{ "spec": map[string]interface{}{ "content": map[string]interface{}{}, }, }, }, expectedLogConfEx: &LogConfEx{ IsValid: false, ErrorMsg: failedValidationErrorMsg, }, wantErr: true, msg: "Invalid LogConf", }, } for _, test := range tests { test.expectedLogConfEx.Obj = test.logConf policyEx, err := createAppProtectLogConfEx(test.logConf) if (err != nil) != test.wantErr { t.Errorf("createAppProtectLogConfEx() returned %v, for the case of %s", err, test.msg) } if diff := cmp.Diff(test.expectedLogConfEx, policyEx); diff != "" { t.Errorf("createAppProtectLogConfEx() %q returned unexpected result (-want +got):\n%s", test.msg, diff) } } }
explode_data.jsonl/45875
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 609 }
[ 2830, 3393, 4021, 2164, 61547, 2201, 15578, 840, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 78216, 1669, 3056, 1235, 341, 197, 6725, 15578, 1843, 353, 359, 51143, 10616, 51143, 198, 197, 42400, 2201, 15578, 840, 353, 2201, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestMockUser_Userinfo(t *testing.T) { testUser := mockoidc.DefaultUser() testCases := map[string]struct { Scope []string ExpectedEmail string ExpectedPhone string ExpectedGroups []string }{ "all scopes": { Scope: []string{"openid", "email", "profile", "groups"}, ExpectedEmail: testUser.Email, ExpectedPhone: testUser.Phone, ExpectedGroups: testUser.Groups, }, "missing groups scope": { Scope: []string{"openid", "email", "profile"}, ExpectedEmail: testUser.Email, ExpectedPhone: testUser.Phone, ExpectedGroups: nil, }, "missing profile scope": { Scope: []string{"openid", "email", "groups"}, ExpectedEmail: testUser.Email, ExpectedPhone: "", ExpectedGroups: testUser.Groups, }, "missing email scope": { Scope: []string{"openid", "profile", "groups"}, ExpectedEmail: "", ExpectedPhone: testUser.Phone, ExpectedGroups: testUser.Groups, }, } for name, tc := range testCases { t.Run(name, func(t *testing.T) { payload, err := testUser.Userinfo(tc.Scope) assert.NoError(t, err) data := make(map[string]interface{}) err = json.Unmarshal(payload, &data) assert.NoError(t, err) if tc.ExpectedEmail == "" { assert.Nil(t, data["email"]) } else { assert.Equal(t, tc.ExpectedEmail, data["email"]) } if tc.ExpectedPhone == "" { assert.Nil(t, data["phone_number"]) } else { assert.Equal(t, tc.ExpectedPhone, data["phone_number"]) } var groups []string if data["groups"] != nil { for _, group := range data["groups"].([]interface{}) { groups = append(groups, group.(string)) } } assert.Equal(t, tc.ExpectedGroups, groups) }) } }
explode_data.jsonl/70847
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 752 }
[ 2830, 3393, 11571, 1474, 31339, 2733, 1155, 353, 8840, 836, 8, 341, 18185, 1474, 1669, 7860, 588, 66, 13275, 1474, 741, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 7568, 2417, 688, 3056, 917, 198, 197, 197, 18896, 4781, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestLeftMoveItDoesntMerge(t *testing.T) { checkMoveTileDosentMerge(moveLeft, [][]int{[]int{0, 0, 2}, []int{3, 3, 4}}, []Tile{Tile{x: 0, y: 0, value: 2}, Tile{x: 0, y: 3, value: 4}}, t) }
explode_data.jsonl/59013
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 91 }
[ 2830, 3393, 5415, 9860, 2132, 21468, 406, 52096, 1155, 353, 8840, 836, 8, 341, 25157, 9860, 15628, 84343, 306, 52096, 34081, 5415, 11, 52931, 396, 90, 1294, 396, 90, 15, 11, 220, 15, 11, 220, 17, 2137, 3056, 396, 90, 18, 11, 220, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGetMaterialCount(t *testing.T) { resp := &http.Response{ StatusCode: http.StatusOK, Body: io.NopCloser(bytes.NewReader([]byte(`{ "voice_count": 1, "video_count": 2, "image_count": 3, "news_count": 4 }`))), } ctrl := gomock.NewController(t) defer ctrl.Finish() client := mock.NewMockHTTPClient(ctrl) client.EXPECT().Do(gomock.AssignableToTypeOf(context.TODO()), http.MethodGet, "https://api.weixin.qq.com/cgi-bin/material/get_materialcount?access_token=ACCESS_TOKEN", nil).Return(resp, nil) oa := New("APPID", "APPSECRET") oa.SetClient(wx.WithHTTPClient(client)) result := new(ResultMaterialCount) err := oa.Do(context.TODO(), "ACCESS_TOKEN", GetMaterialCount(result)) assert.Nil(t, err) assert.Equal(t, &ResultMaterialCount{ VoiceCount: 1, VideoCount: 2, ImageCount: 3, NewsCount: 4, }, result) }
explode_data.jsonl/20875
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 340 }
[ 2830, 3393, 1949, 13415, 2507, 1155, 353, 8840, 836, 8, 341, 34653, 1669, 609, 1254, 12574, 515, 197, 197, 15872, 25, 1758, 52989, 345, 197, 197, 5444, 25, 6399, 2067, 453, 51236, 799, 23158, 68587, 10556, 3782, 5809, 515, 197, 1, 986...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_GoSwaggerTestCases(t *testing.T) { if !enableGoSwaggerTests { skipNotifyGoSwagger(t) t.SkipNow() } // A list of test cases which fail on "swagger validate" at spec load time expectedLoadFailures := map[string]bool{ "fixtures/go-swagger/bugs/342/fixture-342.yaml": false, "fixtures/go-swagger/bugs/342/fixture-342-2.yaml": true, } // A list of test cases which fail on "swagger validate" expectedFailures := map[string]bool{ "fixtures/go-swagger/bugs/1010/swagger.yml": true, "fixtures/go-swagger/bugs/103/swagger.json": true, "fixtures/go-swagger/bugs/106/swagger.json": true, "fixtures/go-swagger/bugs/1171/swagger.yaml": true, "fixtures/go-swagger/bugs/1238/swagger.yaml": true, "fixtures/go-swagger/bugs/1289/fixture-1289-2.yaml": true, "fixtures/go-swagger/bugs/1289/fixture-1289.yaml": true, "fixtures/go-swagger/bugs/193/spec2.json": true, "fixtures/go-swagger/bugs/195/swagger.json": true, "fixtures/go-swagger/bugs/248/swagger.json": true, "fixtures/go-swagger/bugs/249/swagger.json": true, "fixtures/go-swagger/bugs/342/fixture-342-2.yaml": true, "fixtures/go-swagger/bugs/342/fixture-342.yaml": true, "fixtures/go-swagger/bugs/423/swagger.json": true, "fixtures/go-swagger/bugs/453/swagger.yml": true, "fixtures/go-swagger/bugs/455/swagger.yml": true, "fixtures/go-swagger/bugs/628/swagger.yml": true, "fixtures/go-swagger/bugs/733/swagger.json": false, "fixtures/go-swagger/bugs/763/swagger.yml": true, "fixtures/go-swagger/bugs/774/swagger.yml": true, "fixtures/go-swagger/bugs/776/error.yaml": true, "fixtures/go-swagger/bugs/776/item.yaml": true, "fixtures/go-swagger/bugs/809/swagger.yml": true, "fixtures/go-swagger/bugs/825/swagger.yml": true, "fixtures/go-swagger/bugs/890/path/health_check.yaml": true, "fixtures/go-swagger/bugs/981/swagger.json": true, "fixtures/go-swagger/canary/docker/swagger.json": true, "fixtures/go-swagger/canary/ms-cog-sci/swagger.json": true, "fixtures/go-swagger/codegen/azure-text-analyis.json": true, 
"fixtures/go-swagger/codegen/issue72.json": true, "fixtures/go-swagger/codegen/simplesearch.yml": true, "fixtures/go-swagger/codegen/swagger-codegen-tests.json": true, "fixtures/go-swagger/codegen/todolist.allparams.yml": true, "fixtures/go-swagger/codegen/todolist.bodyparams.yml": true, "fixtures/go-swagger/codegen/todolist.discriminators.yml": true, "fixtures/go-swagger/codegen/todolist.enums.yml": true, "fixtures/go-swagger/codegen/todolist.models.yml": true, "fixtures/go-swagger/codegen/todolist.responses.yml": true, "fixtures/go-swagger/codegen/todolist.schemavalidation.yml": true, "fixtures/go-swagger/codegen/todolist.simplepath.yml": true, "fixtures/go-swagger/codegen/todolist.simple.yml": true, "fixtures/go-swagger/codegen/todolist.url.basepath.yml": true, "fixtures/go-swagger/codegen/todolist.url.simple.yml": true, "fixtures/go-swagger/expansion/all-the-things.json": true, "fixtures/go-swagger/expansion/circularRefs.json": true, "fixtures/go-swagger/expansion/invalid-refs.json": true, "fixtures/go-swagger/expansion/params.json": true, "fixtures/go-swagger/expansion/schemas1.json": true, "fixtures/go-swagger/expansion/schemas2.json": true, "fixtures/go-swagger/petstores/petstore-expanded.json": true, "fixtures/go-swagger/petstores/petstore-simple.json": true, "fixtures/go-swagger/petstores/petstore-with-external-docs.json": true, "fixtures/go-swagger/remotes/folder/folderInteger.json": true, "fixtures/go-swagger/remotes/integer.json": true, "fixtures/go-swagger/remotes/subSchemas.json": true, "fixtures/go-swagger/specs/deeper/arrayProp.json": true, "fixtures/go-swagger/specs/deeper/stringProp.json": true, "fixtures/go-swagger/specs/refed.json": true, "fixtures/go-swagger/specs/resolution2.json": true, "fixtures/go-swagger/specs/resolution.json": true, } testGoSwaggerSpecs(t, filepath.Join(".", "fixtures", "go-swagger"), expectedFailures, expectedLoadFailures, true) }
explode_data.jsonl/28899
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2725 }
[ 2830, 3393, 2646, 78, 67714, 2271, 37302, 1155, 353, 8840, 836, 8, 341, 743, 753, 12552, 10850, 67714, 18200, 341, 197, 1903, 13389, 28962, 10850, 67714, 1155, 340, 197, 3244, 57776, 7039, 741, 197, 532, 197, 322, 362, 1140, 315, 1273, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestLocalSync(t *testing.T) { Given(t). // we've got to use Helm as this uses kubeVersion Path("helm"). When(). CreateApp(). Then(). And(func(app *Application) { FailOnErr(RunCli("app", "sync", app.Name, "--local", "testdata/helm")) }) }
explode_data.jsonl/35641
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 107 }
[ 2830, 3393, 7319, 12154, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 197, 322, 582, 3003, 2684, 311, 990, 62042, 438, 419, 5711, 80958, 5637, 198, 197, 69640, 445, 51899, 38609, 197, 197, 4498, 25829, 197, 75569, 2164, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStartClusterUpWithOverrideHostVolumesDirFlag(t *testing.T) { setUp(t) defer os.RemoveAll(testDir) defer minitesting.ResetDefaultRoundTripper() defer viper.Reset() viper.Set("host-volumes-dir", "/var/tmp/foo") clusterUpParams := determineClusterUpParameters(testConfig) clusterup.ClusterUp(testConfig, clusterUpParams, testRunner) expectedArguments := []string{ "cluster", "up", "--use-existing-config", "--host-config-dir", hostConfigDirectory, "--host-data-dir", hostDataDirectory, "--host-pv-dir", hostPvDirectory, "--host-volumes-dir", "/var/tmp/foo", "--routing-suffix", testConfig.Ip + ".nip.io", } assertCommandLineArguments(expectedArguments, t) }
explode_data.jsonl/12415
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 265 }
[ 2830, 3393, 3479, 28678, 2324, 2354, 2177, 9296, 96325, 6184, 12135, 1155, 353, 8840, 836, 8, 341, 8196, 2324, 1155, 340, 16867, 2643, 84427, 8623, 6184, 340, 16867, 1308, 275, 59855, 36660, 3675, 27497, 21884, 6922, 741, 16867, 95132, 36...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUserStoreUpdateMfaSecret(t *testing.T) { Setup() u1 := model.User{} u1.Email = model.NewId() Must(store.User().Save(&u1)) time.Sleep(100 * time.Millisecond) if err := (<-store.User().UpdateMfaSecret(u1.Id, "12345")).Err; err != nil { t.Fatal(err) } // should pass, no update will occur though if err := (<-store.User().UpdateMfaSecret("junk", "12345")).Err; err != nil { t.Fatal(err) } }
explode_data.jsonl/5106
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 172 }
[ 2830, 3393, 1474, 6093, 4289, 44, 3632, 19773, 1155, 353, 8840, 836, 8, 341, 197, 21821, 2822, 10676, 16, 1669, 1614, 7344, 16094, 10676, 16, 24066, 284, 1614, 7121, 764, 741, 9209, 590, 31200, 7344, 1005, 8784, 2099, 84, 16, 4390, 21...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSymmetricallyEncrypted(t *testing.T) { expected := "Symmetrically encrypted.\n" prompt := func(keys []Key, symmetric bool) ([]byte, error) { if len(keys) != 0 { t.Errorf("prompt: len(keys) = %d (want 0)", len(keys)) } if !symmetric { t.Errorf("symmetric is not set") } return []byte("password"), nil } md, err := ReadMessage(readerFromHex(symmetricallyEncryptedCompressedHex), nil, prompt, nil) if err != nil { t.Errorf("ReadMessage: %s", err) return } contents, err := ioutil.ReadAll(md.UnverifiedBody) if err != nil { t.Errorf("ReadAll: %s", err) } expectedCreationTime := uint32(1295992998) if md.LiteralData.Time != expectedCreationTime { t.Errorf("LiteralData.Time is %d, want %d", md.LiteralData.Time, expectedCreationTime) } if string(contents) != expected { t.Errorf("contents got: %s want: %s", string(contents), expected) } }
explode_data.jsonl/50375
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 357 }
[ 2830, 3393, 27912, 15903, 745, 7408, 14026, 1155, 353, 8840, 836, 8, 341, 42400, 1669, 330, 27912, 15903, 745, 24455, 7110, 77, 1837, 3223, 14749, 1669, 2915, 36131, 3056, 1592, 11, 54343, 1807, 8, 34923, 3782, 11, 1465, 8, 341, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestCustomResourceCascadingDeletion(t *testing.T) { ctx := setup(t, 5) defer ctx.tearDown() clientSet, apiExtensionClient, dynamicClient := ctx.clientSet, ctx.apiExtensionClient, ctx.dynamicClient ns := createNamespaceOrDie("crd-cascading", clientSet, t) definition, resourceClient := createRandomCustomResourceDefinition(t, apiExtensionClient, dynamicClient, ns.Name) // Create a custom owner resource. owner := newCRDInstance(definition, ns.Name, names.SimpleNameGenerator.GenerateName("owner")) owner, err := resourceClient.Create(owner, metav1.CreateOptions{}) if err != nil { t.Fatalf("failed to create owner resource %q: %v", owner.GetName(), err) } t.Logf("created owner resource %q", owner.GetName()) // Create a custom dependent resource. dependent := newCRDInstance(definition, ns.Name, names.SimpleNameGenerator.GenerateName("dependent")) link(t, owner, dependent) dependent, err = resourceClient.Create(dependent, metav1.CreateOptions{}) if err != nil { t.Fatalf("failed to create dependent resource %q: %v", dependent.GetName(), err) } t.Logf("created dependent resource %q", dependent.GetName()) // Delete the owner. foreground := metav1.DeletePropagationForeground err = resourceClient.Delete(owner.GetName(), &metav1.DeleteOptions{PropagationPolicy: &foreground}) if err != nil { t.Fatalf("failed to delete owner resource %q: %v", owner.GetName(), err) } // Ensure the owner is deleted. if err := wait.Poll(1*time.Second, 60*time.Second, func() (bool, error) { _, err := resourceClient.Get(owner.GetName(), metav1.GetOptions{}) return apierrors.IsNotFound(err), nil }); err != nil { t.Fatalf("failed waiting for owner resource %q to be deleted", owner.GetName()) } // Ensure the dependent is deleted. 
_, err = resourceClient.Get(dependent.GetName(), metav1.GetOptions{}) if err == nil { t.Fatalf("expected dependent %q to be deleted", dependent.GetName()) } else { if !apierrors.IsNotFound(err) { t.Fatalf("unexpected error getting dependent %q: %v", dependent.GetName(), err) } } }
explode_data.jsonl/18183
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 674 }
[ 2830, 3393, 10268, 4783, 34, 5061, 2228, 1912, 52625, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 6505, 1155, 11, 220, 20, 340, 16867, 5635, 31853, 59342, 2822, 25291, 1649, 11, 6330, 12049, 2959, 11, 8741, 2959, 1669, 5635, 6581, 1649, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestPodSpecConversion(t *testing.T) { name, other := "foo", "bar" // Test internal -> v1. Should have both alias (DeprecatedServiceAccount) // and new field (ServiceAccountName). i := &core.PodSpec{ ServiceAccountName: name, } v := v1.PodSpec{} if err := legacyscheme.Scheme.Convert(i, &v, nil); err != nil { t.Fatalf("unexpected error: %v", err) } if v.ServiceAccountName != name { t.Fatalf("want v1.ServiceAccountName %q, got %q", name, v.ServiceAccountName) } if v.DeprecatedServiceAccount != name { t.Fatalf("want v1.DeprecatedServiceAccount %q, got %q", name, v.DeprecatedServiceAccount) } // Test v1 -> internal. Either DeprecatedServiceAccount, ServiceAccountName, // or both should translate to ServiceAccountName. ServiceAccountName wins // if both are set. testCases := []*v1.PodSpec{ // New {ServiceAccountName: name}, // Alias {DeprecatedServiceAccount: name}, // Both: same {ServiceAccountName: name, DeprecatedServiceAccount: name}, // Both: different {ServiceAccountName: name, DeprecatedServiceAccount: other}, } for k, v := range testCases { got := core.PodSpec{} err := legacyscheme.Scheme.Convert(v, &got, nil) if err != nil { t.Fatalf("unexpected error for case %d: %v", k, err) } if got.ServiceAccountName != name { t.Fatalf("want core.ServiceAccountName %q, got %q", name, got.ServiceAccountName) } } }
explode_data.jsonl/27248
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 502 }
[ 2830, 3393, 23527, 8327, 48237, 1155, 353, 8840, 836, 8, 341, 11609, 11, 1008, 1669, 330, 7975, 497, 330, 2257, 1837, 197, 322, 3393, 5306, 1464, 348, 16, 13, 12260, 614, 2176, 15534, 320, 51344, 1860, 7365, 340, 197, 322, 323, 501, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestMinOpenedSessions(t *testing.T) { sp, _, cleanup := setup(t, SessionPoolConfig{MinOpened: 1}) defer cleanup() // Take ten sessions from session pool and recycle them. var ss []*session var shs []*sessionHandle for i := 0; i < 10; i++ { sh, err := sp.take(context.Background()) if err != nil { t.Errorf("failed to get session(%v): %v", i, err) } ss = append(ss, sh.session) shs = append(shs, sh) sh.recycle() } for _, sh := range shs { sh.recycle() } // Simulate session expiration. for _, s := range ss { s.destroy(true) } sp.mu.Lock() defer sp.mu.Unlock() // There should be still one session left in idle list due to the min open sessions constraint. if sp.idleList.Len() != 1 { t.Errorf("got %v sessions in idle list, want 1 %d", sp.idleList.Len(), sp.numOpened) } }
explode_data.jsonl/78698
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 320 }
[ 2830, 3393, 6217, 53522, 59062, 1155, 353, 8840, 836, 8, 341, 41378, 11, 8358, 21290, 1669, 6505, 1155, 11, 9164, 10551, 2648, 90, 6217, 53522, 25, 220, 16, 3518, 16867, 21290, 2822, 197, 322, 11778, 5779, 15704, 504, 3797, 7314, 323, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestQueryError verifies that a storage error returned by the querier is
// propagated through query execution (wrapped so errors.Is still matches it)
// for both an instant vector query and a range (matrix) selector.
func TestQueryError(t *testing.T) {
	opts := EngineOpts{
		Logger:     nil,
		Reg:        nil,
		MaxSamples: 10,
		Timeout:    10 * time.Second,
	}
	engine := NewEngine(opts)
	errStorage := ErrStorage{errors.New("storage error")}
	// Queryable whose querier always fails with errStorage.
	queryable := storage.QueryableFunc(func(ctx context.Context, mint, maxt int64) (storage.Querier, error) {
		return &errQuerier{err: errStorage}, nil
	})
	ctx, cancelCtx := context.WithCancel(context.Background())
	defer cancelCtx()

	// Instant query producing a vector.
	vectorQuery, err := engine.NewInstantQuery(queryable, "foo", time.Unix(1, 0))
	require.NoError(t, err)
	res := vectorQuery.Exec(ctx)
	require.Error(t, res.Err, "expected error on failed select but got none")
	require.True(t, errors.Is(res.Err, errStorage), "expected error doesn't match")

	// Instant query with a range selector, producing a matrix.
	matrixQuery, err := engine.NewInstantQuery(queryable, "foo[1m]", time.Unix(1, 0))
	require.NoError(t, err)
	res = matrixQuery.Exec(ctx)
	require.Error(t, res.Err, "expected error on failed select but got none")
	require.True(t, errors.Is(res.Err, errStorage), "expected error doesn't match")
}
explode_data.jsonl/35551
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 385 }
[ 2830, 3393, 2859, 1454, 1155, 353, 8840, 836, 8, 341, 64734, 1669, 8200, 43451, 515, 197, 55861, 25, 257, 2092, 345, 197, 197, 3477, 25, 286, 2092, 345, 197, 197, 5974, 39571, 25, 220, 16, 15, 345, 197, 197, 7636, 25, 262, 220, 16...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSerializeStruct(t *testing.T) { t.Skip("skipping TestSerializeStruct") u := User{ID: 10, Name: "harry dayo"} result, err := ToGOB64(u) if err != nil { t.Errorf("ToGOB64() error: %s", err) } if result != "Iv+BAwEBBFVzZXIB/4IAAQIBAklkAQQAAQROYW1lAQwAAAAR/4IBFAEKaGFycnkgZGF5bwA=" { t.Errorf("ToGOB64 result: %+v", result) } }
explode_data.jsonl/30430
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 15680, 9422, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 4886, 5654, 3393, 15680, 9422, 5130, 10676, 1669, 2657, 90, 915, 25, 220, 16, 15, 11, 3988, 25, 330, 71, 11433, 1899, 78, 16707, 9559, 11, 1848, 1669, 2014, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidateResolveAutoConsistency(t *testing.T) { conf1 := defaultConfigForTest(t) d := &Dumper{conf: conf1} conf := d.conf testCases := []struct { confConsistency string confSnapshot string err bool }{ {consistencyTypeAuto, "", true}, {consistencyTypeAuto, "123", false}, {consistencyTypeFlush, "", true}, {consistencyTypeFlush, "456", false}, {consistencyTypeLock, "", true}, {consistencyTypeLock, "789", false}, {consistencyTypeSnapshot, "", true}, {consistencyTypeSnapshot, "456", true}, {consistencyTypeNone, "", true}, {consistencyTypeNone, "123", false}, } for _, testCase := range testCases { conf.Consistency = testCase.confConsistency conf.Snapshot = testCase.confSnapshot if testCase.err == true { require.NoError(t, validateResolveAutoConsistency(d)) } else { require.EqualError(t, validateResolveAutoConsistency(d), fmt.Sprintf("can't specify --snapshot when --consistency isn't snapshot, resolved consistency: %s", conf.Consistency)) } } }
explode_data.jsonl/49489
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 393 }
[ 2830, 3393, 17926, 56808, 13253, 15220, 47094, 1155, 353, 8840, 836, 8, 341, 67850, 16, 1669, 1638, 2648, 2461, 2271, 1155, 340, 2698, 1669, 609, 35, 24027, 90, 6135, 25, 2335, 16, 532, 67850, 1669, 294, 13937, 271, 18185, 37302, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestGetStatDateWithinPlace exercises the GetStatDateWithinPlace mixer API
// against golden files for several (ancestor place, child type, stat vars)
// combinations. When test.GenerateGolden is set it regenerates the golden
// files instead of comparing against them.
func TestGetStatDateWithinPlace(t *testing.T) {
	t.Parallel()
	ctx := context.Background()
	// Locate the golden-file directory relative to this source file.
	_, filename, _, _ := runtime.Caller(0)
	goldenPath := path.Join(
		path.Dir(filename), "get_stat_date_within_place")
	testSuite := func(mixer pb.MixerClient, recon pb.ReconClient, latencyTest bool) {
		for _, c := range []struct {
			ancestorPlace  string
			childPlaceType string
			statVars       []string
			goldenFile     string
		}{
			{
				"geoId/06",
				"County",
				[]string{"Count_Person", "Median_Age_Person"},
				"CA_County.json",
			},
			{
				"country/USA",
				"State",
				[]string{"Count_Person", "Count_Person_Female"},
				"USA_State.json",
			},
			{
				"country/USA",
				"State",
				[]string{"Count_Person_FoodInsecure", "Mean_MealCost_Person_FoodSecure"},
				"memdb.json",
			},
		} {
			resp, err := mixer.GetStatDateWithinPlace(ctx, &pb.GetStatDateWithinPlaceRequest{
				AncestorPlace:  c.ancestorPlace,
				ChildPlaceType: c.childPlaceType,
				StatVars:       c.statVars,
			})
			if err != nil {
				t.Errorf("could not GetStatDateWithinPlace: %s", err)
				continue
			}
			// Latency runs only measure timing; skip all comparisons.
			if latencyTest {
				continue
			}
			if test.GenerateGolden {
				test.UpdateGolden(resp, goldenPath, c.goldenFile)
				continue
			}
			var expected pb.GetStatDateWithinPlaceResponse
			if err := test.ReadJSON(goldenPath, c.goldenFile, &expected); err != nil {
				t.Errorf("Can not Unmarshal golden file")
				continue
			}
			if diff := cmp.Diff(resp, &expected, protocmp.Transform()); diff != "" {
				t.Errorf("payload got diff: %v", diff)
				continue
			}
		}
	}
	if err := test.TestDriver(
		"GetStatDateWithinPlace",
		&test.TestOption{UseMemdb: true},
		testSuite,
	); err != nil {
		t.Errorf("TestDriver() = %s", err)
	}
}
explode_data.jsonl/12387
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 766 }
[ 2830, 3393, 1949, 15878, 1916, 41961, 17371, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 20985, 1669, 2266, 19047, 2822, 197, 6878, 3899, 11, 8358, 716, 1669, 15592, 727, 13956, 7, 15, 340, 3174, 813, 268, 1820, 1669, 1815, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
// TestConcurrentUnreliable runs the concurrent Put/Get/Move stress test with
// an unreliable network (second argument of doConcurrent).
func TestConcurrentUnreliable(t *testing.T) {
	fmt.Printf("Test: Concurrent Put/Get/Move (unreliable) ...\n")
	doConcurrent(t, true)
	fmt.Printf(" ... Passed\n")
}
explode_data.jsonl/18809
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 67 }
[ 2830, 3393, 1109, 3231, 1806, 265, 50330, 1155, 353, 8840, 836, 8, 341, 11009, 19367, 445, 2271, 25, 42704, 10224, 14, 1949, 14, 9860, 320, 359, 265, 50330, 8, 98760, 77, 1138, 19935, 1109, 3231, 1155, 11, 830, 340, 11009, 19367, 445,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestScrapeMetricsDataOp verifies the scraper obsreport instrumentation:
// one span per scrape with scraped/errored metric-point counts recorded as
// span attributes, a status code matching the scrape outcome, and aggregate
// scraper metrics consistent with the per-span counts.
func TestScrapeMetricsDataOp(t *testing.T) {
	tt, err := obsreporttest.SetupTelemetry()
	require.NoError(t, err)
	t.Cleanup(func() { require.NoError(t, tt.Shutdown(context.Background())) })

	parentCtx, parentSpan := tt.TracerProvider.Tracer("test").Start(context.Background(), t.Name())
	defer parentSpan.End()

	// One scrape per entry: a partial error, a hard error, and a success.
	params := []testParams{
		{items: 23, err: partialErrFake},
		{items: 29, err: errFake},
		{items: 15, err: nil},
	}
	for i := range params {
		scrp := NewScraper(ScraperSettings{
			ReceiverID:             receiver,
			Scraper:                scraper,
			ReceiverCreateSettings: tt.ToReceiverCreateSettings(),
		})
		ctx := scrp.StartMetricsOp(parentCtx)
		assert.NotNil(t, ctx)
		scrp.EndMetricsOp(ctx, params[i].items, params[i].err)
	}

	spans := tt.SpanRecorder.Ended()
	require.Equal(t, len(params), len(spans))

	var scrapedMetricPoints, erroredMetricPoints int
	for i, span := range spans {
		assert.Equal(t, "scraper/"+receiver.String()+"/"+scraper.String()+"/MetricsScraped", span.Name())
		switch params[i].err {
		case nil:
			// Success: all items counted as scraped, none errored.
			scrapedMetricPoints += params[i].items
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.ScrapedMetricPointsKey, Value: attribute.Int64Value(int64(params[i].items))})
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.ErroredMetricPointsKey, Value: attribute.Int64Value(0)})
			assert.Equal(t, codes.Unset, span.Status().Code)
		case errFake:
			// Hard error: all items counted as errored, none scraped.
			erroredMetricPoints += params[i].items
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.ScrapedMetricPointsKey, Value: attribute.Int64Value(0)})
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.ErroredMetricPointsKey, Value: attribute.Int64Value(int64(params[i].items))})
			assert.Equal(t, codes.Error, span.Status().Code)
			assert.Equal(t, params[i].err.Error(), span.Status().Description)
		case partialErrFake:
			// Partial error: items scraped, exactly one errored point.
			scrapedMetricPoints += params[i].items
			erroredMetricPoints++
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.ScrapedMetricPointsKey, Value: attribute.Int64Value(int64(params[i].items))})
			require.Contains(t, span.Attributes(), attribute.KeyValue{Key: obsmetrics.ErroredMetricPointsKey, Value: attribute.Int64Value(1)})
			assert.Equal(t, codes.Error, span.Status().Code)
			assert.Equal(t, params[i].err.Error(), span.Status().Description)
		default:
			t.Fatalf("unexpected err param: %v", params[i].err)
		}
	}
	require.NoError(t, obsreporttest.CheckScraperMetrics(tt, receiver, scraper, int64(scrapedMetricPoints), int64(erroredMetricPoints)))
}
explode_data.jsonl/57969
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 983 }
[ 2830, 3393, 3326, 19842, 27328, 1043, 7125, 1155, 353, 8840, 836, 8, 341, 3244, 83, 11, 1848, 1669, 7448, 11736, 1944, 39820, 6639, 35958, 741, 17957, 35699, 1155, 11, 1848, 340, 3244, 727, 60639, 18552, 368, 314, 1373, 35699, 1155, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestIngestLinkFallback verifies that ingestLink succeeds if linking fails
// (errorfs fails the first FS operation) by falling back to copying the
// source file into the store.
func TestIngestLinkFallback(t *testing.T) {
	mem := vfs.NewMem()
	src, err := mem.Create("source")
	require.NoError(t, err)

	// Wrap the FS so the first operation (the link attempt) errors out.
	opts := &Options{FS: errorfs.Wrap(mem, errorfs.OnIndex(0))}
	opts.EnsureDefaults()

	meta := []*fileMetadata{{FileNum: 1}}
	require.NoError(t, ingestLink(0, opts, "", []string{"source"}, meta))

	dest, err := mem.Open("000001.sst")
	require.NoError(t, err)

	// We should be able to write bytes to src, and not have them show up in
	// dest — proving dest is an independent copy, not a hard link.
	_, _ = src.Write([]byte("test"))
	data, err := ioutil.ReadAll(dest)
	require.NoError(t, err)
	if len(data) != 0 {
		t.Fatalf("expected copy, but files appear to be hard linked: [%s] unexpectedly found", data)
	}
}
explode_data.jsonl/40257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 282 }
[ 2830, 3393, 641, 6969, 3939, 87206, 1155, 353, 8840, 836, 8, 341, 197, 322, 25429, 429, 88272, 3939, 50081, 421, 30699, 14525, 553, 15679, 1182, 311, 198, 197, 322, 31039, 624, 14145, 1669, 92941, 7121, 18816, 741, 41144, 11, 1848, 1669...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestMatchPath(t *testing.T) { cases := []struct { value string path string matches bool }{ {"/foo", "/foo", true}, {"/foo", "/foo/bar", true}, {"bar", "/foo/bar", true}, {"foo", "/foo/bar", true}, {"bar$", "/foo/bar", true}, {"/foo/*", "/foo/bar", true}, {"/foo/[a-z]+", "/foo/bar", true}, {"/foo/baz", "/foo/bar", false}, {"/foo/baz", "/foo/bar", false}, } for _, test := range cases { u, _ := url.Parse("http://foo.com" + test.path) mu, _ := url.Parse("http://foo.com" + test.value) req := &http.Request{URL: u} ereq := &Request{URLStruct: mu} matches, err := MatchPath(req, ereq) st.Expect(t, err, nil) st.Expect(t, matches, test.matches) } }
explode_data.jsonl/9200
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 8331, 1820, 1155, 353, 8840, 836, 8, 341, 1444, 2264, 1669, 3056, 1235, 341, 197, 16309, 256, 914, 198, 197, 26781, 262, 914, 198, 197, 2109, 9118, 1807, 198, 197, 59403, 197, 197, 90, 3115, 7975, 497, 3521, 7975, 497, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestMempoolTxsBytes verifies the mempool's running byte-size accounting
// (TxsBytes) across CheckTx, Update, Flush, the MaxTxsBytes limit, recheck
// removal after a commit, and RemoveTxByKey.
func TestMempoolTxsBytes(t *testing.T) {
	app := kvstore.NewApplication()
	cc := proxy.NewLocalClientCreator(app)
	config := cfg.ResetTestRoot("mempool_test")
	config.Mempool.MaxTxsBytes = 10
	mempool, cleanup := newMempoolWithAppAndConfig(cc, config)
	defer cleanup()

	// 1. zero by default
	assert.EqualValues(t, 0, mempool.TxsBytes())

	// 2. len(tx) after CheckTx
	err := mempool.CheckTx([]byte{0x01}, nil, TxInfo{})
	require.NoError(t, err)
	assert.EqualValues(t, 1, mempool.TxsBytes())

	// 3. zero again after tx is removed by Update
	err = mempool.Update(1, []types.Tx{[]byte{0x01}}, abciResponses(1, abci.CodeTypeOK), nil, nil)
	require.NoError(t, err)
	assert.EqualValues(t, 0, mempool.TxsBytes())

	// 4. zero after Flush
	err = mempool.CheckTx([]byte{0x02, 0x03}, nil, TxInfo{})
	require.NoError(t, err)
	assert.EqualValues(t, 2, mempool.TxsBytes())
	mempool.Flush()
	assert.EqualValues(t, 0, mempool.TxsBytes())

	// 5. ErrMempoolIsFull is returned when/if MaxTxsBytes limit is reached.
	err = mempool.CheckTx([]byte{0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04, 0x04}, nil, TxInfo{})
	require.NoError(t, err)
	err = mempool.CheckTx([]byte{0x05}, nil, TxInfo{})
	if assert.Error(t, err) {
		assert.IsType(t, ErrMempoolIsFull{}, err)
	}

	// 6. zero after tx is rechecked and removed due to not being valid anymore
	app2 := counter.NewApplication(true)
	cc = proxy.NewLocalClientCreator(app2)
	mempool, cleanup = newMempoolWithApp(cc)
	defer cleanup()
	txBytes := make([]byte, 8)
	binary.BigEndian.PutUint64(txBytes, uint64(0))
	err = mempool.CheckTx(txBytes, nil, TxInfo{})
	require.NoError(t, err)
	assert.EqualValues(t, 8, mempool.TxsBytes())

	// Commit the tx through a separate consensus connection so the mempool's
	// subsequent recheck sees it as no longer valid.
	appConnCon, _ := cc.NewABCIClient()
	appConnCon.SetLogger(log.TestingLogger().With("module", "abci-client", "connection", "consensus"))
	err = appConnCon.Start()
	require.Nil(t, err)
	t.Cleanup(func() {
		if err := appConnCon.Stop(); err != nil {
			t.Error(err)
		}
	})
	ctx := context.Background()
	res, err := appConnCon.DeliverTxSync(ctx, abci.RequestDeliverTx{Tx: txBytes})
	require.NoError(t, err)
	require.EqualValues(t, 0, res.Code)
	res2, err := appConnCon.CommitSync(ctx)
	require.NoError(t, err)
	require.NotEmpty(t, res2.Data)

	// Pretend like we committed nothing so txBytes gets rechecked and removed.
	err = mempool.Update(1, []types.Tx{}, abciResponses(0, abci.CodeTypeOK), nil, nil)
	require.NoError(t, err)
	assert.EqualValues(t, 0, mempool.TxsBytes())

	// 7. Test RemoveTxByKey function
	err = mempool.CheckTx([]byte{0x06}, nil, TxInfo{})
	require.NoError(t, err)
	assert.EqualValues(t, 1, mempool.TxsBytes())
	// Removing a key that is not in the mempool must not change the size.
	mempool.RemoveTxByKey(TxKey([]byte{0x07}), true)
	assert.EqualValues(t, 1, mempool.TxsBytes())
	mempool.RemoveTxByKey(TxKey([]byte{0x06}), true)
	assert.EqualValues(t, 0, mempool.TxsBytes())
}
explode_data.jsonl/14616
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1130 }
[ 2830, 3393, 44, 3262, 1749, 51, 18561, 7078, 1155, 353, 8840, 836, 8, 341, 28236, 1669, 16178, 4314, 7121, 4988, 741, 63517, 1669, 13291, 7121, 7319, 2959, 31865, 11462, 340, 25873, 1669, 13286, 36660, 2271, 8439, 445, 76, 3262, 1749, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSin(t *testing.T) { out := talib.Sin([]float64{0, math.Pi / 2}) expected := []float64{0, 1} if !reflect.DeepEqual(expected, out) { t.Errorf("Expected %#v got %#v.", expected, out) } }
explode_data.jsonl/51211
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 86 }
[ 2830, 3393, 48268, 1155, 353, 8840, 836, 8, 341, 13967, 1669, 8210, 579, 66055, 10556, 3649, 21, 19, 90, 15, 11, 6888, 1069, 72, 608, 220, 17, 3518, 42400, 1669, 3056, 3649, 21, 19, 90, 15, 11, 220, 16, 532, 743, 753, 34913, 94750...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDatetimeUserVariable(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("set @p = now()") tk.MustExec("set @@tidb_enable_vectorized_expression = false") require.NotEqual(t, "", tk.MustQuery("select @p").Rows()[0][0]) tk.MustExec("set @@tidb_enable_vectorized_expression = true") require.NotEqual(t, "", tk.MustQuery("select @p").Rows()[0][0]) }
explode_data.jsonl/65564
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 171 }
[ 2830, 3393, 94191, 1474, 7827, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50463, 10216, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStoreGateway_ShouldSupportLoadRingTokensFromFile verifies that a
// store-gateway restores ring tokens persisted to a tokens file, topping up
// to the configured token count when the file holds fewer tokens and keeping
// extras when it holds more.
func TestStoreGateway_ShouldSupportLoadRingTokensFromFile(t *testing.T) {
	tests := map[string]struct {
		storedTokens      ring.Tokens
		expectedNumTokens int
	}{
		"stored tokens are less than the configured ones": {
			storedTokens:      generateSortedTokens(RingNumTokens - 10),
			expectedNumTokens: RingNumTokens,
		},
		"stored tokens are equal to the configured ones": {
			storedTokens:      generateSortedTokens(RingNumTokens),
			expectedNumTokens: RingNumTokens,
		},
		"stored tokens are more then the configured ones": {
			storedTokens:      generateSortedTokens(RingNumTokens + 10),
			expectedNumTokens: RingNumTokens + 10,
		},
	}

	for testName, testData := range tests {
		t.Run(testName, func(t *testing.T) {
			tokensFile, err := ioutil.TempFile(os.TempDir(), "tokens-*")
			require.NoError(t, err)
			defer os.Remove(tokensFile.Name()) //nolint:errcheck

			// Store some tokens to the file.
			require.NoError(t, testData.storedTokens.StoreToFile(tokensFile.Name()))

			ctx := context.Background()
			gatewayCfg := mockGatewayConfig()
			gatewayCfg.ShardingEnabled = true
			gatewayCfg.ShardingRing.TokensFilePath = tokensFile.Name()

			storageCfg := mockStorageConfig(t)
			ringStore := consul.NewInMemoryClient(ring.GetCodec())

			bucketClient := &bucket.ClientMock{}
			bucketClient.MockIter("", []string{}, nil)

			g, err := newStoreGateway(gatewayCfg, storageCfg, bucketClient, ringStore, defaultLimitsOverrides(t), mockLoggingLevel(), log.NewNopLogger(), nil)
			require.NoError(t, err)
			defer services.StopAndAwaitTerminated(ctx, g) //nolint:errcheck
			assert.False(t, g.ringLifecycler.IsRegistered())

			require.NoError(t, services.StartAndAwaitRunning(ctx, g))
			assert.True(t, g.ringLifecycler.IsRegistered())
			assert.Equal(t, ring.ACTIVE, g.ringLifecycler.GetState())
			// The lifecycler must hold the expected number of tokens and
			// include every token that was stored in the file.
			assert.Len(t, g.ringLifecycler.GetTokens(), testData.expectedNumTokens)
			assert.Subset(t, g.ringLifecycler.GetTokens(), testData.storedTokens)
		})
	}
}
explode_data.jsonl/57961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 748 }
[ 2830, 3393, 6093, 40709, 36578, 616, 7916, 5879, 43466, 29300, 43633, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 2415, 14032, 60, 1235, 341, 197, 18388, 3018, 29300, 414, 10058, 836, 9713, 198, 197, 42400, 4651, 29300, 526, 198, 197, 59...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_buildAnnotations_AllowsWhitelisted(t *testing.T) { whitelist := []string{ "topic", "schedule", } userValues := map[string]string{ "topic": "function.deployed", "schedule": "has schedule", } out := buildAnnotations(whitelist, userValues) topicVal, ok := out["topic"] if !ok { t.Errorf("want user annotation: topic") t.Fail() } if topicVal != userValues["topic"] { t.Errorf("want user annotation: topic - got %s, want %s", topicVal, userValues["topic"]) t.Fail() } scheduleVal, ok := out["schedule"] if !ok { t.Errorf("want user annotation: schedule") t.Fail() } if scheduleVal != userValues["schedule"] { t.Errorf("want user annotation: schedule - got %s, want %s", scheduleVal, userValues["schedule"]) t.Fail() } }
explode_data.jsonl/11985
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 294 }
[ 2830, 3393, 20801, 21418, 53629, 4241, 1639, 56643, 13236, 1155, 353, 8840, 836, 8, 341, 197, 1312, 57645, 1669, 3056, 917, 515, 197, 197, 1, 16411, 756, 197, 197, 1, 28630, 756, 197, 630, 19060, 6227, 1669, 2415, 14032, 30953, 515, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestReceiveNewEventsConcurrently(t *testing.T) { workers := 4 eventsCount := 100 ch := make(chan *info, eventsCount*workers) defer close(ch) to := func(message []byte, mt inputsource.NetworkMetadata) { ch <- &info{message: string(message), mt: mt} } cfg, err := common.NewConfigFrom(map[string]interface{}{"host": "127.0.0.1:0"}) if !assert.NoError(t, err) { return } config := defaultConfig err = cfg.Unpack(&config) if !assert.NoError(t, err) { return } factory := netcommon.SplitHandlerFactory(netcommon.FamilyTCP, logp.NewLogger("test"), MetadataCallback, to, bufio.ScanLines) server, err := New(&config, factory) if !assert.NoError(t, err) { return } err = server.Start() if !assert.NoError(t, err) { return } defer server.Stop() samples := generateMessages(eventsCount, 1024) for w := 0; w < workers; w++ { go func() { conn, err := net.Dial("tcp", server.Listener.Listener.Addr().String()) defer conn.Close() assert.NoError(t, err) for _, sample := range samples { fmt.Fprintln(conn, sample) } }() } var events []*info for len(events) < eventsCount*workers { select { case event := <-ch: events = append(events, event) default: } } }
explode_data.jsonl/78878
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 479 }
[ 2830, 3393, 14742, 3564, 7900, 1109, 58202, 1155, 353, 8840, 836, 8, 341, 197, 54958, 1669, 220, 19, 198, 90873, 2507, 1669, 220, 16, 15, 15, 198, 23049, 1669, 1281, 35190, 353, 2733, 11, 4357, 2507, 9, 54958, 340, 16867, 3265, 7520, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_moduleVarWrongTypeNested(t *testing.T) { m := testModule(t, "plan-module-wrong-var-type-nested") p := testProvider("null") p.DiffFn = testDiffFn ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "null": testProviderFuncFixed(p), }, ), }) _, diags := ctx.Plan() if !diags.HasErrors() { t.Fatalf("succeeded; want errors") } }
explode_data.jsonl/28657
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 186 }
[ 2830, 3393, 1972, 17, 20485, 10750, 3962, 29185, 929, 71986, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 46718, 2630, 14347, 85415, 10604, 5279, 9980, 1138, 3223, 1669, 1273, 5179, 445, 2921, 1138, 3223, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeSerializeMap(t *testing.T) { t.Skip("skipping TestDeSerializeMap") m := map[string]int{} //FromGOB64("Dv+DBAEC/4QAAQwBBAAAIP+EAAMFYXBwbGX+ASwGYmFuYW5h/gJYBWxlbW9u/gJY", &u) err := FromGOB64("Dv+DBAEC/4QAAQwBBAAAIP+EAAMGYmFuYW5h/gJYBWxlbW9u/gJYBWFwcGxl/gEs", &m) if err != nil { t.Errorf("FromGOB64 error: %s", err) } if m["apple"] != 150 { t.Errorf("FromGOB64 result: %#v", m) } }
explode_data.jsonl/30433
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 225 }
[ 2830, 3393, 1912, 15680, 2227, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 4886, 5654, 3393, 1912, 15680, 2227, 5130, 2109, 1669, 2415, 14032, 63025, 16094, 197, 322, 3830, 15513, 33, 21, 19, 445, 35, 85, 10, 3506, 32, 7498, 14, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestRedrawGauge drives drawer.redrawGauge through its signal channel and
// uses a gomock Gauge to assert the expected Percent calls (0 at start and
// 100 on completion for a single-step redraw).
func TestRedrawGauge(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()
	tests := []struct {
		name  string
		size  int
		count int
		gauge Gauge
	}{
		{
			name: "draw once",
			size: 1,
			// Mock expecting Percent(0) then Percent(100).
			gauge: func() Gauge {
				g := NewMockGauge(ctrl)
				g.EXPECT().Percent(0)
				g.EXPECT().Percent(100)
				return g
			}(),
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			d := &drawer{
				widgets: &widgets{progressGauge: tt.gauge},
				gaugeCh: make(chan struct{}),
			}
			go d.redrawGauge(ctx, tt.size)
			// Send tt.size signals; each should advance the gauge.
			for i := 0; i < tt.size; i++ {
				d.gaugeCh <- struct{}{}
			}
		})
	}
}
explode_data.jsonl/19374
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 354 }
[ 2830, 3393, 6033, 1041, 38, 19392, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 20985, 11, 9121, 1669, 2266, 26124, 7636, 5378, 19047, 1507, 220, 18, 15, 77053, 32435,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestGenerateResponseTypesWithTimeTime checks that a string property with
// format "date-time" in an OpenAPI response schema is generated as a Go
// time.Time struct field.
// NOTE(review): the spec/expected raw-string literals appear
// whitespace-collapsed in this copy of the file — verify their exact
// line breaks against the original source before editing them.
func TestGenerateResponseTypesWithTimeTime(t *testing.T) { spec := `--- components: responses: FooResponse: description: bar content: application/json: schema: type: object properties: baz: type: string format: date-time ` doc, err := openapi.Load([]byte(spec)) if err != nil { t.Fatal(err) } typ, err := generator.GenerateResponseTypes(doc) if err != nil { t.Error(err) return } expected := `package models import "time" // code generated by restgen. DO NOT EDIT. // bar type FooResponse struct { Baz time.Time } ` if string(typ) != expected { t.Errorf("%s != %s", string(typ), expected) return } }
explode_data.jsonl/65726
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 31115, 2582, 4173, 2354, 1462, 1462, 1155, 353, 8840, 836, 8, 341, 98100, 1669, 1565, 10952, 5149, 510, 220, 14507, 510, 262, 33428, 2582, 510, 414, 4008, 25, 3619, 198, 414, 2213, 510, 286, 3766, 8931, 510, 688, 10802, 51...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestGobMarshalString(t *testing.T) { s := test.RunServerOnPort(TEST_PORT) defer s.Shutdown() ec := NewGobEncodedConn(t) defer ec.Close() ch := make(chan bool) testString := "Hello World!" ec.Subscribe("gob_string", func(s string) { if s != testString { t.Fatalf("Received test string of '%s', wanted '%s'\n", s, testString) } ch <- true }) ec.Publish("gob_string", testString) if e := test.Wait(ch); e != nil { t.Fatal("Did not receive the message") } }
explode_data.jsonl/39501
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 196 }
[ 2830, 3393, 38, 674, 55438, 703, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1273, 16708, 5475, 1925, 7084, 50320, 12377, 340, 16867, 274, 10849, 18452, 2822, 197, 757, 1669, 1532, 38, 674, 46795, 9701, 1155, 340, 16867, 11942, 10421, 741...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestStopFailed exercises the connection manager's Stop path while a
// connect request is in flight: the stop flag is toggled around the dialer's
// sleep window so the pending Connect is still processed, and Wait must
// return once the manager has fully stopped.
func TestStopFailed(t *testing.T) {
	done := make(chan struct{}, 1)
	// Dialer that signals the test, pauses briefly, then fails.
	waitDialer := func(network, addr string) (net.Conn, error) {
		done <- struct{}{}
		time.Sleep(time.Millisecond)
		return nil, errors.New("network down")
	}
	cmgr, err := New(&Config{
		Dial: waitDialer,
	})
	if err != nil {
		t.Fatalf("New error: %v", err)
	}
	cmgr.Start()
	go func() {
		<-done
		// Set the stop flag during the dialer's sleep, clear it, then stop
		// the manager for real.
		atomic.StoreInt32(&cmgr.stop, 1)
		time.Sleep(2 * time.Millisecond)
		atomic.StoreInt32(&cmgr.stop, 0)
		cmgr.Stop()
	}()
	cr := &ConnReq{
		Addr: &net.TCPAddr{
			IP:   net.ParseIP("127.0.0.1"),
			Port: 18555,
		},
		Permanent: true,
	}
	go cmgr.Connect(cr)
	cmgr.Wait()
}
explode_data.jsonl/53953
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 299 }
[ 2830, 3393, 10674, 9408, 1155, 353, 8840, 836, 8, 341, 40495, 1669, 1281, 35190, 2036, 22655, 220, 16, 340, 48750, 35, 530, 261, 1669, 2915, 46542, 11, 10789, 914, 8, 320, 4711, 50422, 11, 1465, 8, 341, 197, 40495, 9119, 2036, 6257, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRepositoryCommit_String(t *testing.T) { v := RepositoryCommit{ NodeID: String(""), SHA: String(""), Commit: &Commit{}, Author: &User{}, Committer: &User{}, HTMLURL: String(""), URL: String(""), CommentsURL: String(""), Stats: &CommitStats{}, } want := `github.RepositoryCommit{NodeID:"", SHA:"", Commit:github.Commit{}, Author:github.User{}, Committer:github.User{}, HTMLURL:"", URL:"", CommentsURL:"", Stats:github.CommitStats{}}` if got := v.String(); got != want { t.Errorf("RepositoryCommit.String = %v, want %v", got, want) } }
explode_data.jsonl/33286
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 262 }
[ 2830, 3393, 4624, 33441, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 26377, 33441, 515, 197, 30217, 915, 25, 414, 923, 445, 4461, 197, 7568, 17020, 25, 260, 923, 445, 4461, 197, 197, 33441, 25, 414, 609, 33441, 38837, 197, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestNumberDataPoint_LabelsMap(t *testing.T) { ms := NewNumberDataPoint() assert.EqualValues(t, NewStringMap(), ms.LabelsMap()) fillTestStringMap(ms.LabelsMap()) testValLabelsMap := generateTestStringMap() assert.EqualValues(t, testValLabelsMap, ms.LabelsMap()) }
explode_data.jsonl/32722
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 96 }
[ 2830, 3393, 2833, 1043, 2609, 53557, 82, 2227, 1155, 353, 8840, 836, 8, 341, 47691, 1669, 1532, 2833, 1043, 2609, 741, 6948, 12808, 6227, 1155, 11, 1532, 703, 2227, 1507, 9829, 4679, 82, 2227, 2398, 65848, 2271, 703, 2227, 35680, 4679, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestEncryptLayer(t *testing.T) { data := []byte("This is some text!") desc := ocispec.Descriptor{ Digest: digest.FromBytes(data), Size: int64(len(data)), } dataReader := bytes.NewReader(data) encLayerReader, encLayerFinalizer, err := EncryptLayer(ec, dataReader, desc) if err != nil { t.Fatal(err) } encLayer := make([]byte, 1024) encsize, err := encLayerReader.Read(encLayer) if err != io.EOF { t.Fatal("Expected EOF") } encLayerReaderAt := bytes.NewReader(encLayer[:encsize]) annotations, err := encLayerFinalizer() if err != nil { t.Fatal(err) } if len(annotations) == 0 { t.Fatal("No keys created for annotations") } newDesc := ocispec.Descriptor{ Annotations: annotations, } decLayerReader, _, err := DecryptLayer(dc, encLayerReaderAt, newDesc, false) if err != nil { t.Fatal(err) } decLayer := make([]byte, 1024) decsize, err := decLayerReader.Read(decLayer) if err != nil && err != io.EOF { t.Fatal(err) } if !reflect.DeepEqual(decLayer[:decsize], data) { t.Fatalf("Expected %v, got %v", data, decLayer) } }
explode_data.jsonl/15435
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 433 }
[ 2830, 3393, 61520, 9188, 1155, 353, 8840, 836, 8, 341, 8924, 1669, 3056, 3782, 445, 1986, 374, 1045, 1467, 22988, 41653, 1669, 17796, 285, 992, 23548, 6820, 515, 197, 10957, 15153, 25, 20882, 11439, 7078, 2592, 1326, 197, 91224, 25, 256...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
9
func TestPlural(t *testing.T) { tpl := `{{$num := len "two"}}{{$num}} {{$num | plural "1 char" "chars"}}` if err := runt(tpl, "3 chars"); err != nil { t.Error(err) } tpl = `{{len "t" | plural "cheese" "%d chars"}}` if err := runt(tpl, "cheese"); err != nil { t.Error(err) } }
explode_data.jsonl/63889
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 133 }
[ 2830, 3393, 2120, 4176, 1155, 353, 8840, 836, 8, 341, 3244, 500, 1669, 1565, 29283, 2413, 1669, 2422, 330, 19789, 30975, 29283, 2413, 3417, 30108, 2413, 760, 38498, 330, 16, 1161, 1, 330, 19255, 30975, 3989, 743, 1848, 1669, 1598, 83, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestGetForecast(t *testing.T) { t.Parallel() ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) { testFile := "testdata/response-compact.json" wantURI := "/weatherapi/locationforecast/2.0/compact?lat=53.86&lon=-9.30" gotURI := r.RequestURI testhelper.VerifyURIs(wantURI, gotURI, t) f, err := os.Open(testFile) if err != nil { t.Fatal(err) } defer f.Close() _, err = io.Copy(rw, f) if err != nil { t.Fatalf("copying data from file %s to test HTTP server: %v", testFile, err) } })) defer ts.Close() client := meteo.NewYRClient() client.BaseURL = ts.URL client.Resolve = func(location string) (meteo.Location, error) { return meteo.Location{ Lat: 53.86, Long: -9.30, }, nil } got, err := client.GetForecast("Castlebar,IE") if err != nil { t.Errorf("error getting forecast data, %v", err) } want := meteo.Weather{ Summary: "rain", Temp: 13.7, } if !cmp.Equal(want, got) { t.Error(cmp.Diff(want, got)) } }
explode_data.jsonl/55220
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 440 }
[ 2830, 3393, 1949, 72459, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 57441, 1669, 54320, 70334, 7121, 5475, 19886, 89164, 18552, 2601, 86, 1758, 37508, 11, 435, 353, 1254, 9659, 8, 341, 197, 18185, 1703, 1669, 330, 92425, 988...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSuObjectDelete(t *testing.T) { assert := assert.T(t).This ob := SuObject{} ob.Delete(nil, Zero) ob.Delete(nil, SuStr("baz")) for i := 0; i < 5; i++ { ob.Add(SuInt(i)) } ob.Set(SuStr("foo"), SuInt(8)) ob.Set(SuStr("bar"), SuInt(9)) assert(ob.Show()).Is("#(0, 1, 2, 3, 4, bar: 9, foo: 8)") ob.Delete(nil, SuStr("foo")) assert(ob.Show()).Is("#(0, 1, 2, 3, 4, bar: 9)") ob.Delete(nil, SuInt(2)) assert(ob.Show()).Is("#(0, 1, 3, 4, bar: 9)") ob.Delete(nil, Zero) assert(ob.Show()).Is("#(1, 3, 4, bar: 9)") ob.Delete(nil, SuInt(2)) assert(ob.Show()).Is("#(1, 3, bar: 9)") ob.DeleteAll() assert(ob.Show()).Is("#()") assert(ob.Size()).Is(0) }
explode_data.jsonl/7116
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 334 }
[ 2830, 3393, 36459, 1190, 6435, 1155, 353, 8840, 836, 8, 341, 6948, 1669, 2060, 836, 1155, 568, 1986, 198, 63353, 1669, 16931, 1190, 16094, 63353, 18872, 27907, 11, 18306, 340, 63353, 18872, 27907, 11, 16931, 2580, 445, 42573, 5455, 2023, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func Test_marshal_general_struct_of_struct(t *testing.T) { should := require.New(t) for _, c := range test.Combinations { obj := general.Struct{ protocol.FieldId(1): general.Struct{ protocol.FieldId(1): "abc", }, } output, err := c.Marshal(obj) should.NoError(err) output1, err := c.Marshal(&obj) should.NoError(err) should.Equal(output, output1) var val general.Struct should.NoError(c.Unmarshal(output, &val)) should.Equal(general.Struct{ protocol.FieldId(1): "abc", }, val[protocol.FieldId(1)]) } }
explode_data.jsonl/47462
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 232 }
[ 2830, 3393, 717, 28423, 39177, 15126, 3575, 15126, 1155, 353, 8840, 836, 8, 341, 197, 5445, 1669, 1373, 7121, 1155, 340, 2023, 8358, 272, 1669, 2088, 1273, 727, 2855, 12634, 341, 197, 22671, 1669, 4586, 51445, 515, 298, 197, 17014, 1708...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGet(t *testing.T) { fixture := consulutil.NewFixture(t) defer fixture.Stop() store := newStore(fixture.Client.KV()) // // Create DaemonSet // podID := types.PodID("some_pod_id") minHealth := 0 clusterName := ds_fields.ClusterName("some_name") azLabel := pc_fields.AvailabilityZone("some_zone") selector := klabels.Everything(). Add(pc_fields.AvailabilityZoneLabel, klabels.EqualsOperator, []string{azLabel.String()}) manifestBuilder := manifest.NewBuilder() manifestBuilder.SetID(podID) manifest := manifestBuilder.GetManifest() timeout := replication.NoTimeout ctx, cancelFunc := transaction.New(context.Background()) defer cancelFunc() ds, err := store.Create(ctx, manifest, minHealth, clusterName, selector, podID, timeout) if err != nil { t.Fatalf("Unable to create daemon set: %s", err) } err = transaction.MustCommit(ctx, fixture.Client.KV()) if err != nil { t.Fatalf("Unable to create daemon set: %s", err) } Assert(t).AreNotEqual(ds.ID, "", "Daemon set should have an id") // // Get DaemonSet and verify it is the same // getDS, _, err := store.Get(ds.ID) if err != nil { t.Fatalf("Error retrieving created daemon set: %s", err) } Assert(t).AreNotEqual(getDS.ID, "", "Daemon set should have an id") Assert(t).AreNotEqual(getDS.PodID, "", "Daemon set should have a pod id") Assert(t).AreEqual(ds.ID, getDS.ID, "Daemon set should be equal ids") Assert(t).AreEqual(ds.PodID, getDS.PodID, "Daemon set should have equal pod ids") Assert(t).AreEqual(ds.MinHealth, getDS.MinHealth, "Daemon set should have equal minimum healths") Assert(t).AreEqual(ds.Name, getDS.Name, "Daemon set should have equal names") Assert(t).AreEqual(ds.Disabled, getDS.Disabled, "Daemon set should have same disabled fields") testLabels := klabels.Set{ pc_fields.AvailabilityZoneLabel: azLabel.String(), } if matches := getDS.NodeSelector.Matches(testLabels); !matches { t.Error("The daemon set has a bad node selector") } originalSHA, err := manifest.SHA() if err != nil { t.Fatal("Unable to retrieve SHA from 
manifest") } getSHA, err := getDS.Manifest.SHA() if err != nil { t.Fatal("Unable to retrieve SHA from manifest retrieved from daemon set") } Assert(t).AreEqual(originalSHA, getSHA, "Daemon set shas were not equal") // Invalid get opertaion _, _, err = store.Get("bad_id") if err == nil { t.Error("Expected get operation to fail when getting a daemon set which does not exist") } }
explode_data.jsonl/3471
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 882 }
[ 2830, 3393, 1949, 1155, 353, 8840, 836, 8, 341, 1166, 12735, 1669, 74189, 1314, 7121, 18930, 1155, 340, 16867, 12507, 30213, 741, 57279, 1669, 501, 6093, 94886, 11716, 11352, 53, 2398, 197, 2289, 197, 322, 4230, 92236, 1649, 198, 197, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestRequestParserMiddlewarePanicsWhenNilHandler(t *testing.T) { handle := requestParserMiddleware(func(Context) (Handler, error) { return nil, nil }) assert.Panics(t, func() { handle(nil) }) }
explode_data.jsonl/18944
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 73 }
[ 2830, 3393, 1900, 6570, 24684, 35693, 1211, 4498, 19064, 3050, 1155, 353, 8840, 836, 8, 341, 53822, 1669, 1681, 6570, 24684, 18552, 14001, 8, 320, 3050, 11, 1465, 8, 341, 197, 853, 2092, 11, 2092, 198, 197, 8824, 6948, 1069, 276, 1211...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestDiscoKeyMarshal(t *testing.T) { var k1, k2 DiscoKey for i := range k1 { k1[i] = byte(i) } testKey(t, "discokey:", k1, &k2) }
explode_data.jsonl/48390
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 72 }
[ 2830, 3393, 4839, 1015, 1592, 55438, 1155, 353, 8840, 836, 8, 341, 2405, 595, 16, 11, 595, 17, 93629, 1592, 198, 2023, 600, 1669, 2088, 595, 16, 341, 197, 16463, 16, 989, 60, 284, 4922, 1956, 340, 197, 532, 18185, 1592, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestCursor_Sub(t *testing.T) { t.Run("ok", func(t *testing.T) { data := []byte{0, 1, 2, 3, 4, 5, 6} cur := NewCursor(data) cur2, ok := cur.Sub(1, 4) require.True(t, ok) require.Equal(t, []byte{1, 2, 3}, cur2.Buffer()) }) t.Run("ok", func(t *testing.T) { data := []byte{0} cur := NewCursor(data) cur2, ok := cur.Sub(0, 1) require.True(t, ok) require.Equal(t, []byte{0}, cur2.Buffer()) }) t.Run("not-ok", func(t *testing.T) { data := []byte{0} cur := NewCursor(data) _, ok := cur.Sub(1, 4) require.False(t, ok) }) t.Run("not-ok", func(t *testing.T) { data := []byte{0, 1, 2} cur := NewCursor(data) _, ok := cur.Sub(1, 4) require.False(t, ok) }) }
explode_data.jsonl/80973
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 350 }
[ 2830, 3393, 14543, 36359, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 562, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 8924, 1669, 3056, 3782, 90, 15, 11, 220, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_HandleDeleted(t *testing.T) { ctrl := gomock.NewController(t) defer ctrl.Finish() handler, mockProvider := getHandlerWithMock(ctrl) mockProvider.EXPECT().SubmitAsyncJob(deletedJob) _, err := handler.HandleDelete(mockResourceName, mockPod) assert.NoError(t, err) }
explode_data.jsonl/39507
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 100 }
[ 2830, 3393, 42714, 26039, 1155, 353, 8840, 836, 8, 341, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 2822, 53326, 11, 7860, 5179, 1669, 633, 3050, 2354, 11571, 62100, 692, 77333, 5179, 22402, 7285, 1005, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestBotIP(t *testing.T) { tests := []struct { in string want uint8 }{ {"114.122.138.27", NoBotNoMatch}, {"35.180.1.1", BotRangeAWS}, {"2600:1fff:5000::1", BotRangeAWS}, {"100.20.156.62", BotRangeAWS}, {"13.237.31.191", BotRangeAWS}, {"13.57.187.238", BotRangeAWS}, {"13.58.249.187", BotRangeAWS}, {"18.189.178.53", BotRangeAWS}, {"18.191.239.50", BotRangeAWS}, {"18.206.115.23", BotRangeAWS}, {"18.207.119.101", BotRangeAWS}, {"18.217.17.227", BotRangeAWS}, {"18.224.140.0", BotRangeAWS}, {"18.236.221.165", BotRangeAWS}, {"3.81.56.221", BotRangeAWS}, {"3.83.24.166", BotRangeAWS}, {"3.94.114.22", BotRangeAWS}, {"34.207.159.142", BotRangeAWS}, {"34.209.26.42", BotRangeAWS}, {"34.217.96.163", BotRangeAWS}, {"34.221.199.187", BotRangeAWS}, {"34.222.59.41", BotRangeAWS}, {"34.231.157.157", BotRangeAWS}, {"34.232.127.140", BotRangeAWS}, {"35.174.166.183", BotRangeAWS}, {"44.234.24.80", BotRangeAWS}, {"44.234.66.18", BotRangeAWS}, {"52.12.38.56", BotRangeAWS}, {"52.34.76.65", BotRangeAWS}, {"52.44.93.197", BotRangeAWS}, {"52.56.255.25", BotRangeAWS}, {"54.158.227.15", BotRangeAWS}, {"54.159.60.243", BotRangeAWS}, {"54.166.166.23", BotRangeAWS}, {"54.200.108.160", BotRangeAWS}, {"54.215.29.10", BotRangeAWS}, {"54.226.25.34", BotRangeAWS}, {"54.227.27.249", BotRangeAWS}, {"54.242.93.252", BotRangeAWS}, {"54.70.53.60", BotRangeAWS}, {"54.71.187.124", BotRangeAWS}, {"54.86.34.110", BotRangeAWS}, {"54.91.251.150", BotRangeAWS}, {"54.92.222.34", BotRangeAWS}, {"68.183.241.134", BotRangeDigitalOcean}, {"88.212.248.0", BotRangeServersCom}, {"88.212.255.255", BotRangeServersCom}, {"88.213.0.0", NoBotNoMatch}, } for _, tt := range tests { t.Run(tt.in, func(t *testing.T) { r := &http.Request{Header: make(http.Header), RemoteAddr: tt.in} r.Header.Add("User-Agent", "Your user agent: Mozilla/5.0 (X11; Linux x86_64; rv:75.0) Gecko/20100101 Firefox/75.0") got := Bot(r) if got != tt.want { t.Errorf("got %d; want %d", got, tt.want) } }) } }
explode_data.jsonl/82220
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1031 }
[ 2830, 3393, 23502, 3298, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 17430, 256, 914, 198, 197, 50780, 2622, 23, 198, 197, 59403, 197, 197, 4913, 16, 16, 19, 13, 16, 17, 17, 13, 16, 18, 23, 13, 17, 22, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestDeletePrefix(t *testing.T) { graph, _ := tempGraph(t) defer nukeGraph(graph) img := createTestImage(graph, t) if err := graph.Delete(stringid.TruncateID(img.ID)); err != nil { t.Fatal(err) } assertNImages(graph, t, 0) }
explode_data.jsonl/64324
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 98 }
[ 2830, 3393, 6435, 14335, 1155, 353, 8840, 836, 8, 341, 66616, 11, 716, 1669, 2730, 11212, 1155, 340, 16867, 308, 10430, 11212, 24312, 340, 39162, 1669, 1855, 2271, 1906, 24312, 11, 259, 340, 743, 1848, 1669, 4771, 18872, 3609, 307, 8240...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestFunc(t *testing.T) { gopClTest(t, `func foo(format string, a [10]int, args ...interface{}) { } func main() { }`, `package main func foo(format string, a [10]int, args ...interface { }) { } func main() { } `) }
explode_data.jsonl/73682
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 9626, 1155, 353, 8840, 836, 8, 341, 3174, 453, 5066, 2271, 1155, 11, 1565, 2830, 15229, 20698, 914, 11, 264, 508, 16, 15, 63025, 11, 2827, 2503, 4970, 28875, 341, 630, 2830, 1887, 368, 341, 28350, 1565, 1722, 1887, 271, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNewAccountImportCmd(t *testing.T) { require := require.New(t) ctrl := gomock.NewController(t) defer ctrl.Finish() client := mock_ioctlclient.NewMockClient(ctrl) client.EXPECT().SelectTranslation(gomock.Any()).Return("", config.English).AnyTimes() client.EXPECT().Config().Return(config.Config{}).AnyTimes() cmd := NewAccountImportCmd(client) result, err := util.ExecuteCmd(cmd, "hh") require.NotNil(result) require.Error(err) }
explode_data.jsonl/13446
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 3564, 7365, 11511, 15613, 1155, 353, 8840, 836, 8, 341, 17957, 1669, 1373, 7121, 1155, 340, 84381, 1669, 342, 316, 1176, 7121, 2051, 1155, 340, 16867, 23743, 991, 18176, 741, 25291, 1669, 7860, 59683, 2972, 7121, 11571, 2959, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestClusterAdminDeleteAcl(t *testing.T) { seedBroker := NewMockBroker(t, 1) defer seedBroker.Close() seedBroker.SetHandlerByMap(map[string]MockResponse{ "MetadataRequest": NewMockMetadataResponse(t). SetController(seedBroker.BrokerID()). SetBroker(seedBroker.Addr(), seedBroker.BrokerID()), "DeleteAclsRequest": NewMockDeleteAclsResponse(t), }) config := NewTestConfig() config.Version = V1_0_0_0 admin, err := NewClusterAdmin([]string{seedBroker.Addr()}, config) if err != nil { t.Fatal(err) } resourceName := "my_topic" filter := AclFilter{ ResourceType: AclResourceTopic, Operation: AclOperationAlter, ResourceName: &resourceName, } _, err = admin.DeleteACL(filter, false) if err != nil { t.Fatal(err) } err = admin.Close() if err != nil { t.Fatal(err) } }
explode_data.jsonl/40801
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 330 }
[ 2830, 3393, 28678, 7210, 6435, 32, 564, 1155, 353, 8840, 836, 8, 341, 197, 22602, 65545, 1669, 1532, 11571, 65545, 1155, 11, 220, 16, 340, 16867, 10320, 65545, 10421, 2822, 197, 22602, 65545, 4202, 3050, 1359, 2227, 9147, 14032, 60, 115...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestEvalType(t *testing.T) { for i, tt := range []struct { name string in string typ influxql.DataType data EvalFixture }{ { name: `a single data type`, in: `min(value)`, typ: influxql.Integer, data: EvalFixture{ "cpu": map[string]influxql.DataType{ "value": influxql.Integer, }, }, }, { name: `multiple data types`, in: `min(value)`, typ: influxql.Integer, data: EvalFixture{ "cpu": map[string]influxql.DataType{ "value": influxql.Integer, }, "mem": map[string]influxql.DataType{ "value": influxql.String, }, }, }, { name: `count() with a float`, in: `count(value)`, typ: influxql.Integer, data: EvalFixture{ "cpu": map[string]influxql.DataType{ "value": influxql.Float, }, }, }, { name: `mean() with an integer`, in: `mean(value)`, typ: influxql.Float, data: EvalFixture{ "cpu": map[string]influxql.DataType{ "value": influxql.Integer, }, }, }, } { sources := make([]influxql.Source, 0, len(tt.data)) for src := range tt.data { sources = append(sources, &influxql.Measurement{Name: src}) } expr := influxql.MustParseExpr(tt.in) typ := influxql.EvalType(expr, sources, tt.data) if typ != tt.typ { t.Errorf("%d. %s: unexpected type:\n\nexp=%#v\n\ngot=%#v\n\n", i, tt.name, tt.typ, typ) } } }
explode_data.jsonl/28579
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 677 }
[ 2830, 3393, 54469, 929, 1155, 353, 8840, 836, 8, 341, 2023, 600, 11, 17853, 1669, 2088, 3056, 1235, 341, 197, 11609, 914, 198, 197, 17430, 256, 914, 198, 197, 25314, 220, 52852, 1470, 77277, 198, 197, 8924, 58239, 18930, 198, 197, 594...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestKeyUsagesFromStrings(t *testing.T) { testcases := []struct { usages []capi.KeyUsage expectedKeyUsage x509.KeyUsage expectedExtKeyUsage []x509.ExtKeyUsage expectErr bool }{ { usages: []capi.KeyUsage{"signing"}, expectedKeyUsage: x509.KeyUsageDigitalSignature, expectedExtKeyUsage: nil, expectErr: false, }, { usages: []capi.KeyUsage{"client auth"}, expectedKeyUsage: 0, expectedExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth}, expectErr: false, }, { usages: []capi.KeyUsage{"client auth", "client auth"}, expectedKeyUsage: 0, expectedExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageClientAuth}, expectErr: false, }, { usages: []capi.KeyUsage{"cert sign", "encipher only"}, expectedKeyUsage: x509.KeyUsageCertSign | x509.KeyUsageEncipherOnly, expectedExtKeyUsage: nil, expectErr: false, }, { usages: []capi.KeyUsage{"ocsp signing", "crl sign", "s/mime", "content commitment"}, expectedKeyUsage: x509.KeyUsageCRLSign | x509.KeyUsageContentCommitment, expectedExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageEmailProtection, x509.ExtKeyUsageOCSPSigning}, expectErr: false, }, { usages: []capi.KeyUsage{"unsupported string"}, expectedKeyUsage: 0, expectedExtKeyUsage: nil, expectErr: true, }, } for _, tc := range testcases { t.Run(fmt.Sprint(tc.usages), func(t *testing.T) { ku, eku, err := keyUsagesFromStrings(tc.usages) if tc.expectErr { if err == nil { t.Errorf("did not return an error, but expected one") } return } if err != nil { t.Errorf("unexpected error: %v", err) } if ku != tc.expectedKeyUsage || !reflect.DeepEqual(eku, tc.expectedExtKeyUsage) { t.Errorf("got=(%v, %v), want=(%v, %v)", ku, eku, tc.expectedKeyUsage, tc.expectedExtKeyUsage) } }) } }
explode_data.jsonl/674
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 965 }
[ 2830, 3393, 1592, 3558, 1134, 3830, 20859, 1155, 353, 8840, 836, 8, 341, 18185, 23910, 1669, 3056, 1235, 341, 197, 70175, 1134, 1060, 3056, 66, 2068, 9610, 14783, 198, 197, 42400, 1592, 14783, 262, 856, 20, 15, 24, 9610, 14783, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestCurl(t *testing.T) { tests := []struct { name string src []trinary.Trits hashLen int }{ {"trits and hash", Trits(bct.MaxBatchSize, consts.HashTrinarySize), consts.HashTrinarySize}, {"multi trits and hash", Trits(bct.MaxBatchSize, consts.TransactionTrinarySize), consts.HashTrinarySize}, {"trits and multi squeeze", Trits(bct.MaxBatchSize, consts.HashTrinarySize), 3 * consts.HashTrinarySize}, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { c := NewCurlP81() require.NoError(t, c.Absorb(tt.src, len(tt.src[0]))) dst := make([]trinary.Trits, len(tt.src)) require.NoError(t, c.Squeeze(dst, tt.hashLen)) for i := range dst { // compare against the non batched Curl implementation from iota.go require.Equal(t, CurlSum(tt.src[i], tt.hashLen), dst[i]) } }) } }
explode_data.jsonl/45215
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 368 }
[ 2830, 3393, 34, 1085, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 262, 914, 198, 197, 41144, 257, 3056, 376, 3287, 836, 1003, 82, 198, 197, 50333, 11271, 526, 198, 197, 59403, 197, 197, 4913, 376, 1199, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2