text
stringlengths
93
16.4k
id
stringlengths
20
40
metadata
dict
input_ids
listlengths
45
2.05k
attention_mask
listlengths
45
2.05k
complexity
int64
1
9
func TestCommandWithBotName(t *testing.T) { message := tgbotapi.Message{Text: "/command@testbot"} message.Entities = &[]tgbotapi.MessageEntity{{Type: "bot_command", Offset: 0, Length: 16}} if message.Command() != "command" { t.Fail() } }
explode_data.jsonl/25801
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 89 }
[ 2830, 3393, 4062, 2354, 23502, 675, 1155, 353, 8840, 836, 8, 341, 24753, 1669, 53188, 6331, 2068, 8472, 90, 1178, 25, 3521, 5631, 47327, 6331, 16707, 24753, 23793, 284, 609, 1294, 41428, 6331, 2068, 8472, 3030, 2979, 929, 25, 330, 6331,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIsSignatureContractBadWrongInstr(t *testing.T) { prog := make([]byte, 30) prog[0] = byte(PUSHBYTES33) assert.Equal(t, false, IsSignatureContract(prog)) }
explode_data.jsonl/40585
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 69 }
[ 2830, 3393, 3872, 25088, 14067, 17082, 29185, 74375, 1155, 353, 8840, 836, 8, 341, 197, 32992, 1669, 1281, 10556, 3782, 11, 220, 18, 15, 340, 197, 32992, 58, 15, 60, 284, 4922, 5304, 19518, 97849, 18, 18, 340, 6948, 12808, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestReceiversBuilder_Build(t *testing.T) { tests := []testCase{ { name: "one-exporter", receiverName: "examplereceiver", exporterNames: []string{"exampleexporter"}, hasTraces: true, hasMetrics: true, }, { name: "multi-exporter", receiverName: "examplereceiver/2", exporterNames: []string{"exampleexporter", "exampleexporter/2"}, hasTraces: true, }, { name: "multi-metrics-receiver", receiverName: "examplereceiver/3", exporterNames: []string{"exampleexporter", "exampleexporter/2"}, hasTraces: false, hasMetrics: true, }, { name: "multi-receiver-multi-exporter", receiverName: "examplereceiver/multi", exporterNames: []string{"exampleexporter", "exampleexporter/2"}, // Check pipelines_builder.yaml to understand this case. // We have 2 pipelines, one exporting to one exporter, the other // exporting to both exporters, so we expect a duplication on // one of the exporters, but not on the other. spanDuplicationByExporter: map[string]int{ "exampleexporter": 2, "exampleexporter/2": 1, }, hasTraces: true, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { testReceivers(t, test) }) } }
explode_data.jsonl/50902
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 538 }
[ 2830, 3393, 693, 346, 1945, 3297, 96686, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 66194, 515, 197, 197, 515, 298, 11609, 25, 688, 330, 603, 65827, 261, 756, 298, 17200, 12862, 675, 25, 220, 330, 41616, 500, 485, 12862, 756, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCleanerCond(t *testing.T) { d, err := Open("", testingRandomized(&Options{ FS: vfs.NewMem(), })) require.NoError(t, err) for i := 0; i < 10; i++ { d.mu.Lock() require.True(t, d.acquireCleaningTurn(true)) d.mu.Unlock() var wg sync.WaitGroup wg.Add(2) go func() { defer wg.Done() d.mu.Lock() if d.acquireCleaningTurn(true) { d.releaseCleaningTurn() } d.mu.Unlock() }() runtime.Gosched() go func() { defer wg.Done() d.mu.Lock() d.disableFileDeletions() d.enableFileDeletions() d.mu.Unlock() }() runtime.Gosched() d.mu.Lock() d.releaseCleaningTurn() d.mu.Unlock() wg.Wait() } require.NoError(t, d.Close()) }
explode_data.jsonl/51420
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 344 }
[ 2830, 3393, 27529, 261, 49696, 1155, 353, 8840, 836, 8, 341, 2698, 11, 1848, 1669, 5264, 19814, 7497, 13999, 1506, 2099, 3798, 515, 197, 197, 8485, 25, 92941, 7121, 18816, 3148, 197, 44194, 17957, 35699, 1155, 11, 1848, 692, 2023, 600, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSimple(t *testing.T) { RegisterTestingT(t) listener, err := net.Listen("tcp", "localhost:0") defer listener.Close() Expect(err).To(BeNil()) grpcServer := grpc.NewServer() monitor := crossconnect_monitor.NewCrossConnectMonitor() crossconnect.RegisterMonitorCrossConnectServer(grpcServer, monitor) go func() { grpcServer.Serve(listener) }() monitor.Update(&crossconnect.CrossConnect{Id: "1"}) startClient(listenerAddress(listener)) }
explode_data.jsonl/5042
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 157 }
[ 2830, 3393, 16374, 1155, 353, 8840, 836, 8, 341, 79096, 16451, 51, 1155, 692, 14440, 798, 11, 1848, 1669, 4179, 68334, 445, 27161, 497, 330, 8301, 25, 15, 1138, 16867, 11446, 10421, 741, 35911, 3964, 568, 1249, 76860, 19064, 12367, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDefault(t *testing.T) { cfg, output, err := settings.FromFlags("lambda-router", []string{}) if err != nil { t.Fatalf("Unexpected error: %v", err) } assert.Empty(t, output) expected := settings.DefaultConfig() assert.Equal(t, cfg, expected) }
explode_data.jsonl/14448
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 95 }
[ 2830, 3393, 3675, 1155, 353, 8840, 836, 8, 341, 50286, 11, 2550, 11, 1848, 1669, 5003, 11439, 9195, 445, 12935, 14266, 497, 3056, 917, 37790, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 29430, 1465, 25, 1018, 85, 497, 1848, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestRedis_BitOpXor(t *testing.T) { runOnRedis(t, func(client *Redis) { err := client.Set("key1", "\xff") assert.Nil(t, err) err = client.Set("key2", "\x0f") assert.Nil(t, err) _, err = NewRedis(client.Addr, "").BitOpXor("destKey", "key1", "key2") assert.NotNil(t, err) val, err := client.BitOpXor("destKey", "key1", "key2") assert.Nil(t, err) assert.Equal(t, int64(1), val) valStr, err := client.Get("destKey") assert.Nil(t, err) assert.Equal(t, "\xf0", valStr) }) }
explode_data.jsonl/39177
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 240 }
[ 2830, 3393, 48137, 1668, 275, 7125, 55, 269, 1155, 353, 8840, 836, 8, 341, 56742, 1925, 48137, 1155, 11, 2915, 12805, 353, 48137, 8, 341, 197, 9859, 1669, 2943, 4202, 445, 792, 16, 497, 2917, 9020, 1138, 197, 6948, 59678, 1155, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestKubeadmConfigReconciler_Reconcile_RequeueIfControlPlaneIsMissingAPIEndpoints(t *testing.T) { g := NewWithT(t) cluster := newCluster("cluster") cluster.Status.InfrastructureReady = true cluster.Status.ControlPlaneInitialized = true controlPlaneInitMachine := newControlPlaneMachine(cluster, "control-plane-init-machine") controlPlaneInitConfig := newControlPlaneInitKubeadmConfig(controlPlaneInitMachine, "control-plane-init-cfg") workerMachine := newWorkerMachine(cluster) workerJoinConfig := newWorkerJoinKubeadmConfig(workerMachine) objects := []client.Object{ cluster, workerMachine, workerJoinConfig, } objects = append(objects, createSecrets(t, cluster, controlPlaneInitConfig)...) myclient := helpers.NewFakeClientWithScheme(setupScheme(), objects...) k := &KubeadmConfigReconciler{ Client: myclient, KubeadmInitLock: &myInitLocker{}, } request := ctrl.Request{ NamespacedName: client.ObjectKey{ Namespace: "default", Name: "worker-join-cfg", }, } result, err := k.Reconcile(ctx, request) g.Expect(err).NotTo(HaveOccurred()) g.Expect(result.Requeue).To(BeFalse()) g.Expect(result.RequeueAfter).To(Equal(10 * time.Second)) }
explode_data.jsonl/44320
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 434 }
[ 2830, 3393, 42, 392, 3149, 76, 2648, 693, 40446, 5769, 50693, 40446, 457, 50693, 4584, 2679, 3273, 34570, 3872, 25080, 7082, 80786, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 2354, 51, 1155, 692, 197, 18855, 1669, 501, 28678, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestTestGetImmutableIterator(t *testing.T) { db := dbm.NewMemDB() tree, cID := newAlohaTree(t, db) store := UnsafeNewStore(tree) newStore, err := store.GetImmutable(cID.Version) require.NoError(t, err) iter := newStore.Iterator([]byte("aloha"), []byte("hellz")) expected := []string{"aloha", "hello"} var i int for i = 0; iter.Valid(); iter.Next() { expectedKey := expected[i] key, value := iter.Key(), iter.Value() require.EqualValues(t, key, expectedKey) require.EqualValues(t, value, treeData[expectedKey]) i++ } require.Equal(t, len(expected), i) }
explode_data.jsonl/38059
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 2271, 1949, 58890, 11951, 1155, 353, 8840, 836, 8, 341, 20939, 1669, 2927, 76, 7121, 18816, 3506, 741, 51968, 11, 272, 915, 1669, 501, 32, 385, 4223, 6533, 1155, 11, 2927, 340, 57279, 1669, 73067, 3564, 6093, 21298, 692, 8...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSchemaParser_SimpleNonNullType(t *testing.T) { body := ` type Hello { world: String! }` astDoc := parse(t, body) expected := ast.NewDocument(&ast.Document{ Loc: testLoc(1, 32), Definitions: []ast.Node{ ast.NewObjectDefinition(&ast.ObjectDefinition{ Loc: testLoc(1, 32), Name: ast.NewName(&ast.Name{ Value: "Hello", Loc: testLoc(6, 11), }), Directives: []*ast.Directive{}, Interfaces: []*ast.Named{}, Fields: []*ast.FieldDefinition{ ast.NewFieldDefinition(&ast.FieldDefinition{ Loc: testLoc(16, 30), Name: ast.NewName(&ast.Name{ Value: "world", Loc: testLoc(16, 21), }), Directives: []*ast.Directive{}, Arguments: []*ast.InputValueDefinition{}, Type: ast.NewNonNull(&ast.NonNull{ Kind: "NonNullType", Loc: testLoc(23, 30), Type: ast.NewNamed(&ast.Named{ Loc: testLoc(23, 29), Name: ast.NewName(&ast.Name{ Value: "String", Loc: testLoc(23, 29), }), }), }), }), }, }), }, }) if !reflect.DeepEqual(astDoc, expected) { t.Fatalf("unexpected document, expected: %v, got: %v", expected, astDoc) } }
explode_data.jsonl/73868
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 593 }
[ 2830, 3393, 8632, 6570, 1098, 6456, 16834, 929, 1155, 353, 8840, 836, 8, 1476, 35402, 1669, 22074, 1313, 21927, 341, 220, 1879, 25, 923, 4894, 31257, 88836, 9550, 1669, 4715, 1155, 11, 2487, 340, 42400, 1669, 11763, 7121, 7524, 2099, 55...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSpinLock(t *testing.T) { sp := NewSpinLock() tx1 := &pb.Transaction{ Txid: []byte("tx1"), TxInputs: []*pb.TxInput{ &pb.TxInput{ RefTxid: []byte("tx0"), }, &pb.TxInput{ RefTxid: []byte("tx3"), RefOffset: 1, }, }, TxOutputs: []*pb.TxOutput{ &pb.TxOutput{}, }, TxInputsExt: []*pb.TxInputExt{ &pb.TxInputExt{ Bucket: "bk2", Key: []byte("key2"), }, }, TxOutputsExt: []*pb.TxOutputExt{ &pb.TxOutputExt{ Bucket: "bk1", Key: []byte("key1"), }, }, } tx2 := &pb.Transaction{ TxInputsExt: []*pb.TxInputExt{ &pb.TxInputExt{ Bucket: "bk2", Key: []byte("key2"), }, }, TxInputs: []*pb.TxInput{ &pb.TxInput{ RefTxid: []byte("tx3"), }, }, } lockKeys1 := sp.ExtractLockKeys(tx1) lockKeys2 := sp.ExtractLockKeys(tx2) t.Log(lockKeys1) t.Log(lockKeys2) if fmt.Sprintf("%v", lockKeys1) != "[bk1/key1:X bk2/key2:S tx0_0:X tx1_0:X tx3_1:X]" { t.Fatal("tx1 lock error") } if fmt.Sprintf("%v", lockKeys2) != "[bk2/key2:S tx3_0:X]" { t.Fatal("tx2 lock error") } go func() { succLks, ok := sp.TryLock(lockKeys2) t.Log("tx2 got lock", succLks, ok) sp.Unlock(succLks) }() sp.TryLock(lockKeys1) if !sp.IsLocked("tx1_0") { t.Fatal("tx1_0 is expected to be locked") } time.Sleep(1 * time.Second) sp.Unlock(lockKeys1) t.Log("tx1 unlock") }
explode_data.jsonl/54170
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 735 }
[ 2830, 3393, 41738, 11989, 1155, 353, 8840, 836, 8, 341, 41378, 1669, 1532, 41738, 11989, 741, 46237, 16, 1669, 609, 16650, 29284, 515, 197, 10261, 87, 307, 25, 3056, 3782, 445, 3998, 16, 4461, 197, 10261, 87, 31946, 25, 29838, 16650, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGNMIClient(t *testing.T) { var displayOut string display := func(b []byte) { displayOut += string(b) + "\n" } tests := []struct { desc string updates []*fpb.Value query client.Query cfg Config want string sort bool }{{ desc: "single target single output with provided layout", updates: []*fpb.Value{{ Path: []string{"a"}, Timestamp: &fpb.Timestamp{Timestamp: 100}, Repeat: 1, Value: &fpb.Value_IntValue{IntValue: &fpb.IntValue{Value: 5}}, }, { Path: []string{"a", "b"}, Timestamp: &fpb.Timestamp{Timestamp: 100}, Repeat: 1, Value: &fpb.Value_IntValue{IntValue: &fpb.IntValue{Value: 5}}, }, { Path: []string{"a", "b"}, Timestamp: &fpb.Timestamp{Timestamp: 200}, Repeat: 1, Value: &fpb.Value_Delete{Delete: &fpb.DeleteValue{}}, }, { Timestamp: &fpb.Timestamp{Timestamp: 300}, Repeat: 1, Value: &fpb.Value_Sync{Sync: 1}, }}, query: client.Query{ Target: "dev", Queries: []client.Path{{"a"}}, Type: client.Once, Timeout: 3 * time.Second, TLS: &tls.Config{InsecureSkipVerify: true}, }, cfg: Config{ Delimiter: "/", Display: display, DisplayPrefix: "", DisplayIndent: " ", DisplayType: "single", Timestamp: "2006-01-02-15:04:05", Location: time.UTC, // make tests deterministic across different local time zones }, want: `dev/a, 5, 1970-01-01-00:00:00 dev/a/b, 5, 1970-01-01-00:00:00 dev/a/b, <nil>, 1970-01-01-00:00:00 `, }, { desc: "single target group output with provided layout", updates: []*fpb.Value{{ Path: []string{"a"}, Timestamp: &fpb.Timestamp{Timestamp: 100}, Repeat: 1, Value: &fpb.Value_IntValue{IntValue: &fpb.IntValue{Value: 5}}, }, { Path: []string{"a", "b"}, Timestamp: &fpb.Timestamp{Timestamp: 100}, Repeat: 1, Value: &fpb.Value_IntValue{IntValue: &fpb.IntValue{Value: 5}}, }, { Path: []string{"a", "b"}, Timestamp: &fpb.Timestamp{Timestamp: 200}, Repeat: 1, Value: &fpb.Value_Delete{Delete: &fpb.DeleteValue{}}, }, { Timestamp: &fpb.Timestamp{Timestamp: 300}, Repeat: 1, Value: &fpb.Value_Sync{Sync: 1}, }}, query: client.Query{ Target: "dev", Queries: []client.Path{{"a"}}, 
Type: client.Once, Timeout: 3 * time.Second, TLS: &tls.Config{InsecureSkipVerify: true}, }, cfg: Config{ Delimiter: "/", Display: display, DisplayPrefix: "", DisplayIndent: " ", DisplayType: "group", Timestamp: "2006-01-02-15:04:05", Location: time.UTC, // make tests deterministic across different local time zones }, want: `{ "dev": { "a": { "timestamp": "1970-01-01-00:00:00", "value": 5 } } } `, }, { desc: "single target multiple paths (proto short)", updates: []*fpb.Value{{ Path: []string{"a"}, Timestamp: &fpb.Timestamp{Timestamp: 100}, Repeat: 1, Value: &fpb.Value_IntValue{IntValue: &fpb.IntValue{Value: 5}}, }, { Path: []string{"a", "b"}, Timestamp: &fpb.Timestamp{Timestamp: 100}, Repeat: 1, Value: &fpb.Value_IntValue{IntValue: &fpb.IntValue{Value: 5}}, }, { Path: []string{"a", "b"}, Timestamp: &fpb.Timestamp{Timestamp: 200}, Repeat: 1, Value: &fpb.Value_Delete{Delete: &fpb.DeleteValue{}}, }, { Timestamp: &fpb.Timestamp{Timestamp: 300}, Repeat: 1, Value: &fpb.Value_Sync{Sync: 1}, }}, query: client.Query{ Target: "dev1", Queries: []client.Path{{"a"}}, Type: client.Once, Timeout: 3 * time.Second, TLS: &tls.Config{InsecureSkipVerify: true}, }, cfg: Config{ Display: display, DisplayType: "shortproto", }, want: `update:<timestamp:100 prefix:<target:"dev1" > update:<path:<element:"a" > val:<int_val:5 > > > update:<timestamp:100 prefix:<target:"dev1" > update:<path:<element:"a" element:"b" > val:<int_val:5 > > > update:<timestamp:200 prefix:<target:"dev1" > delete:<element:"a" element:"b" > > sync_response:true `, }} opt, err := config.WithSelfTLSCert() if err != nil { t.Fatalf("failed to generate cert: %v", err) } for _, tt := range tests { t.Run(tt.desc, func(t *testing.T) { displayOut = "" s, err := gnmi.New( &fpb.Config{ Target: "dev1", DisableSync: true, Values: tt.updates, }, []grpc.ServerOption{opt}, ) if err != nil { t.Fatal("failed to start test server") } defer s.Close() tt.query.Addrs = []string{s.Address()} if err := QueryDisplay(context.Background(), 
tt.query, &tt.cfg); err != nil { // This is fine if we use cfg.StreamingDuration. t.Logf("sendQueryAndDisplay returned error: %v", err) } // The test server comes up on an arbitrary port. Remove the port number // from the output before comparison. re := regexp.MustCompile("127.0.0.1:[0-9]*") got := re.ReplaceAllLiteralString(displayOut, "127.0.0.1:port") reLat := regexp.MustCompile(`\d+h\d+m\d+.\d+s`) got = reLat.ReplaceAllLiteralString(got, "<h>h<m>m<s>.<ns>s") if got != tt.want { t.Errorf("sendQueryAndDisplay(ctx, address, %v, %v):\ngot(%d):\n%s\nwant(%d):\n%s", tt.query, tt.cfg, len(got), got, len(tt.want), tt.want) } }) } }
explode_data.jsonl/3104
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2512 }
[ 2830, 3393, 25167, 54961, 1451, 1155, 353, 8840, 836, 8, 341, 2405, 3037, 2662, 914, 198, 31271, 1669, 2915, 1883, 3056, 3782, 8, 341, 197, 31271, 2662, 1421, 914, 1883, 8, 488, 2917, 77, 698, 197, 532, 78216, 1669, 3056, 1235, 341, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestContext2Plan_dataResourceBecomesComputed(t *testing.T) { m := testModule(t, "plan-data-resource-becomes-computed") p := testProvider("aws") p.GetSchemaReturn = &ProviderSchema{ ResourceTypes: map[string]*configschema.Block{ "aws_instance": { Attributes: map[string]*configschema.Attribute{ "foo": {Type: cty.String, Optional: true}, "computed": {Type: cty.String, Computed: true}, }, }, }, DataSources: map[string]*configschema.Block{ "aws_data_source": { Attributes: map[string]*configschema.Attribute{ "foo": {Type: cty.String, Optional: true}, }, }, }, } p.PlanResourceChangeFn = func(req providers.PlanResourceChangeRequest) providers.PlanResourceChangeResponse { fooVal := req.ProposedNewState.GetAttr("foo") return providers.PlanResourceChangeResponse{ PlannedState: cty.ObjectVal(map[string]cty.Value{ "foo": fooVal, "computed": cty.UnknownVal(cty.String), }), PlannedPrivate: req.PriorPrivate, } } schema := p.GetSchemaReturn.DataSources["aws_data_source"] ty := schema.ImpliedType() p.ReadDataSourceResponse = providers.ReadDataSourceResponse{ // This should not be called, because the configuration for the // data resource contains an unknown value for "foo". 
Diagnostics: tfdiags.Diagnostics(nil).Append(fmt.Errorf("ReadDataSource called, but should not have been")), } ctx := testContext2(t, &ContextOpts{ Config: m, ProviderResolver: providers.ResolverFixed( map[string]providers.Factory{ "aws": testProviderFuncFixed(p), }, ), State: MustShimLegacyState(&State{ Modules: []*ModuleState{ &ModuleState{ Path: rootModulePath, Resources: map[string]*ResourceState{ "data.aws_data_source.foo": &ResourceState{ Type: "aws_data_source", Primary: &InstanceState{ ID: "i-abc123", Attributes: map[string]string{ "id": "i-abc123", "foo": "baz", }, }, }, }, }, }, }), }) _, diags := ctx.Refresh() if diags.HasErrors() { t.Fatalf("unexpected errors during refresh: %s", diags.Err()) } plan, diags := ctx.Plan() if diags.HasErrors() { t.Fatalf("unexpected errors during plan: %s", diags.Err()) } rcs := plan.Changes.ResourceInstance(addrs.Resource{ Mode: addrs.DataResourceMode, Type: "aws_data_source", Name: "foo", }.Instance(addrs.NoKey).Absolute(addrs.RootModuleInstance)) if rcs == nil { t.Logf("full changeset: %s", spew.Sdump(plan.Changes)) t.Fatalf("missing diff for data.aws_data_resource.foo") } rc, err := rcs.Decode(ty) if err != nil { t.Fatal(err) } // foo should now be unknown foo := rc.After.GetAttr("foo") if foo.IsKnown() { t.Fatalf("foo should be unknown, got %#v", foo) } }
explode_data.jsonl/28672
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1147 }
[ 2830, 3393, 1972, 17, 20485, 1769, 4783, 33, 757, 20347, 56474, 1155, 353, 8840, 836, 8, 341, 2109, 1669, 1273, 3332, 1155, 11, 330, 10393, 13945, 74790, 1455, 757, 20347, 11476, 19292, 1138, 3223, 1669, 1273, 5179, 445, 8635, 5130, 322...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStoreCreateIfNotExists(t *testing.T) { testWithPostgresStoreV2(t, func(s storev2.Interface) { fixture := corev3.FixtureEntityState("foo") ctx := context.Background() req := storev2.NewResourceRequestFromResource(ctx, fixture) req.UsePostgres = true wrapper := WrapEntityState(fixture) // CreateIfNotExists should succeed if err := s.CreateIfNotExists(req, wrapper); err != nil { t.Fatal(err) } // CreateIfNotExists should fail if err := s.CreateIfNotExists(req, wrapper); err == nil { t.Error("expected non-nil error") } else if _, ok := err.(*store.ErrAlreadyExists); !ok { t.Errorf("wrong error: %s", err) } // UpdateIfExists should succeed if err := s.UpdateIfExists(req, wrapper); err != nil { t.Error(err) } }) }
explode_data.jsonl/73381
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 6093, 4021, 2679, 2623, 15575, 1155, 353, 8840, 836, 8, 341, 18185, 2354, 4133, 17818, 6093, 53, 17, 1155, 11, 2915, 1141, 3553, 85, 17, 41065, 8, 341, 197, 1166, 12735, 1669, 6200, 85, 18, 991, 12735, 3030, 1397, 445, 7...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPointInTimeRecovery(t *testing.T) { defer leaktest.AfterTest(t)() const numAccounts = 1000 _, _, sqlDB, _, cleanupFn := BackupRestoreTestSetup(t, singleNode, numAccounts, InitNone) defer cleanupFn() fullBackupDir := LocalFoo + "/full" sqlDB.Exec(t, `BACKUP data.* TO $1`, fullBackupDir) sqlDB.Exec(t, `UPDATE data.bank SET balance = 2`) incBackupDir := LocalFoo + "/inc" sqlDB.Exec(t, `BACKUP data.* TO $1 INCREMENTAL FROM $2`, incBackupDir, fullBackupDir) var beforeBadThingTs string sqlDB.Exec(t, `UPDATE data.bank SET balance = 3`) sqlDB.QueryRow(t, `SELECT cluster_logical_timestamp()`).Scan(&beforeBadThingTs) // Something bad happens. sqlDB.Exec(t, `UPDATE data.bank SET balance = 4`) beforeBadThingData := sqlDB.QueryStr(t, fmt.Sprintf(`SELECT * FROM data.bank AS OF SYSTEM TIME '%s' ORDER BY id`, beforeBadThingTs), ) // If no previous BACKUPs have been taken, a new one can be taken using `AS // OF SYSTEM TIME` with a timestamp before the badness started. This can // then be RESTORE'd into a temporary database. The operator can manually // reconcile the current data with the restored data before finally // RENAME-ing the table into the final location. t.Run("recovery=new-backup", func(t *testing.T) { sqlDB = sqlutils.MakeSQLRunner(sqlDB.DB) recoveryDir := LocalFoo + "/new-backup" sqlDB.Exec(t, fmt.Sprintf(`BACKUP data.* TO $1 AS OF SYSTEM TIME '%s'`, beforeBadThingTs), recoveryDir, ) sqlDB.Exec(t, `CREATE DATABASE newbackup`) sqlDB.Exec(t, `RESTORE data.* FROM $1 WITH into_db=newbackup`, recoveryDir) // Some manual reconciliation of the data in data.bank and // newbackup.bank could be done here by the operator. 
sqlDB.Exec(t, `DROP TABLE data.bank`) sqlDB.Exec(t, `ALTER TABLE newbackup.bank RENAME TO data.bank`) sqlDB.Exec(t, `DROP DATABASE newbackup`) sqlDB.CheckQueryResults(t, `SELECT * FROM data.bank ORDER BY id`, beforeBadThingData) }) // If there is a recent BACKUP (either full or incremental), then it will // likely be faster to make a BACKUP that is incremental from it and RESTORE // using that. Everything else works the same as above. t.Run("recovery=inc-backup", func(t *testing.T) { sqlDB = sqlutils.MakeSQLRunner(sqlDB.DB) recoveryDir := LocalFoo + "/inc-backup" sqlDB.Exec(t, fmt.Sprintf(`BACKUP data.* TO $1 AS OF SYSTEM TIME '%s' INCREMENTAL FROM $2, $3`, beforeBadThingTs), recoveryDir, fullBackupDir, incBackupDir, ) sqlDB.Exec(t, `CREATE DATABASE incbackup`) sqlDB.Exec(t, `RESTORE data.* FROM $1, $2, $3 WITH into_db=incbackup`, fullBackupDir, incBackupDir, recoveryDir, ) // Some manual reconciliation of the data in data.bank and // incbackup.bank could be done here by the operator. sqlDB.Exec(t, `DROP TABLE data.bank`) sqlDB.Exec(t, `ALTER TABLE incbackup.bank RENAME TO data.bank`) sqlDB.Exec(t, `DROP DATABASE incbackup`) sqlDB.CheckQueryResults(t, `SELECT * FROM data.bank ORDER BY id`, beforeBadThingData) }) }
explode_data.jsonl/57607
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1113 }
[ 2830, 3393, 2609, 641, 1462, 693, 7449, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 2822, 4777, 1629, 41369, 284, 220, 16, 15, 15, 15, 198, 197, 6878, 8358, 5704, 3506, 11, 8358, 21290, 24911, 1669, 43438, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRepository_Exists(t *testing.T) { // given sqlxDB, sqlMock := testdb.MockDatabase(t) defer sqlMock.AssertExpectations(t) sqlMock.ExpectQuery(regexp.QuoteMeta("SELECT 1 FROM public.applications WHERE tenant_id = $1 AND id = $2")).WithArgs( givenTenant(), givenID()). WillReturnRows(testdb.RowWhenObjectExist()) ctx := persistence.SaveToContext(context.TODO(), sqlxDB) repo := application.NewRepository(nil) // when ex, err := repo.Exists(ctx, givenTenant(), givenID()) // then require.NoError(t, err) assert.True(t, ex) }
explode_data.jsonl/52671
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 4624, 62, 15575, 1155, 353, 8840, 836, 8, 341, 197, 322, 2661, 198, 30633, 82344, 11, 5704, 11571, 1669, 1273, 1999, 24664, 5988, 1155, 340, 16867, 5704, 11571, 11711, 17536, 804, 1155, 692, 30633, 11571, 81893, 2859, 18390, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDeleteMissing(t *testing.T) { storage := map[string]RESTStorage{} ID := "id" simpleStorage := SimpleRESTStorage{ errors: map[string]error{"delete": apierrs.NewNotFound("simple", ID)}, } storage["simple"] = &simpleStorage handler := Handle(storage, codec, "/prefix/version", selfLinker) server := httptest.NewServer(handler) client := http.Client{} request, err := http.NewRequest("DELETE", server.URL+"/prefix/version/simple/"+ID, nil) response, err := client.Do(request) if err != nil { t.Errorf("unexpected error: %v", err) } if response.StatusCode != http.StatusNotFound { t.Errorf("Unexpected response %#v", response) } }
explode_data.jsonl/71496
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 229 }
[ 2830, 3393, 6435, 25080, 1155, 353, 8840, 836, 8, 341, 197, 16172, 1669, 2415, 14032, 60, 38307, 5793, 16094, 29580, 1669, 330, 307, 698, 1903, 6456, 5793, 1669, 8993, 38307, 5793, 515, 197, 73424, 25, 2415, 14032, 60, 841, 4913, 4542, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func Test_smallestFromLeaf(t *testing.T) { ast := assert.New(t) for _, tc := range tcs { root := kit.Ints2TreeNode(tc.root) ast.Equal(tc.ans, smallestFromLeaf(root), "输入:%v", tc) } }
explode_data.jsonl/38769
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 15874, 19236, 3830, 31461, 1155, 353, 8840, 836, 8, 341, 88836, 1669, 2060, 7121, 1155, 692, 2023, 8358, 17130, 1669, 2088, 259, 4837, 341, 197, 33698, 1669, 16138, 7371, 82, 17, 26597, 44415, 12576, 340, 197, 88836, 12808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
2
func TestHealthCheckTimeout(t *testing.T) { timeout := 500 * time.Millisecond tablet := topo.NewTablet(0, "cell", "a") tablet.PortMap["vt"] = 1 input := make(chan *querypb.StreamHealthResponse) createFakeConn(tablet, input) t.Logf(`createFakeConn({Host: "a", PortMap: {"vt": 1}}, c)`) l := newListener() hc := NewHealthCheck(1*time.Millisecond, 1*time.Millisecond, timeout).(*HealthCheckImpl) hc.SetListener(l, false) hc.AddTablet(tablet, "") t.Logf(`hc = HealthCheck(); hc.AddTablet({Host: "a", PortMap: {"vt": 1}}, "")`) // Immediately after AddTablet() there will be the first notification. want := &TabletStats{ Key: "a,vt:1", Tablet: tablet, Target: &querypb.Target{}, Up: true, Serving: false, } res := <-l.output if !reflect.DeepEqual(res, want) { t.Errorf(`<-l.output: %+v; want %+v`, res, want) } // one tablet after receiving a StreamHealthResponse shr := &querypb.StreamHealthResponse{ Target: &querypb.Target{Keyspace: "k", Shard: "s", TabletType: topodatapb.TabletType_MASTER}, Serving: true, TabletExternallyReparentedTimestamp: 10, RealtimeStats: &querypb.RealtimeStats{SecondsBehindMaster: 1, CpuUsage: 0.2}, } want = &TabletStats{ Key: "a,vt:1", Tablet: tablet, Target: &querypb.Target{Keyspace: "k", Shard: "s", TabletType: topodatapb.TabletType_MASTER}, Up: true, Serving: true, Stats: &querypb.RealtimeStats{SecondsBehindMaster: 1, CpuUsage: 0.2}, TabletExternallyReparentedTimestamp: 10, } input <- shr t.Logf(`input <- {{Keyspace: "k", Shard: "s", TabletType: MASTER}, Serving: true, TabletExternallyReparentedTimestamp: 10, {SecondsBehindMaster: 1, CpuUsage: 0.2}}`) res = <-l.output if !reflect.DeepEqual(res, want) { t.Errorf(`<-l.output: %+v; want %+v`, res, want) } // wait for timeout period time.Sleep(2 * timeout) t.Logf(`Sleep(2 * timeout)`) res = <-l.output if res.Serving { t.Errorf(`<-l.output: %+v; want not serving`, res) } // send a healthcheck response, it should be serving again input <- shr t.Logf(`input <- {{Keyspace: "k", Shard: "s", TabletType: MASTER}, 
Serving: true, TabletExternallyReparentedTimestamp: 10, {SecondsBehindMaster: 1, CpuUsage: 0.2}}`) res = <-l.output if !reflect.DeepEqual(res, want) { t.Errorf(`<-l.output: %+v; want %+v`, res, want) } // close healthcheck hc.Close() }
explode_data.jsonl/59733
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 983 }
[ 2830, 3393, 14542, 3973, 7636, 1155, 353, 8840, 836, 8, 341, 78395, 1669, 220, 20, 15, 15, 353, 882, 71482, 198, 26481, 83, 1669, 72519, 7121, 2556, 83, 7, 15, 11, 330, 5873, 497, 330, 64, 1138, 26481, 83, 43013, 2227, 1183, 9708, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestResourceDiffing(t *testing.T) { Given(t). Path(guestbookPath). When(). Create(). Sync(). Then(). Expect(SyncStatusIs(SyncStatusCodeSynced)). And(func(app *Application) { // Patch deployment _, err := KubeClientset.AppsV1().Deployments(DeploymentNamespace()).Patch(context.Background(), "guestbook-ui", types.JSONPatchType, []byte(`[{ "op": "replace", "path": "/spec/template/spec/containers/0/image", "value": "test" }]`), metav1.PatchOptions{}) assert.NoError(t, err) }). When(). Refresh(RefreshTypeNormal). Then(). Expect(SyncStatusIs(SyncStatusCodeOutOfSync)). And(func(app *Application) { diffOutput, err := RunCli("app", "diff", app.Name, "--local", "testdata/guestbook") assert.Error(t, err) assert.Contains(t, diffOutput, fmt.Sprintf("===== apps/Deployment %s/guestbook-ui ======", DeploymentNamespace())) }). Given(). ResourceOverrides(map[string]ResourceOverride{"apps/Deployment": { IgnoreDifferences: OverrideIgnoreDiff{JSONPointers: []string{"/spec/template/spec/containers/0/image"}}, }}). When(). Refresh(RefreshTypeNormal). Then(). Expect(SyncStatusIs(SyncStatusCodeSynced)). And(func(app *Application) { diffOutput, err := RunCli("app", "diff", app.Name, "--local", "testdata/guestbook") assert.NoError(t, err) assert.Empty(t, diffOutput) }) }
explode_data.jsonl/66677
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 526 }
[ 2830, 3393, 4783, 21751, 287, 1155, 353, 8840, 836, 8, 341, 9600, 2071, 1155, 4292, 197, 69640, 3268, 3045, 2190, 1820, 4292, 197, 197, 4498, 25829, 197, 75569, 25829, 197, 7568, 1721, 25829, 197, 197, 12209, 25829, 197, 35911, 3759, 17...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDSNWithCustomTLSQueryEscape(t *testing.T) { const configKey = "&%!:" dsn := "User:password@tcp(localhost:5555)/dbname?tls=" + url.QueryEscape(configKey) name := "foohost" tlsCfg := tls.Config{ServerName: name} RegisterTLSConfig(configKey, &tlsCfg) cfg, err := ParseDSN(dsn) if err != nil { t.Error(err.Error()) } else if cfg.tls.ServerName != name { t.Errorf("did not get the correct TLS ServerName (%s) parsing DSN (%s).", name, dsn) } }
explode_data.jsonl/70845
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 208 }
[ 2830, 3393, 5936, 45, 2354, 10268, 45439, 2859, 48124, 1155, 353, 8840, 836, 8, 972, 4777, 2193, 1592, 284, 13399, 89420, 2974, 319, 2698, 9613, 1669, 330, 1474, 25, 3833, 31, 27161, 7, 8301, 25, 20, 20, 20, 20, 5620, 35265, 30, 344...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestHandlerPanicWithHijack(t *testing.T) { // Only testing HTTP/1, and our http2 server doesn't support hijacking. testHandlerPanic(t, true, h1Mode, "intentional death for testing") }
explode_data.jsonl/22436
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 61 }
[ 2830, 3393, 3050, 47, 31270, 2354, 39, 3172, 473, 1155, 353, 8840, 836, 8, 341, 197, 322, 8278, 7497, 10130, 14, 16, 11, 323, 1039, 1758, 17, 3538, 3171, 944, 1824, 21415, 8985, 624, 18185, 3050, 47, 31270, 1155, 11, 830, 11, 305, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestWithdraw(t *testing.T) { TestSetRealOrderDefaults(t) withdrawCryptoRequest := withdraw.Request{ Crypto: withdraw.CryptoRequest{ Address: core.BitcoinDonationAddress, FeeAmount: 1, }, Amount: -1, Currency: currency.BTC, Description: "WITHDRAW IT ALL", TradePassword: "Password", } _, err := o.WithdrawCryptocurrencyFunds(&withdrawCryptoRequest) testStandardErrorHandling(t, err) }
explode_data.jsonl/30205
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 169 }
[ 2830, 3393, 92261, 1155, 353, 8840, 836, 8, 341, 73866, 1649, 12768, 4431, 16273, 1155, 692, 46948, 7633, 58288, 1900, 1669, 14798, 9659, 515, 197, 6258, 9444, 25, 14798, 727, 9444, 1900, 515, 298, 98090, 25, 256, 6200, 68866, 7160, 800...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCopyNestedWithUnexported(t *testing.T) { type Bar struct { a int B int } type Foo struct { A string B Bar } f1 := &Foo{A: "string", B: Bar{a: 1, B: 2}} var f2 Foo awsutil.Copy(&f2, f1) // Values match if v1, v2 := f2.A, f1.A; v1 != v2 { t.Errorf("expected values to be equivalent but received %v and %v", v1, v2) } if v1, v2 := f2.B, f1.B; v1 == v2 { t.Errorf("expected values to be not equivalent, but received %v", v1) } if v1, v2 := f2.B.a, f1.B.a; v1 == v2 { t.Errorf("expected values to be not equivalent, but received %v", v1) } if v1, v2 := f2.B.B, f2.B.B; v1 != v2 { t.Errorf("expected values to be equivalent but received %v and %v", v1, v2) } }
explode_data.jsonl/35700
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 327 }
[ 2830, 3393, 12106, 71986, 2354, 1806, 1533, 291, 1155, 353, 8840, 836, 8, 341, 13158, 4716, 2036, 341, 197, 11323, 526, 198, 197, 12791, 526, 198, 197, 532, 13158, 33428, 2036, 341, 197, 22985, 914, 198, 197, 12791, 4716, 198, 197, 63...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestCreate(t *testing.T) { storage, server := newStorage(t) defer server.Terminate(t) test := registrytest.New(t, storage.ReplicaSet.Etcd) rs := validNewReplicaSet() rs.ObjectMeta = api.ObjectMeta{} test.TestCreate( // valid rs, // invalid (invalid selector) &extensions.ReplicaSet{ Spec: extensions.ReplicaSetSpec{ Replicas: 2, Selector: &unversioned.LabelSelector{MatchLabels: map[string]string{}}, Template: validReplicaSet.Spec.Template, }, }, ) }
explode_data.jsonl/14199
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 197 }
[ 2830, 3393, 4021, 1155, 353, 8840, 836, 8, 341, 197, 16172, 11, 3538, 1669, 501, 5793, 1155, 340, 16867, 3538, 836, 261, 34016, 1155, 340, 18185, 1669, 19424, 1944, 7121, 1155, 11, 5819, 2817, 79, 15317, 1649, 5142, 83, 4385, 340, 412...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDecodeAssuredCallback(t *testing.T) { decoded := false expected := &Call{ Method: http.MethodPost, StatusCode: http.StatusCreated, Response: []byte(`{"done": true}`), Headers: map[string]string{"Assured-Callback-Target": "http://faketarget.com/", "Assured-Callback-Key": "call-key"}, } testDecode := func(resp http.ResponseWriter, req *http.Request) { c, err := decodeAssuredCallback(ctx, req) require.NoError(t, err) require.Equal(t, expected, c) decoded = true } req, err := http.NewRequest(http.MethodPost, "/callback?assured=max", bytes.NewBuffer([]byte(`{"done": true}`))) require.NoError(t, err) req.Header.Set(AssuredCallbackKey, "call-key") req.Header.Set(AssuredCallbackTarget, "http://faketarget.com/") router := mux.NewRouter() router.HandleFunc("/callback", testDecode).Methods(http.MethodPost) resp := httptest.NewRecorder() router.ServeHTTP(resp, req) require.True(t, decoded, "decode method was not hit") }
explode_data.jsonl/20257
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 367 }
[ 2830, 3393, 32564, 5615, 3073, 7494, 1155, 353, 8840, 836, 8, 341, 197, 62913, 1669, 895, 198, 42400, 1669, 609, 7220, 515, 197, 84589, 25, 257, 1758, 20798, 4133, 345, 197, 197, 15872, 25, 1758, 10538, 11694, 345, 197, 69604, 25, 256...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRepoRootForImportPath(t *testing.T) { testenv.MustHaveExternalNetwork(t) tests := []struct { path string want *repoRoot }{ { "github.com/golang/groupcache", &repoRoot{ vcs: vcsGit, repo: "https://github.com/golang/groupcache", }, }, // IBM DevOps Services tests { "hub.jazz.net/git/user1/pkgname", &repoRoot{ vcs: vcsGit, repo: "https://hub.jazz.net/git/user1/pkgname", }, }, { "hub.jazz.net/git/user1/pkgname/submodule/submodule/submodule", &repoRoot{ vcs: vcsGit, repo: "https://hub.jazz.net/git/user1/pkgname", }, }, { "hub.jazz.net", nil, }, { "hub2.jazz.net", nil, }, { "hub.jazz.net/someotherprefix", nil, }, { "hub.jazz.net/someotherprefix/user1/pkgname", nil, }, // Spaces are not valid in user names or package names { "hub.jazz.net/git/User 1/pkgname", nil, }, { "hub.jazz.net/git/user1/pkg name", nil, }, // Dots are not valid in user names { "hub.jazz.net/git/user.1/pkgname", nil, }, { "hub.jazz.net/git/user/pkg.name", &repoRoot{ vcs: vcsGit, repo: "https://hub.jazz.net/git/user/pkg.name", }, }, // User names cannot have uppercase letters { "hub.jazz.net/git/USER/pkgname", nil, }, // OpenStack tests { "git.openstack.org/openstack/swift", &repoRoot{ vcs: vcsGit, repo: "https://git.openstack.org/openstack/swift", }, }, // Trailing .git is less preferred but included for // compatibility purposes while the same source needs to // be compilable on both old and new go { "git.openstack.org/openstack/swift.git", &repoRoot{ vcs: vcsGit, repo: "https://git.openstack.org/openstack/swift.git", }, }, { "git.openstack.org/openstack/swift/go/hummingbird", &repoRoot{ vcs: vcsGit, repo: "https://git.openstack.org/openstack/swift", }, }, { "git.openstack.org", nil, }, { "git.openstack.org/openstack", nil, }, // Spaces are not valid in package name { "git.apache.org/package name/path/to/lib", nil, }, // Should have ".git" suffix { "git.apache.org/package-name/path/to/lib", nil, }, { "git.apache.org/package-name.git", &repoRoot{ vcs: vcsGit, repo: 
"https://git.apache.org/package-name.git", }, }, { "git.apache.org/package-name_2.x.git/path/to/lib", &repoRoot{ vcs: vcsGit, repo: "https://git.apache.org/package-name_2.x.git", }, }, } for _, test := range tests { got, err := repoRootForImportPath(test.path, secure) want := test.want if want == nil { if err == nil { t.Errorf("RepoRootForImport(%q): Error expected but not received", test.path) } continue } if err != nil { t.Errorf("RepoRootForImport(%q): %v", test.path, err) continue } if got.vcs.name != want.vcs.name || got.repo != want.repo { t.Errorf("RepoRootForImport(%q) = VCS(%s) Repo(%s), want VCS(%s) Repo(%s)", test.path, got.vcs, got.repo, want.vcs, want.repo) } } }
explode_data.jsonl/57221
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1533 }
[ 2830, 3393, 25243, 8439, 2461, 11511, 1820, 1155, 353, 8840, 836, 8, 341, 18185, 3160, 50463, 12116, 25913, 12320, 1155, 692, 78216, 1669, 3056, 1235, 341, 197, 26781, 914, 198, 197, 50780, 353, 23476, 8439, 198, 197, 59403, 197, 197, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestBadConfigOU(t *testing.T) { // testdata/badconfigou: // the configuration is such that only identities // with OU=COP2 and signed by the root ca should be validated thisMSP := getLocalMSP(t, "testdata/badconfigou") id, err := thisMSP.GetDefaultSigningIdentity() assert.NoError(t, err) // the default signing identity OU is COP but the msp is configured // to validate only identities whose OU is COP2 err = id.Validate() assert.Error(t, err) }
explode_data.jsonl/55271
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 149 }
[ 2830, 3393, 17082, 2648, 11922, 1155, 353, 8840, 836, 8, 341, 197, 322, 1273, 691, 3470, 329, 1676, 283, 510, 197, 322, 279, 6546, 374, 1741, 429, 1172, 39421, 198, 197, 322, 448, 62835, 40917, 3067, 17, 323, 8499, 553, 279, 3704, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_getPIClient(t *testing.T) { type args struct { debug bool host string scheme string } tests := []struct { name string args args }{ { name: "Scheme HTTTP", args: args{debug: true, host: "", scheme: "http"}, }, { name: "Scheme HTTTPS", args: args{debug: true, host: "", scheme: "https"}, }, { name: "Scheme Empty", args: args{debug: true, host: "", scheme: ""}, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { if got := getPIClient(tt.args.debug, tt.args.host, tt.args.scheme); got == nil { t.Errorf("getPIClient() = %v", got) } }) } }
explode_data.jsonl/37961
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 295 }
[ 2830, 3393, 3062, 81067, 1451, 1155, 353, 8840, 836, 8, 341, 13158, 2827, 2036, 341, 197, 39730, 220, 1807, 198, 197, 63104, 256, 914, 198, 197, 1903, 8058, 914, 198, 197, 532, 78216, 1669, 3056, 1235, 341, 197, 11609, 914, 198, 197, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestQueryParams_UnmarshalGQL(t *testing.T) { for name, tc := range map[string]struct { input interface{} err bool errmsg string expected QueryParams }{ //given "correct input": { input: map[string]interface{}{"param1": []interface{}{"val1", "val2"}}, err: false, expected: QueryParams{"param1": []string{"val1", "val2"}}, }, "error: input is nil": { input: nil, err: true, errmsg: "input should not be nil", }, "error: invalid input map type": { input: map[string]interface{}{"header": "invalid type"}, err: true, errmsg: "given value `string` must be a string array", }, "error: invalid input": { input: "invalid params", err: true, errmsg: "unexpected input type: string, should be map[string][]string", }, } { t.Run(name, func(t *testing.T) { //when params := QueryParams{} err := params.UnmarshalGQL(tc.input) //then if tc.err { assert.Error(t, err) assert.EqualError(t, err, tc.errmsg) assert.Empty(t, params) } else { assert.NoError(t, err) assert.Equal(t, tc.expected, params) } }) } }
explode_data.jsonl/70122
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 509 }
[ 2830, 3393, 2859, 4870, 40687, 27121, 38, 3588, 1155, 353, 8840, 836, 8, 1476, 2023, 829, 11, 17130, 1669, 2088, 2415, 14032, 60, 1235, 341, 197, 22427, 262, 3749, 16094, 197, 9859, 414, 1807, 198, 197, 9859, 3236, 256, 914, 198, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCheckTablesNum(t *testing.T) { ctx := cdcContext.NewBackendContext4Test(true) p, tester := initProcessor4Test(ctx, t) var err error _, err = p.Tick(ctx, p.changefeed) require.Nil(t, err) tester.MustApplyPatches() require.Equal(t, p.changefeed.TaskPositions[p.captureInfo.ID], &model.TaskPosition{ CheckPointTs: 0, ResolvedTs: 0, Count: 0, Error: nil, }) p, tester = initProcessor4Test(ctx, t) p.changefeed.Info.StartTs = 66 p.changefeed.Status.CheckpointTs = 88 _, err = p.Tick(ctx, p.changefeed) require.Nil(t, err) tester.MustApplyPatches() require.Equal(t, p.changefeed.TaskPositions[p.captureInfo.ID], &model.TaskPosition{ CheckPointTs: 88, ResolvedTs: 88, Count: 0, Error: nil, }) }
explode_data.jsonl/81935
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 359 }
[ 2830, 3393, 3973, 21670, 4651, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 272, 7628, 1972, 7121, 29699, 1972, 19, 2271, 3715, 340, 3223, 11, 37111, 1669, 2930, 22946, 19, 2271, 7502, 11, 259, 340, 2405, 1848, 1465, 198, 197, 6878, 184...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_addonValidator_validateDependencies(t *testing.T) { var cache = NewAddonVersionCacheClient() type fields struct { addon *addonmgrv1alpha1.Addon } tests := []struct { name string fields fields wantErr bool }{ // TODO: Add test cases. } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { av := &addonValidator{ addon: tt.fields.addon, cache: cache, dynClient: dynClient, } if err := av.validateDependencies(); (err != nil) != tt.wantErr { t.Errorf("addonValidator.validateDependencies() error = %v, wantErr %v", err, tt.wantErr) } }) } }
explode_data.jsonl/2960
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 273 }
[ 2830, 3393, 2891, 263, 14256, 42681, 48303, 1155, 353, 8840, 836, 8, 341, 2405, 6500, 284, 1532, 84312, 5637, 8233, 2959, 741, 13158, 5043, 2036, 341, 197, 12718, 263, 353, 51099, 48292, 85, 16, 7141, 16, 1904, 263, 198, 197, 532, 782...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSharedBufferConcurrent(t *testing.T) { const threads = 10 const iters = 200 buf := NewSharedBuffer(3) done := make(chan bool) for i := 0; i < threads; i++ { go func() { ch := buf.NewChannel() for i := 0; i < iters; i++ { ch.In() <- i val := <-ch.Out() if val.(int) != i { t.Error("Mismatched value out of channel") } } ch.Close() done <- true }() } for i := 0; i < threads; i++ { <-done } close(done) buf.Close() }
explode_data.jsonl/59665
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 222 }
[ 2830, 3393, 16997, 4095, 1109, 3231, 1155, 353, 8840, 836, 8, 341, 4777, 14564, 284, 220, 16, 15, 198, 4777, 432, 388, 284, 220, 17, 15, 15, 271, 26398, 1669, 1532, 16997, 4095, 7, 18, 340, 40495, 1669, 1281, 35190, 1807, 692, 2023,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestNewPrimaryHandler(t *testing.T) { var ( l = logging.New(nil) viper = viper.New() sw = &ServerHandler{} reg = &webhook.Registry{} expectedAuthHeader = []string{"Basic xxxxxxx"} ) viper.Set("authHeader", expectedAuthHeader) if _, err := NewPrimaryHandler(l, viper, sw, reg); err != nil { t.Fatalf("NewPrimaryHandler failed: %v", err) } }
explode_data.jsonl/63813
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 203 }
[ 2830, 3393, 3564, 15972, 3050, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 8810, 1698, 284, 8392, 7121, 27907, 340, 197, 5195, 12858, 1060, 284, 95132, 7121, 741, 197, 77295, 338, 284, 609, 5475, 3050, 16094, 197, 37013, 394, 284, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSelectInitializedVitessAwareVariable(t *testing.T) { executor, _, _, _ := createLegacyExecutorEnv() executor.normalize = true logChan := QueryLogger.Subscribe("Test") defer QueryLogger.Unsubscribe(logChan) masterSession.Autocommit = true masterSession.EnableSystemSettings = true defer func() { masterSession.Autocommit = false masterSession.EnableSystemSettings = false }() sql := "select @@autocommit, @@enable_system_settings" result, err := executorExec(executor, sql, nil) wantResult := &sqltypes.Result{ Fields: []*querypb.Field{ {Name: "@@autocommit", Type: sqltypes.Int64}, {Name: "@@enable_system_settings", Type: sqltypes.Int64}, }, RowsAffected: 1, Rows: [][]sqltypes.Value{{ sqltypes.NewInt64(1), sqltypes.NewInt64(1), }}, } require.NoError(t, err) utils.MustMatch(t, wantResult, result, "Mismatch") }
explode_data.jsonl/67389
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 329 }
[ 2830, 3393, 3379, 22495, 53, 275, 433, 58793, 7827, 1155, 353, 8840, 836, 8, 341, 67328, 4831, 11, 8358, 8358, 716, 1669, 1855, 77415, 25255, 14359, 741, 67328, 4831, 44657, 284, 830, 198, 6725, 46019, 1669, 11361, 7395, 82628, 445, 227...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestVerCodeValidate(t *testing.T) { maxAge := time.Hour * 24 * 14 oldTest := time.Now().Add(-1 * 20 * oneDay) cases := []struct { Name string Code VerificationCode Err error }{ { Name: "code too short", Code: VerificationCode{Code: "1"}, Err: ErrCodeTooShort, }, { Name: "invalid test type", Code: VerificationCode{ Code: "123456", TestType: "self-reported", }, Err: ErrInvalidTestType, }, { Name: "invalid test date", Code: VerificationCode{ Code: "123456", TestType: "negative", TestDate: &oldTest, }, Err: ErrTestTooOld, }, { Name: "already expired", Code: VerificationCode{ Code: "123456", TestType: "negative", ExpiresAt: time.Now().Add(-1 * time.Second), }, Err: ErrCodeAlreadyExpired, }, } for _, tc := range cases { t.Run(tc.Name, func(t *testing.T) { if err := tc.Code.Validate(maxAge); err != tc.Err { t.Fatalf("wrong error, want %v, got: %v", tc.Err, err) } }) } }
explode_data.jsonl/22972
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 476 }
[ 2830, 3393, 10141, 2078, 17926, 1155, 353, 8840, 836, 8, 341, 22543, 16749, 1669, 882, 73550, 353, 220, 17, 19, 353, 220, 16, 19, 198, 61828, 2271, 1669, 882, 13244, 1005, 2212, 4080, 16, 353, 220, 17, 15, 353, 825, 10159, 340, 1444...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestStrArray_SubSlice(t *testing.T) { gtest.C(t, func(t *gtest.T) { a1 := []string{"0", "1", "2", "3", "4", "5", "6"} array1 := garray.NewStrArrayFrom(a1) array2 := garray.NewStrArrayFrom(a1, true) t.Assert(array1.SubSlice(0, 2), []string{"0", "1"}) t.Assert(array1.SubSlice(2, 2), []string{"2", "3"}) t.Assert(array1.SubSlice(5, 8), []string{"5", "6"}) t.Assert(array1.SubSlice(8, 2), nil) t.Assert(array1.SubSlice(1, -2), nil) t.Assert(array1.SubSlice(-5, 2), []string{"2", "3"}) t.Assert(array1.SubSlice(-10, 1), nil) t.Assert(array2.SubSlice(0, 2), []string{"0", "1"}) }) }
explode_data.jsonl/53095
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 309 }
[ 2830, 3393, 2580, 1857, 36359, 33236, 1155, 353, 8840, 836, 8, 341, 3174, 1944, 727, 1155, 11, 2915, 1155, 353, 82038, 836, 8, 341, 197, 11323, 16, 1669, 3056, 917, 4913, 15, 497, 330, 16, 497, 330, 17, 497, 330, 18, 497, 330, 19,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestStreamWriter2(t *testing.T) { normalModeOpts := DefaultOptions managedModeOpts := DefaultOptions managedModeOpts.managedTxns = true for _, opts := range []*Options{&normalModeOpts, &managedModeOpts} { runBadgerTest(t, opts, func(t *testing.T, db *DB) { // write entries using stream writer noOfKeys := 1000 valueSize := 128 list := getSortedKVList(valueSize, noOfKeys) sw := db.NewStreamWriter() require.NoError(t, sw.Prepare(), "sw.Prepare() failed") require.NoError(t, sw.Write(list), "sw.Write() failed") // get max version of sw, will be used in transactions for managed mode maxVs := sw.maxVersion require.NoError(t, sw.Flush(), "sw.Flush() failed") // delete all the inserted keys val := make([]byte, valueSize) y.Check2(rand.Read(val)) for i := 0; i < noOfKeys; i++ { txn := db.newTransaction(true, opts.managedTxns) if opts.managedTxns { txn.readTs = math.MaxUint64 txn.commitTs = maxVs } keybyte := make([]byte, 8) keyNo := uint64(i) binary.BigEndian.PutUint64(keybyte, keyNo) require.NoError(t, txn.Delete(keybyte), "error while deleting keys") require.NoError(t, txn.Commit(), "error while commit") } // verify while iteration count of keys should be 0 err := db.View(func(txn *Txn) error { keysCount := 0 itrOps := DefaultIteratorOptions it := txn.NewIterator(itrOps) defer it.Close() for it.Rewind(); it.Valid(); it.Next() { keysCount++ } require.Zero(t, keysCount, "count of keys should be 0") return nil }) require.Nil(t, err, "error should be nil while iterating") }) } }
explode_data.jsonl/17980
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 673 }
[ 2830, 3393, 93031, 17, 1155, 353, 8840, 836, 8, 341, 197, 8252, 3636, 43451, 1669, 7899, 3798, 198, 197, 25270, 3636, 43451, 1669, 7899, 3798, 198, 197, 25270, 3636, 43451, 99052, 31584, 4412, 284, 830, 271, 2023, 8358, 12185, 1669, 208...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestSubtract(t *testing.T) { testCases := map[string]struct { a api.ResourceList b api.ResourceList expected api.ResourceList }{ "noKeys": { a: api.ResourceList{}, b: api.ResourceList{}, expected: api.ResourceList{}, }, "value-empty": { a: api.ResourceList{api.ResourceCPU: resource.MustParse("100m")}, b: api.ResourceList{}, expected: api.ResourceList{api.ResourceCPU: resource.MustParse("100m")}, }, "empty-value": { a: api.ResourceList{}, b: api.ResourceList{api.ResourceCPU: resource.MustParse("100m")}, expected: api.ResourceList{api.ResourceCPU: resource.MustParse("-100m")}, }, "value-value": { a: api.ResourceList{api.ResourceCPU: resource.MustParse("200m")}, b: api.ResourceList{api.ResourceCPU: resource.MustParse("100m")}, expected: api.ResourceList{api.ResourceCPU: resource.MustParse("100m")}, }, } for testName, testCase := range testCases { sub := Subtract(testCase.a, testCase.b) if result := Equals(testCase.expected, sub); !result { t.Errorf("%s expected: %v, actual: %v", testName, testCase.expected, sub) } } }
explode_data.jsonl/59926
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 511 }
[ 2830, 3393, 3136, 2144, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 2415, 14032, 60, 1235, 341, 197, 11323, 286, 6330, 20766, 852, 198, 197, 2233, 286, 6330, 20766, 852, 198, 197, 42400, 6330, 20766, 852, 198, 197, 59403, 197, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestSortableList(t *testing.T) { higherPriority := func(pod1, pod2 interface{}) bool { return GetPodPriority(pod1.(*v1.Pod)) > GetPodPriority(pod2.(*v1.Pod)) } podList := SortableList{CompFunc: higherPriority} // Add a few Pods with different priorities from lowest to highest priority. for i := 0; i < 10; i++ { var p int32 = int32(i) pod := &v1.Pod{ Spec: v1.PodSpec{ Containers: []v1.Container{ { Name: "container", Image: "image", }, }, Priority: &p, }, } podList.Items = append(podList.Items, pod) } podList.Sort() if len(podList.Items) != 10 { t.Errorf("expected length of list was 10, got: %v", len(podList.Items)) } var prevPriority = int32(10) for _, p := range podList.Items { if *p.(*v1.Pod).Spec.Priority >= prevPriority { t.Errorf("Pods are not soreted. Current pod pririty is %v, while previous one was %v.", *p.(*v1.Pod).Spec.Priority, prevPriority) } } }
explode_data.jsonl/59155
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 406 }
[ 2830, 3393, 88352, 852, 1155, 353, 8840, 836, 8, 341, 9598, 1090, 261, 20555, 1669, 2915, 1295, 347, 16, 11, 7509, 17, 3749, 28875, 1807, 341, 197, 853, 2126, 23527, 20555, 1295, 347, 16, 41399, 85, 16, 88823, 593, 861, 2126, 23527, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestApplyNilRules(t *testing.T) { var rules MetricRules res, s := rules.Apply("hello") if "hello" != s { t.Fatal(s) } if RuleResultUnmatched != res { t.Fatal(res) } }
explode_data.jsonl/63830
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 28497, 19064, 26008, 1155, 353, 8840, 836, 8, 341, 2405, 5601, 52458, 26008, 271, 10202, 11, 274, 1669, 5601, 36051, 445, 14990, 1138, 743, 330, 14990, 1, 961, 274, 341, 197, 3244, 26133, 1141, 340, 197, 532, 743, 18100, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestTransportConnectionCloseOnResponse(t *testing.T) { defer afterTest(t) ts := httptest.NewServer(hostPortHandler) defer ts.Close() connSet, testDial := makeTestDial(t) for _, connectionClose := range []bool{false, true} { tr := &Transport{ Dial: testDial, } c := &Client{Transport: tr} fetch := func(n int) string { req := new(Request) var err error req.URL, err = url.Parse(ts.URL + fmt.Sprintf("/?close=%v", connectionClose)) if err != nil { t.Fatalf("URL parse error: %v", err) } req.Method = "GET" req.Proto = "HTTP/1.1" req.ProtoMajor = 1 req.ProtoMinor = 1 res, err := c.Do(req) if err != nil { t.Fatalf("error in connectionClose=%v, req #%d, Do: %v", connectionClose, n, err) } defer res.Body.Close() body, err := ioutil.ReadAll(res.Body) if err != nil { t.Fatalf("error in connectionClose=%v, req #%d, ReadAll: %v", connectionClose, n, err) } return string(body) } body1 := fetch(1) body2 := fetch(2) bodiesDiffer := body1 != body2 if bodiesDiffer != connectionClose { t.Errorf("error in connectionClose=%v. unexpected bodiesDiffer=%v; body1=%q; body2=%q", connectionClose, bodiesDiffer, body1, body2) } tr.CloseIdleConnections() } connSet.check(t) }
explode_data.jsonl/4874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 544 }
[ 2830, 3393, 27560, 4526, 7925, 1925, 2582, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 57441, 1669, 54320, 70334, 7121, 5475, 19973, 7084, 3050, 340, 16867, 10591, 10421, 2822, 32917, 1649, 11, 1273, 35, 530, 1669, 1281, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestConvertYAMLToJSON(t *testing.T) { input1 := ` a: 1 b: 'test' c: - 2 - 3 - 4 3: 'test2'` _, err := ConvertYAMLToJSON( []byte(input1) ) if err != nil { t.Errorf("ConvertYAMLToJSON should have been able to convert input 1") } }
explode_data.jsonl/52977
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 137 }
[ 2830, 3393, 12012, 56, 31102, 1249, 5370, 1155, 353, 8840, 836, 8, 341, 220, 1946, 16, 1669, 22074, 262, 264, 25, 220, 16, 198, 262, 293, 25, 364, 1944, 1248, 262, 272, 510, 414, 481, 220, 17, 198, 414, 481, 220, 18, 198, 414, 4...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPoolExhaustOnCancel(t *testing.T) { if testing.Short() { t.Skip("long test") } max := 3 var saturate, saturateDone sync.WaitGroup saturate.Add(max) saturateDone.Add(max) donePing := make(chan bool) state := 0 // waiter will be called for all queries, including // initial setup queries. The state is only assigned when // no queries are made. // // Only allow the first batch of queries to finish once the // second batch of Ping queries have finished. waiter := func(ctx context.Context) { switch state { case 0: // Nothing. Initial database setup. case 1: saturate.Done() select { case <-ctx.Done(): case <-donePing: } case 2: } } db := newTestDBConnector(t, &fakeConnector{waiter: waiter}, "people") defer closeDB(t, db) db.SetMaxOpenConns(max) // First saturate the connection pool. // Then start new requests for a connection that is canceled after it is requested. state = 1 for i := 0; i < max; i++ { go func() { rows, err := db.Query("SELECT|people|name,photo|") if err != nil { t.Errorf("Query: %v", err) return } rows.Close() saturateDone.Done() }() } saturate.Wait() if t.Failed() { t.FailNow() } state = 2 // Now cancel the request while it is waiting. ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) defer cancel() for i := 0; i < max; i++ { ctxReq, cancelReq := context.WithCancel(ctx) go func() { time.Sleep(100 * time.Millisecond) cancelReq() }() err := db.PingContext(ctxReq) if err != context.Canceled { t.Fatalf("PingContext (Exhaust): %v", err) } } close(donePing) saturateDone.Wait() // Now try to open a normal connection. err := db.PingContext(ctx) if err != nil { t.Fatalf("PingContext (Normal): %v", err) } }
explode_data.jsonl/15958
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 698 }
[ 2830, 3393, 10551, 840, 15074, 1925, 9269, 1155, 353, 8840, 836, 8, 341, 743, 7497, 55958, 368, 341, 197, 3244, 57776, 445, 4825, 1273, 1138, 197, 630, 22543, 1669, 220, 18, 198, 2405, 93477, 349, 11, 93477, 349, 17453, 12811, 28384, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestHijackAfterCloseNotifier(t *testing.T) { defer afterTest(t) script := make(chan string, 2) script <- "closenotify" script <- "hijack" close(script) ts := httptest.NewServer(HandlerFunc(func(w ResponseWriter, r *Request) { plan := <-script switch plan { default: panic("bogus plan; too many requests") case "closenotify": w.(CloseNotifier).CloseNotify() // discard result w.Header().Set("X-Addr", r.RemoteAddr) case "hijack": c, _, err := w.(Hijacker).Hijack() if err != nil { t.Errorf("Hijack in Handler: %v", err) return } if _, ok := c.(*net.TCPConn); !ok { // Verify it's not wrapped in some type. // Not strictly a go1 compat issue, but in practice it probably is. t.Errorf("type of hijacked conn is %T; want *net.TCPConn", c) } fmt.Fprintf(c, "HTTP/1.0 200 OK\r\nX-Addr: %v\r\nContent-Length: 0\r\n\r\n", r.RemoteAddr) c.Close() return } })) defer ts.Close() res1, err := Get(ts.URL) if err != nil { log.Fatal(err) } res2, err := Get(ts.URL) if err != nil { log.Fatal(err) } addr1 := res1.Header.Get("X-Addr") addr2 := res2.Header.Get("X-Addr") if addr1 == "" || addr1 != addr2 { t.Errorf("addr1, addr2 = %q, %q; want same", addr1, addr2) } }
explode_data.jsonl/22455
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 561 }
[ 2830, 3393, 39, 3172, 473, 6025, 7925, 64729, 1155, 353, 8840, 836, 8, 341, 16867, 1283, 2271, 1155, 340, 86956, 1669, 1281, 35190, 914, 11, 220, 17, 340, 86956, 9119, 330, 66, 2301, 268, 37253, 698, 86956, 9119, 330, 71, 3172, 473, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestPaging(t *testing.T) { session := createSession(t) defer session.Close() if session.cfg.ProtoVersion == 1 { t.Skip("Paging not supported. Please use Cassandra >= 2.0") } if err := createTable(session, "CREATE TABLE gocql_test.paging (id int primary key)"); err != nil { t.Fatal("create table:", err) } for i := 0; i < 100; i++ { if err := session.Query("INSERT INTO paging (id) VALUES (?)", i).Exec(); err != nil { t.Fatal("insert:", err) } } iter := session.Query("SELECT id FROM paging").PageSize(10).Iter() var id int count := 0 for iter.Scan(&id) { count++ } if err := iter.Close(); err != nil { t.Fatal("close:", err) } if count != 100 { t.Fatalf("expected %d, got %d", 100, count) } }
explode_data.jsonl/11135
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 47, 4118, 1155, 353, 8840, 836, 8, 341, 25054, 1669, 1855, 5283, 1155, 340, 16867, 3797, 10421, 2822, 743, 3797, 30481, 7763, 983, 5637, 621, 220, 16, 341, 197, 3244, 57776, 445, 47, 4118, 537, 7248, 13, 5209, 990, 81242, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
8
func TestJSON(t *testing.T) { t.Skip("This test is disabled because every flavor of mysql has a different behavior.") // JSON is supported only after mysql57. if err := env.Mysqld.ExecuteSuperQuery(context.Background(), "create table vitess_json(id int default 1, val json, primary key(id))"); err != nil { // If it's a syntax error, MySQL is an older version. Skip this test. if strings.Contains(err.Error(), "syntax") { return } t.Fatal(err) } defer execStatement(t, "drop table vitess_json") engine.se.Reload(context.Background()) testcases := []testcase{{ input: []string{ `insert into vitess_json values(1, '{"foo": "bar"}')`, }, output: [][]string{{ `begin`, `type:FIELD field_event:<table_name:"vitess_json" fields:<name:"id" type:INT32 > fields:<name:"val" type:JSON > > `, `type:ROW row_event:<table_name:"vitess_json" row_changes:<after:<lengths:1 lengths:24 values:"1JSON_OBJECT('foo','bar')" > > > `, `gtid`, `commit`, }}, }} runCases(t, nil, testcases, "") }
explode_data.jsonl/70191
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 388 }
[ 2830, 3393, 5370, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 1986, 1273, 374, 8386, 1576, 1449, 17172, 315, 10564, 702, 264, 2155, 7709, 31225, 197, 322, 4718, 374, 7248, 1172, 1283, 10564, 20, 22, 624, 743, 1848, 1669, 6105, 132...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestValidate_UniqueOperationNames_MultipleOperationsOfSameNameOfDifferentTypes_Subscription(t *testing.T) { testutil.ExpectFailsRule(t, graphql.UniqueOperationNamesRule, ` query Foo { fieldA } subscription Foo { fieldB } `, []gqlerrors.FormattedError{ testutil.RuleError(`There can only be one operation named "Foo".`, 2, 13, 5, 20), }) }
explode_data.jsonl/23175
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 154 }
[ 2830, 3393, 17926, 62, 22811, 8432, 7980, 1245, 12229, 35120, 2124, 19198, 675, 2124, 69123, 4173, 36359, 12124, 1155, 353, 8840, 836, 8, 341, 18185, 1314, 81893, 37, 6209, 11337, 1155, 11, 48865, 87443, 8432, 7980, 11337, 11, 22074, 414,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_Gather(t *testing.T) { t.Logf("Start HTTP mock (%s) with sampleJSON", fluentdTest.Endpoint) ts := httptest.NewUnstartedServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") fmt.Fprintf(w, "%s", string(sampleJSON)) })) requestURL, _ := url.Parse(fluentdTest.Endpoint) ts.Listener, _ = net.Listen("tcp", fmt.Sprintf("%s:%s", requestURL.Hostname(), requestURL.Port())) ts.Start() defer ts.Close() var acc testutil.Accumulator err := fluentdTest.Gather(&acc) if err != nil { t.Error(err) } if !acc.HasMeasurement("fluentd") { t.Errorf("acc.HasMeasurement: expected fluentd") } assert.Equal(t, expectedOutput[0].PluginID, acc.Metrics[0].Tags["plugin_id"]) assert.Equal(t, expectedOutput[0].PluginType, acc.Metrics[0].Tags["plugin_type"]) assert.Equal(t, expectedOutput[0].PluginCategory, acc.Metrics[0].Tags["plugin_category"]) assert.Equal(t, *expectedOutput[0].RetryCount, acc.Metrics[0].Fields["retry_count"]) assert.Equal(t, expectedOutput[1].PluginID, acc.Metrics[1].Tags["plugin_id"]) assert.Equal(t, expectedOutput[1].PluginType, acc.Metrics[1].Tags["plugin_type"]) assert.Equal(t, expectedOutput[1].PluginCategory, acc.Metrics[1].Tags["plugin_category"]) assert.Equal(t, *expectedOutput[1].RetryCount, acc.Metrics[1].Fields["retry_count"]) assert.Equal(t, *expectedOutput[1].BufferQueueLength, acc.Metrics[1].Fields["buffer_queue_length"]) assert.Equal(t, *expectedOutput[1].BufferTotalQueuedSize, acc.Metrics[1].Fields["buffer_total_queued_size"]) }
explode_data.jsonl/35108
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 598 }
[ 2830, 3393, 2646, 1856, 1155, 353, 8840, 836, 8, 341, 3244, 98954, 445, 3479, 10130, 7860, 13456, 82, 8, 448, 6077, 5370, 497, 57768, 67, 2271, 90409, 692, 57441, 1669, 54320, 70334, 7121, 1806, 46723, 5475, 19886, 89164, 18552, 3622, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestJvDump(t *testing.T) { table := []struct { input string flags jq.JvPrintFlags output string }{ {"test", jq.JvPrintNone, `"test"`}, {"test", jq.JvPrintColour, "\x1b[0;32m" + `"test"` + "\x1b[0m"}, } for _, tt := range table { t.Run(fmt.Sprintf("%s-%b", tt.input, tt.flags), func(t *testing.T) { jv := jq.JvFromString(tt.input) defer jv.Free() dump := jv.Copy().Dump(tt.flags) if dump != tt.output { t.Errorf("dump not equal to expected got: %#v want: %#v", dump, tt.output) } }) } }
explode_data.jsonl/38759
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 266 }
[ 2830, 3393, 41, 85, 51056, 1155, 353, 8840, 836, 8, 341, 26481, 1669, 3056, 1235, 341, 197, 22427, 220, 914, 198, 197, 59516, 220, 44648, 3503, 85, 8994, 9195, 198, 197, 21170, 914, 198, 197, 59403, 197, 197, 4913, 1944, 497, 44648, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGAMinimizeEarlyStop(t *testing.T) { ga, err := NewDefaultGAConfig().NewGA() if err != nil { t.Errorf("Expected nil, got %v", err) } ga.NGenerations = 20 ga.EarlyStop = func(ga *GA) bool { return ga.Generations == 10 } if err = ga.Minimize(NewVector); err != nil { t.Errorf("Expected nil, got %v", err) } if ga.Generations != 10 { t.Errorf("Expected 10, got %d", ga.Generations) } }
explode_data.jsonl/82089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 38, 1402, 258, 11853, 41198, 10674, 1155, 353, 8840, 836, 8, 341, 3174, 64, 11, 1848, 1669, 1532, 3675, 16128, 2648, 1005, 3564, 16128, 741, 743, 1848, 961, 2092, 341, 197, 3244, 13080, 445, 18896, 2092, 11, 2684, 1018, 85...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCheckLeaseWithProofs(t *testing.T) { to, path := createCheckerTestObjects(t) defer func() { to.stor.close(t) err := common.CleanTemporaryDirs(path) assert.NoError(t, err, "failed to clean test data dirs") }() tx := createLeaseWithProofs(t) info := defaultCheckerInfo(t) tx.Recipient = proto.NewRecipientFromAddress(testGlobal.senderInfo.addr) _, err := to.tc.checkLeaseWithProofs(tx, info) assert.Error(t, err, "checkLeaseWithProofs did not fail when leasing to self") tx = createLeaseWithProofs(t) _, err = to.tc.checkLeaseWithProofs(tx, info) assert.Error(t, err, "checkLeaseWithProofs did not fail prior to SmartAccounts activation") to.stor.activateFeature(t, int16(settings.SmartAccounts)) _, err = to.tc.checkLeaseWithProofs(tx, info) assert.NoError(t, err, "checkLeaseWithProofs failed with valid lease tx") }
explode_data.jsonl/63089
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 313 }
[ 2830, 3393, 3973, 2304, 519, 2354, 31076, 82, 1155, 353, 8840, 836, 8, 341, 31709, 11, 1815, 1669, 1855, 35188, 2271, 11543, 1155, 692, 16867, 2915, 368, 341, 197, 31709, 1236, 269, 4653, 1155, 692, 197, 9859, 1669, 4185, 727, 2675, 5...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestFindBundleCacheDoesNotExist(t *testing.T) { cfg := config.NewTestConfig(t) home, err := cfg.Config.GetHomeDir() require.NoError(t, err, "should have had a porter home dir") cacheDir := filepath.Join(home, "cache") cfg.TestContext.AddTestDirectory("testdata", cacheDir) c := New(cfg.Config) _, _, ok, err := c.FindBundle("deislabs/kubekahn:latest") assert.NoError(t, err, "the cache dir doesn't exist, but this shouldn't be an error") assert.False(t, ok, "the bundle shouldn't exist") }
explode_data.jsonl/56181
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 181 }
[ 2830, 3393, 9885, 8409, 8233, 21468, 45535, 1155, 353, 8840, 836, 8, 341, 50286, 1669, 2193, 7121, 2271, 2648, 1155, 340, 197, 5117, 11, 1848, 1669, 13286, 10753, 2234, 7623, 6184, 741, 17957, 35699, 1155, 11, 1848, 11, 330, 5445, 614, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestNotMounted(t *testing.T) { t.Skip("FIXME: not a unit test") t.Skip("Not implemented") d := newDriver(t) defer cleanup(d) if err := d.Create("1", ""); err != nil { t.Fatal(err) } mounted, err := Mounted(path.Join(d.home, "mnt", "1")) if err != nil { t.Fatal(err) } if mounted { t.Fatal("Id 1 should not be mounted") } }
explode_data.jsonl/45481
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 152 }
[ 2830, 3393, 2623, 90789, 1155, 353, 8840, 836, 8, 341, 3244, 57776, 445, 81019, 25, 537, 264, 4982, 1273, 1138, 3244, 57776, 445, 2623, 11537, 1138, 2698, 1669, 501, 11349, 1155, 340, 16867, 21290, 1500, 692, 743, 1848, 1669, 294, 7251,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestDeleteIntervalByName(t *testing.T) { tests := []struct { name string request *http.Request dbMock interfaces.DBClient scMock interfaces.SchedulerQueueClient expectedStatus int }{ { name: "OK", request: createDeleteRequest(NAME, TestName), dbMock: createMockNameDeleterSuccess(), scMock: createMockNameSCDeleterSuccess(), expectedStatus: http.StatusOK, }, { name: "Interval not found", request: createDeleteRequest(NAME, TestName), dbMock: createMockNameDeleterNotFoundErr(), scMock: createMockNameSCDeleterSuccess(), expectedStatus: http.StatusNotFound, }, { name: "Error QueryUnescape", request: createDeleteRequest(NAME, TestIncorrectName), dbMock: createMockNameDeleterSuccess(), scMock: createMockNameSCDeleterSuccess(), expectedStatus: http.StatusBadRequest, }, { name: "Error ErrServiceClient", request: createDeleteRequest(NAME, TestName), dbMock: createMockNameDeleterErrServiceClient(), scMock: createMockNameSCDeleterSuccess(), expectedStatus: 500, }, { name: "ErrIntervalStillUsedByIntervalActions Error", request: createDeleteRequest(NAME, TestName), dbMock: createMockNameSCDeleterErrIntervalStillUsed(), scMock: createMockNameSCDeleterSuccess(), expectedStatus: http.StatusBadRequest, }, { name: "Unknown Error", request: createDeleteRequest(NAME, TestName), dbMock: createMockNameDeleterErr(), scMock: createMockNameSCDeleterSuccess(), expectedStatus: http.StatusInternalServerError, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { rr := httptest.NewRecorder() restDeleteIntervalByName(rr, tt.request, logger.NewMockClient(), tt.dbMock, tt.scMock) response := rr.Result() if response.StatusCode != tt.expectedStatus { t.Errorf("status code mismatch -- expected %v got %v", tt.expectedStatus, response.StatusCode) return } }) } }
explode_data.jsonl/51575
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 977 }
[ 2830, 3393, 6435, 10256, 16898, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 23555, 286, 353, 1254, 9659, 198, 197, 20939, 11571, 260, 24099, 22537, 2959, 198, 197, 29928, 11571, 260, 2409...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestPFAdd(t *testing.T) { r.Del("hll") n, err := r.PFAdd("hll", "a", "b") if err != nil { t.Error(err.Error()) } if n != 1 { t.Fail() } }
explode_data.jsonl/81267
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 81 }
[ 2830, 3393, 19701, 2212, 1155, 353, 8840, 836, 8, 341, 7000, 909, 301, 445, 71, 654, 1138, 9038, 11, 1848, 1669, 435, 1069, 37, 2212, 445, 71, 654, 497, 330, 64, 497, 330, 65, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 6141, 3964...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
3
func TestHashToG1Point(t *testing.T) { msg := []byte{1, 2, 3, 4, 5, 6, 7, 8, 9, 10} g1 := HashToG1Point(msg) if g1 == nil { t.Error("g1 is nil") } form := g1.Marshal() _, ok := new(G1).Unmarshal(form) if !ok { t.Fatalf("failed to unmarshal") } g1.ScalarBaseMult(Order) form = g1.Marshal() g2, ok := new(G1).Unmarshal(form) if !ok { t.Fatalf("failed to unmarshal ∞") } if !g2.p.IsInfinity() { t.Fatalf("∞ unmarshaled incorrectly") } one := new(G1).ScalarBaseMult(new(big.Int).SetInt64(1)) g1.Add(g1, one) g1.p.MakeAffine(nil) if g1.p.x.Cmp(one.p.x) != 0 || g1.p.y.Cmp(one.p.y) != 0 { t.Errorf("1+0 != 1 in G1") } }
explode_data.jsonl/52906
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 343 }
[ 2830, 3393, 6370, 1249, 38, 16, 2609, 1155, 353, 8840, 836, 8, 1476, 21169, 1669, 3056, 3782, 90, 16, 11, 220, 17, 11, 220, 18, 11, 220, 19, 11, 220, 20, 11, 220, 21, 11, 220, 22, 11, 220, 23, 11, 220, 24, 11, 220, 16, 15, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func Test_StaticHeaders(t *testing.T) { // Test default policy "sameorigin" { s := fakeServer() cancelInformer := test.StartInformer(s.projInformer) defer cancelInformer() port, err := test.GetFreePort() assert.NoError(t, err) metricsPort, err := test.GetFreePort() assert.NoError(t, err) ctx, cancel := context.WithCancel(context.Background()) defer cancel() go s.Run(ctx, port, metricsPort) defer func() { time.Sleep(3 * time.Second) }() err = test.WaitForPortListen(fmt.Sprintf("127.0.0.1:%d", port), 10*time.Second) assert.NoError(t, err) // Allow server startup time.Sleep(1 * time.Second) client := http.Client{} url := fmt.Sprintf("http://127.0.0.1:%d/test.html", port) req, err := http.NewRequest("GET", url, nil) assert.NoError(t, err) resp, err := client.Do(req) assert.NoError(t, err) assert.Equal(t, "sameorigin", resp.Header.Get("X-Frame-Options")) } // Test custom policy { s := fakeServer() s.XFrameOptions = "deny" cancelInformer := test.StartInformer(s.projInformer) defer cancelInformer() port, err := test.GetFreePort() assert.NoError(t, err) metricsPort, err := test.GetFreePort() assert.NoError(t, err) ctx, cancel := context.WithCancel(context.Background()) defer cancel() go s.Run(ctx, port, metricsPort) defer func() { time.Sleep(3 * time.Second) }() err = test.WaitForPortListen(fmt.Sprintf("127.0.0.1:%d", port), 10*time.Second) assert.NoError(t, err) // Allow server startup time.Sleep(1 * time.Second) client := http.Client{} url := fmt.Sprintf("http://127.0.0.1:%d/test.html", port) req, err := http.NewRequest("GET", url, nil) assert.NoError(t, err) resp, err := client.Do(req) assert.NoError(t, err) assert.Equal(t, "deny", resp.Header.Get("X-Frame-Options")) } // Test disabled { s := fakeServer() s.XFrameOptions = "" cancelInformer := test.StartInformer(s.projInformer) defer cancelInformer() port, err := test.GetFreePort() assert.NoError(t, err) metricsPort, err := test.GetFreePort() assert.NoError(t, err) ctx, cancel := context.WithCancel(context.Background()) 
defer cancel() go s.Run(ctx, port, metricsPort) defer func() { time.Sleep(3 * time.Second) }() err = test.WaitForPortListen(fmt.Sprintf("127.0.0.1:%d", port), 10*time.Second) assert.NoError(t, err) // Allow server startup time.Sleep(1 * time.Second) client := http.Client{} url := fmt.Sprintf("http://127.0.0.1:%d/test.html", port) req, err := http.NewRequest("GET", url, nil) assert.NoError(t, err) resp, err := client.Do(req) assert.NoError(t, err) assert.Empty(t, resp.Header.Get("X-Frame-Options")) } }
explode_data.jsonl/38043
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1087 }
[ 2830, 3393, 27049, 10574, 1155, 353, 8840, 836, 8, 341, 197, 322, 3393, 1638, 4842, 330, 24063, 8611, 698, 197, 515, 197, 1903, 1669, 12418, 5475, 741, 197, 84441, 641, 34527, 1669, 1273, 12101, 641, 34527, 1141, 82177, 641, 34527, 340,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseNested(t *testing.T) { searchDir := "testdata/nested" p := New() p.ParseDependency = true err := p.ParseAPI(searchDir, mainAPIFile, defaultParseDepth) assert.NoError(t, err) expected, err := ioutil.ReadFile(filepath.Join(searchDir, "expected.json")) assert.NoError(t, err) b, _ := json.MarshalIndent(p.swagger, "", " ") assert.Equal(t, string(expected), string(b)) }
explode_data.jsonl/63562
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 159 }
[ 2830, 3393, 14463, 71986, 1155, 353, 8840, 836, 8, 341, 45573, 6184, 1669, 330, 92425, 9612, 9980, 698, 3223, 1669, 1532, 741, 3223, 8937, 36387, 284, 830, 198, 9859, 1669, 281, 8937, 7082, 20447, 6184, 11, 1887, 7082, 1703, 11, 1638, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestGreaterEqualTime(t *testing.T) { t.Parallel() tm, err := time.Parse("2006-01-02T15:04:05Z", "2006-01-02T15:04:05Z") require.Nil(t, err) match, err := path.GreaterEqual(&testType1{ Time: tm, }, "time", "2006-01-02T15:04:04Z") require.Nil(t, err) require.True(t, match) match, err = path.GreaterEqual(&testType1{ Time: tm, }, "time", "2006-01-02T15:04:05Z") require.Nil(t, err) require.True(t, match) match, err = path.GreaterEqual(&testType1{ Time: tm, }, "time", "2006-01-02T15:04:06Z") require.Nil(t, err) require.False(t, match) }
explode_data.jsonl/78480
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 271 }
[ 2830, 3393, 41366, 2993, 1462, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 3244, 76, 11, 1848, 1669, 882, 8937, 445, 17, 15, 15, 21, 12, 15, 16, 12, 15, 17, 51, 16, 20, 25, 15, 19, 25, 15, 20, 57, 497, 330, 17, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestSnippetOrderPlaceholders(t *testing.T) { _10 := newPlaceholder(10, &markers{}) _2 := newPlaceholder(2, &markers{}) assertEqual(t, compareByIndex(*_10, *_2), 1) }
explode_data.jsonl/60287
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 71 }
[ 2830, 3393, 87852, 4431, 17371, 16662, 1155, 353, 8840, 836, 8, 341, 197, 62, 16, 15, 1669, 501, 48305, 7, 16, 15, 11, 609, 60773, 37790, 197, 62, 17, 1669, 501, 48305, 7, 17, 11, 609, 60773, 6257, 692, 6948, 2993, 1155, 11, 9429,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestTeamsService_ListChildTeamsByParentID(t *testing.T) { client, mux, _, teardown := setup() defer teardown() mux.HandleFunc("/organizations/1/team/2/teams", func(w http.ResponseWriter, r *http.Request) { testMethod(t, r, "GET") testFormValues(t, r, values{"page": "2"}) fmt.Fprint(w, `[{"id":2}]`) }) opt := &ListOptions{Page: 2} ctx := context.Background() teams, _, err := client.Teams.ListChildTeamsByParentID(ctx, 1, 2, opt) if err != nil { t.Errorf("Teams.ListChildTeamsByParentID returned error: %v", err) } want := []*Team{{ID: Int64(2)}} if !cmp.Equal(teams, want) { t.Errorf("Teams.ListChildTeamsByParentID returned %+v, want %+v", teams, want) } const methodName = "ListChildTeamsByParentID" testBadOptions(t, methodName, func() (err error) { _, _, err = client.Teams.ListChildTeamsByParentID(ctx, -1, -2, opt) return err }) testNewRequestAndDoFailure(t, methodName, client, func() (*Response, error) { got, resp, err := client.Teams.ListChildTeamsByParentID(ctx, 1, 2, opt) if got != nil { t.Errorf("testNewRequestAndDoFailure %v = %#v, want nil", methodName, got) } return resp, err }) }
explode_data.jsonl/4522
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 470 }
[ 2830, 3393, 60669, 1860, 27104, 3652, 60669, 1359, 8387, 915, 1155, 353, 8840, 836, 8, 341, 25291, 11, 59807, 11, 8358, 49304, 1669, 6505, 741, 16867, 49304, 2822, 2109, 2200, 63623, 4283, 69253, 14, 16, 78015, 14, 17, 14, 38496, 497, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCapacityLimit(t *testing.T) { msr := NewInMemoryTransactionStore(DummyMatcher{}, 2) m1 := Transaction{Request: &mock.Request{Host: "TEST1"}} msr.Save(m1) m2 := Transaction{Request: &mock.Request{Host: "TEST2"}} msr.Save(m2) reqs := msr.GetAll() if len(reqs) != 2 { t.Fatalf("Invalid store len: %v", len(reqs)) } if reqs[0].Request.Host != "TEST1" { t.Fatalf("Store FIFO error") } if reqs[1].Request.Host != "TEST2" { t.Fatalf("Store FIFO error") } m3 := Transaction{Request: &mock.Request{Host: "TEST3"}} msr.Save(m3) reqs = msr.GetAll() if len(reqs) != 2 { t.Fatalf("Invalid store len: %v", len(reqs)) } if reqs[0].Request.Host != "TEST2" { t.Fatalf("Store FIFO error") } if reqs[1].Request.Host != "TEST3" { t.Fatalf("Store FIFO error") } }
explode_data.jsonl/44246
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 364 }
[ 2830, 3393, 29392, 16527, 1155, 353, 8840, 836, 8, 1476, 47691, 81, 1669, 1532, 641, 10642, 8070, 6093, 5432, 8574, 37554, 22655, 220, 17, 340, 2109, 16, 1669, 17869, 90, 1900, 25, 609, 16712, 9659, 90, 9296, 25, 330, 10033, 16, 95642...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
7
func TestGetReturnPayload(t *testing.T) { stmts := []*sysl.Statement{ { Stmt: &sysl.Statement_Call{}, }, { Stmt: &sysl.Statement_Action{}, }, { Stmt: &sysl.Statement_Ret{ Ret: &sysl.Return{ Payload: "test", }, }, }, } actual := getReturnPayload(stmts) assert.Equal(t, "test", actual) }
explode_data.jsonl/58744
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 166 }
[ 2830, 3393, 1949, 5598, 29683, 1155, 353, 8840, 836, 8, 341, 55822, 82, 1669, 29838, 7791, 75, 70215, 515, 197, 197, 515, 298, 197, 31063, 25, 609, 7791, 75, 70215, 76028, 38837, 197, 197, 1583, 197, 197, 515, 298, 197, 31063, 25, 6...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestInit(t *testing.T) { tdb := dbtest.Postgres(t) defer tdb.Close() db := tdb.Open() defer db.Close() _, err := Migrate(db.DB, MigrateUp, 0) assert.NoError(t, err) }
explode_data.jsonl/12578
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 82 }
[ 2830, 3393, 3803, 1155, 353, 8840, 836, 8, 341, 76373, 65, 1669, 2927, 1944, 23442, 17818, 1155, 340, 16867, 259, 1999, 10421, 741, 20939, 1669, 259, 1999, 12953, 2822, 16867, 2927, 10421, 2822, 197, 6878, 1848, 1669, 386, 34479, 9791, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestAddInterval table-drives restAddInterval through its HTTP status
// outcomes: validation failures (bad time format, duplicate name, bad
// frequency format), the success path, and an unexpected persistence error.
func TestAddInterval(t *testing.T) {
	tests := []struct {
		name           string
		request        *http.Request
		dbMock         interfaces.DBClient
		scClient       interfaces.SchedulerQueueClient
		expectedStatus int
	}{
		{
			name:           "ErrInvalidTimeFormat",
			request:        createRequestIntervalAdd(intervalForAddInvalidTime),
			dbMock:         createMockIntervalLoaderAddSuccess(),
			scClient:       createMockIntervalLoaderSCAddSuccess(),
			expectedStatus: http.StatusBadRequest,
		},
		{
			name:           "OK",
			request:        createRequestIntervalAdd(intervalForAdd),
			dbMock:         createMockIntervalLoaderAddSuccess(),
			scClient:       createMockIntervalLoaderSCAddSuccess(),
			expectedStatus: http.StatusOK,
		},
		{
			name:           "ErrIntervalNameInUse",
			request:        createRequestIntervalAdd(intervalForAdd),
			dbMock:         createMockIntervalLoaderAddNameInUse(),
			scClient:       createMockIntervalLoaderSCAddSuccess(),
			expectedStatus: http.StatusBadRequest,
		},
		{
			name:           "ErrInvalidFrequencyFormat",
			request:        createRequestIntervalAdd(intervalForAddInvalidFreq),
			dbMock:         createMockIntervalLoaderAddSuccess(),
			scClient:       createMockIntervalLoaderSCAddSuccess(),
			expectedStatus: http.StatusBadRequest,
		},
		{
			name:    "Unexpected Error",
			request: createRequestIntervalAdd(intervalForAdd),
			dbMock:  createMockIntervalLoaderAddErr(),
			// NOTE(review): "Loade" (not "Loader") matches the helper's
			// declaration elsewhere in the package, presumably — confirm.
			scClient:       createMockIntervalLoadeSCAddErr(),
			expectedStatus: http.StatusInternalServerError,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			rr := httptest.NewRecorder()
			restAddInterval(rr, tt.request, logger.NewMockClient(), tt.dbMock, tt.scClient)
			response := rr.Result()
			// Only the status code is asserted; the body is not inspected.
			if response.StatusCode != tt.expectedStatus {
				t.Errorf("status code mismatch -- expected %v got %v", tt.expectedStatus, response.StatusCode)
				return
			}
		})
	}
}
explode_data.jsonl/51570
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 845 }
[ 2830, 3393, 2212, 10256, 1155, 353, 8840, 836, 8, 341, 78216, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 23555, 286, 353, 1254, 9659, 198, 197, 20939, 11571, 260, 24099, 22537, 2959, 198, 197, 29928, 2959, 981, 24099, 808, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestIpnsLocalLink(t *testing.T) { nd, mnt := setupIpnsTest(t, nil) defer mnt.Close() name := mnt.Dir + "/local" checkExists(t, name) linksto, err := os.Readlink(name) if err != nil { t.Fatal(err) } if linksto != nd.Identity.Pretty() { t.Fatal("Link invalid") } }
explode_data.jsonl/77464
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 130 }
[ 2830, 3393, 23378, 4412, 7319, 3939, 1155, 353, 8840, 836, 8, 341, 197, 303, 11, 296, 406, 1669, 6505, 23378, 4412, 2271, 1155, 11, 2092, 340, 16867, 296, 406, 10421, 741, 11609, 1669, 296, 406, 83757, 488, 3521, 2438, 1837, 25157, 15...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRollupFuncsLookbackDelta(t *testing.T) { t.Run("1", func(t *testing.T) { rc := rollupConfig{ Func: rollupFirst, Start: 80, End: 140, Step: 10, LookbackDelta: 1, } rc.Timestamps = getTimestamps(rc.Start, rc.End, rc.Step) values := rc.Do(nil, testValues, testTimestamps) valuesExpected := []float64{99, 12, 44, nan, 32, 34, nan} timestampsExpected := []int64{80, 90, 100, 110, 120, 130, 140} testRowsEqual(t, values, rc.Timestamps, valuesExpected, timestampsExpected) }) t.Run("7", func(t *testing.T) { rc := rollupConfig{ Func: rollupFirst, Start: 80, End: 140, Step: 10, LookbackDelta: 7, } rc.Timestamps = getTimestamps(rc.Start, rc.End, rc.Step) values := rc.Do(nil, testValues, testTimestamps) valuesExpected := []float64{99, 12, 44, 44, 32, 34, nan} timestampsExpected := []int64{80, 90, 100, 110, 120, 130, 140} testRowsEqual(t, values, rc.Timestamps, valuesExpected, timestampsExpected) }) t.Run("0", func(t *testing.T) { rc := rollupConfig{ Func: rollupFirst, Start: 80, End: 140, Step: 10, LookbackDelta: 0, } rc.Timestamps = getTimestamps(rc.Start, rc.End, rc.Step) values := rc.Do(nil, testValues, testTimestamps) valuesExpected := []float64{34, 12, 12, 44, 44, 34, nan} timestampsExpected := []int64{80, 90, 100, 110, 120, 130, 140} testRowsEqual(t, values, rc.Timestamps, valuesExpected, timestampsExpected) }) }
explode_data.jsonl/23123
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 739 }
[ 2830, 3393, 32355, 454, 9626, 82, 10380, 1419, 20277, 1155, 353, 8840, 836, 8, 341, 3244, 16708, 445, 16, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 30295, 1669, 6502, 454, 2648, 515, 298, 197, 9626, 25, 688, 6502, 454, 5338, 345...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func Test_TableBorder_GetBorderOnline(t *testing.T) { config := ReadConfiguration(t) client, ctx := PrepareTest(t, config) localFile := "DocumentElements/Tables/TablesGet.docx" requestDocument := OpenFile(t, localFile) options := map[string]interface{}{ "nodePath": "tables/1/rows/0/cells/0", } request := &models.GetBorderOnlineRequest{ Document: requestDocument, BorderType: ToStringPointer("left"), Optionals: options, } _, _, err := client.WordsApi.GetBorderOnline(ctx, request) if err != nil { t.Error(err) } }
explode_data.jsonl/16255
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 243 }
[ 2830, 3393, 42544, 10691, 13614, 10691, 19598, 1155, 353, 8840, 836, 8, 341, 262, 2193, 1669, 4457, 7688, 1155, 340, 262, 2943, 11, 5635, 1669, 31166, 2271, 1155, 11, 2193, 340, 262, 2205, 1703, 1669, 330, 7524, 11868, 14, 21670, 14, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestConfigSourceManager_ParamsHandling resolves a config file containing
// config-source references with parameters and checks that (a) the params
// forwarded to the source's OnRetrieve match the expected per-selector
// values and (b) the resolved config equals the expected golden file.
func TestConfigSourceManager_ParamsHandling(t *testing.T) {
	ctx := context.Background()
	// Fake config source: maps each selector to the params it should
	// receive (nil for parameterless selectors).
	tstCfgSrc := testConfigSource{
		ValueMap: map[string]valueEntry{
			"elem0": {Value: nil},
			"elem1": {
				Value: map[string]interface{}{
					"p0": true,
					"p1": "a string with spaces",
					"p3": 42,
				},
			},
			"k0": {Value: nil},
			"k1": {
				Value: map[string]interface{}{
					"p0": true,
					"p1": "a string with spaces",
					"p2": map[string]interface{}{
						"p2_0": "a nested map0",
						"p2_1": true,
					},
				},
			},
		},
	}
	// Set OnRetrieve to check if the parameters were parsed as expected.
	tstCfgSrc.OnRetrieve = func(ctx context.Context, selector string, params interface{}) error {
		assert.Equal(t, tstCfgSrc.ValueMap[selector].Value, params)
		return nil
	}

	manager, err := NewManager(nil)
	require.NoError(t, err)
	// Register the fake source under the name referenced by the YAML.
	manager.configSources = map[string]configsource.ConfigSource{
		"tstcfgsrc": &tstCfgSrc,
	}

	file := path.Join("testdata", "params_handling.yaml")
	cp, err := configparser.NewConfigMapFromFile(file)
	require.NoError(t, err)

	expectedFile := path.Join("testdata", "params_handling_expected.yaml")
	expectedParser, err := configparser.NewConfigMapFromFile(expectedFile)
	require.NoError(t, err)

	actualParser, err := manager.Resolve(ctx, cp)
	require.NoError(t, err)

	assert.Equal(t, expectedParser.ToStringMap(), actualParser.ToStringMap())
	assert.NoError(t, manager.Close(ctx))
}
explode_data.jsonl/34669
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 589 }
[ 2830, 3393, 2648, 3608, 2043, 44656, 38606, 1155, 353, 8840, 836, 8, 341, 20985, 1669, 2266, 19047, 741, 3244, 267, 42467, 20360, 1669, 1273, 2648, 3608, 515, 197, 47399, 2227, 25, 2415, 14032, 60, 957, 5874, 515, 298, 197, 1, 18871, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestRekey_init drives the "rekey -init" CLI command against an unsealed
// in-memory Vault core and verifies the rekey operation is registered with
// the requested share/threshold parameters.
func TestRekey_init(t *testing.T) {
	core, _, _ := vault.TestCoreUnsealed(t)
	ln, addr := http.TestServer(t, core)
	defer ln.Close()

	ui := new(cli.MockUi)
	c := &RekeyCommand{
		Meta: meta.Meta{
			Ui: ui,
		},
	}

	args := []string{
		"-address", addr,
		"-init",
		"-key-threshold", "10",
		"-key-shares", "10",
	}
	if code := c.Run(args); code != 0 {
		t.Fatalf("bad: %d\n\n%s", code, ui.ErrorWriter.String())
	}

	// Inspect the pending barrier rekey config (recovery=false) and make
	// sure it reflects the flags passed above.
	config, err := core.RekeyConfig(false)
	if err != nil {
		t.Fatalf("err: %s", err)
	}
	if config.SecretShares != 10 {
		t.Fatal("should rekey")
	}
	if config.SecretThreshold != 10 {
		t.Fatal("should rekey")
	}
}
explode_data.jsonl/39443
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 291 }
[ 2830, 3393, 693, 792, 6137, 1155, 353, 8840, 836, 8, 341, 71882, 11, 8358, 716, 1669, 34584, 8787, 5386, 1806, 75940, 1155, 340, 197, 2261, 11, 10789, 1669, 1758, 8787, 5475, 1155, 11, 6200, 340, 16867, 29390, 10421, 2822, 37278, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
func TestHandshakeClientExportKeyingMaterial(t *testing.T) { test := &clientTest{ name: "ExportKeyingMaterial", config: testConfig.Clone(), validate: func(state ConnectionState) error { if km, err := state.ExportKeyingMaterial("test", nil, 42); err != nil { return fmt.Errorf("ExportKeyingMaterial failed: %v", err) } else if len(km) != 42 { return fmt.Errorf("Got %d bytes from ExportKeyingMaterial, wanted %d", len(km), 42) } return nil }, } runClientTestTLS10(t, test) runClientTestTLS12(t, test) runClientTestTLS13(t, test) }
explode_data.jsonl/27725
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 215 }
[ 2830, 3393, 2314, 29661, 2959, 16894, 1592, 287, 13415, 1155, 353, 8840, 836, 8, 341, 18185, 1669, 609, 2972, 2271, 515, 197, 11609, 25, 256, 330, 16894, 1592, 287, 13415, 756, 197, 25873, 25, 1273, 2648, 64463, 3148, 197, 197, 7067, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestListCommandRunEClosureWithWrappedJSONOutput(t *testing.T) { // User does not meet the Resource interface (no ObjectMeta), so the // "wrapped-json" output for it should be the exact same as the "json" // output. TestListCommandRunEClosureWithJSONOutput(t) }
explode_data.jsonl/11557
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 80 }
[ 2830, 3393, 852, 4062, 6727, 7498, 11653, 2354, 67795, 5370, 5097, 1155, 353, 8840, 836, 8, 341, 197, 322, 2657, 1558, 537, 3367, 279, 11765, 3749, 320, 2152, 3002, 12175, 701, 773, 279, 198, 197, 322, 330, 75704, 56080, 1, 2550, 369,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestWithErrorHandling table-drives the error-handling middleware through
// three handler behaviors: a layered typed error (client-visible message),
// a plain error (masked as "internal server error"), and a success path.
func TestWithErrorHandling(t *testing.T) {
	testCases := []struct {
		name           string
		handler        func(resp http.ResponseWriter, req *http.Request) error
		expectedResult string
		expectedCode   int
		expectedLog    string
	}{
		{
			name: "test error middleware with typed error",
			// Builds a three-layer wrapped error (handler -> svc -> db);
			// the innermost message is what the client should see.
			handler: func(resp http.ResponseWriter, req *http.Request) error {
				db := func() error {
					return liberr.WithArgs(
						liberr.Operation("db.insert"),
						liberr.Kind("databaseError"),
						liberr.SeverityError,
						errors.New("insertion failed"),
					)
				}
				svc := func() error {
					return liberr.WithArgs(
						liberr.Operation("svc.addUser"),
						liberr.Kind("dependencyError"),
						liberr.SeverityWarn,
						db(),
					)
				}
				return liberr.WithArgs(
					liberr.Operation("handler.addUser"),
					liberr.ValidationError,
					liberr.SeverityInfo,
					svc(),
				)
			},
			expectedResult: "{\"error\":{\"message\":\"insertion failed\"},\"success\":false}",
			expectedCode:   http.StatusBadRequest,
			expectedLog:    "insertion failed",
		},
		{
			name: "test error middleware with error",
			// An untyped error must not leak details to the client.
			handler: func(resp http.ResponseWriter, req *http.Request) error {
				return errors.New("some random error")
			},
			expectedResult: "{\"error\":{\"message\":\"internal server error\"},\"success\":false}",
			expectedCode:   http.StatusInternalServerError,
			expectedLog:    "some random error",
		},
		{
			name: "test error middleware with no error",
			// A nil return passes the handler's own response through.
			handler: func(resp http.ResponseWriter, req *http.Request) error {
				resp.WriteHeader(http.StatusOK)
				_, _ = resp.Write([]byte("success"))
				return nil
			},
			expectedResult: "success",
			expectedCode:   http.StatusOK,
			expectedLog:    "",
		},
	}
	for _, testCase := range testCases {
		t.Run(testCase.name, func(t *testing.T) {
			testWithError(t, testCase.expectedCode, testCase.expectedResult, testCase.expectedLog, testCase.handler)
		})
	}
}
explode_data.jsonl/57373
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 788 }
[ 2830, 3393, 66102, 38606, 1155, 353, 8840, 836, 8, 341, 18185, 37302, 1669, 3056, 1235, 341, 197, 11609, 1843, 914, 198, 197, 53326, 286, 2915, 20267, 1758, 37508, 11, 4232, 353, 1254, 9659, 8, 1465, 198, 197, 42400, 2077, 914, 198, 1...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestParseCertificateSCTs(t *testing.T) { pemBlock, _ := pem.Decode([]byte(certWithSCTListPEM)) cert, err := ParseCertificate(pemBlock.Bytes) if err != nil { t.Fatalf("ParseCertificate()=_,%v; want _, nil", err) } if len(cert.RawSCT) == 0 { t.Errorf("len(cert.RawSCT)=0, want >0") } for i, got := range cert.SCTList.SCTList { want, _ := hex.DecodeString(wantSCTs[i]) if !bytes.Equal(got.Val, want) { t.Errorf("SCT[%d]=%x; want %x", i, got.Val, want) } } }
explode_data.jsonl/67983
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 228 }
[ 2830, 3393, 14463, 33202, 50, 1162, 82, 1155, 353, 8840, 836, 8, 341, 3223, 336, 4713, 11, 716, 1669, 54184, 56372, 10556, 3782, 87793, 2354, 50, 1162, 852, 1740, 44, 1171, 1444, 529, 11, 1848, 1669, 14775, 33202, 1295, 336, 4713, 368...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
5
// TestAmountCompression round-trips a range of representative amounts
// through compressTxOutAmount / decompressTxOutAmount and asserts both
// directions produce the expected values.
func TestAmountCompression(t *testing.T) {
	t.Parallel()
	tests := []struct {
		name         string
		uncompressed uint64
		compressed   uint64
	}{
		{
			name:         "0 RMG (sometimes used in nulldata)",
			uncompressed: 0,
			compressed:   0,
		},
		{
			name:         "546 Atoms (current network dust value)",
			uncompressed: 546,
			compressed:   4911,
		},
		{
			name:         "0.001 RMG (typical transaction fee)",
			uncompressed: 1000,
			compressed:   4,
		},
		{
			name:         "0.01 RMG (typical transaction fee)",
			uncompressed: 10000,
			compressed:   5,
		},
		{
			name:         "12.345678 RMG",
			uncompressed: 12345678,
			compressed:   111111101,
		},
		{
			name:         "50 RMG",
			uncompressed: 50000000,
			compressed:   48,
		},
		{
			name:         "100 RMG",
			uncompressed: 100000000,
			compressed:   9,
		},
		{
			name:         "500 RMG",
			uncompressed: 500000000,
			compressed:   49,
		},
		{
			name:         "2100000000 RMG (max minted coins)",
			uncompressed: 2100000000000000,
			compressed:   21000000,
		},
	}
	for _, test := range tests {
		// Ensure the amount compresses to the expected value.
		gotCompressed := compressTxOutAmount(test.uncompressed)
		if gotCompressed != test.compressed {
			t.Errorf("compressTxOutAmount (%s): did not get "+
				"expected value - got %d, want %d", test.name,
				gotCompressed, test.compressed)
			continue
		}
		// Ensure the value decompresses to the expected value.
		gotDecompressed := decompressTxOutAmount(test.compressed)
		if gotDecompressed != test.uncompressed {
			t.Errorf("decompressTxOutAmount (%s): did not get "+
				"expected value - got %d, want %d", test.name,
				gotDecompressed, test.uncompressed)
			continue
		}
	}
}
explode_data.jsonl/8790
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 782 }
[ 2830, 3393, 10093, 81411, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 78216, 1669, 3056, 1235, 341, 197, 11609, 260, 914, 198, 197, 20479, 45703, 2622, 21, 19, 198, 197, 32810, 14318, 256, 2622, 21, 19, 198, 197, 59403, 197...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestTypeFlag_Wrap_GivenType_ReturnsWrappedType(t *testing.T) { g := NewGomegaWithT(t) ft := ArmFlag.ApplyTo(StringType) g.Expect(ft.element).To(Equal(StringType)) g.Expect(ft.HasFlag(ArmFlag)).To(BeTrue()) }
explode_data.jsonl/71881
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 90 }
[ 2830, 3393, 929, 12135, 2763, 4611, 2646, 2071, 929, 53316, 82, 67795, 929, 1155, 353, 8840, 836, 8, 341, 3174, 1669, 1532, 38, 32696, 2354, 51, 1155, 340, 18937, 1669, 12990, 12135, 36051, 1249, 2242, 929, 340, 3174, 81893, 63106, 1233...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
func TestSearch(t *testing.T) { dictionary := Dictionary{"test": "this is just a test"} t.Run("know word", func(t *testing.T) { got, err := dictionary.Search("test") want := "this is just a test" assert.Equal(t, got, want) assert.Equal(t, err, nil) }) t.Run("unknown word", func(t *testing.T) { got, err := dictionary.Search("unknown") want := "" assert.Equal(t, got, want) assert.NotEqual(t, err, nil) }) }
explode_data.jsonl/54102
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 172 }
[ 2830, 3393, 5890, 1155, 353, 8840, 836, 8, 341, 2698, 3916, 1669, 10466, 4913, 1944, 788, 330, 574, 374, 1101, 264, 1273, 63159, 3244, 16708, 445, 32034, 3409, 497, 2915, 1155, 353, 8840, 836, 8, 341, 197, 3174, 354, 11, 1848, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// Test_hasShimAnnotation checks which annotation sets cause shim sync for
// both Ingress and Gateway objects: issuer/cluster-issuer annotations and
// kubernetes.io/tls-acme="true" sync; "false", empty, or absent do not.
func Test_hasShimAnnotation(t *testing.T) {
	type testT struct {
		Annot map[string]string
		Want  bool
	}
	t.Run("ingress", func(t *testing.T) {
		tests := []testT{
			{Annot: map[string]string{"cert-manager.io/issuer": ""}, Want: true},
			{Annot: map[string]string{"cert-manager.io/cluster-issuer": ""}, Want: true},
			{Annot: map[string]string{"kubernetes.io/tls-acme": "true"}, Want: true},
			{Annot: map[string]string{"kubernetes.io/tls-acme": "false"}, Want: false},
			{Annot: map[string]string{"kubernetes.io/tls-acme": ""}, Want: false},
			{Annot: nil, Want: false},
		}
		for _, test := range tests {
			// Same annotation set must give the same decision for both
			// object kinds.
			shouldSyncIngress := hasShimAnnotation(buildIngress("", "", test.Annot), []string{"kubernetes.io/tls-acme"})
			if shouldSyncIngress != test.Want {
				t.Errorf("Expected shouldSyncIngress=%v for annotations %#v", test.Want, test.Annot)
			}
			shouldSyncGateway := hasShimAnnotation(buildGateway("", "", test.Annot), []string{"kubernetes.io/tls-acme"})
			if shouldSyncGateway != test.Want {
				t.Errorf("Expected shouldSyncGateway=%v for annotations %#v", test.Want, test.Annot)
			}
		}
	})
}
explode_data.jsonl/53187
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 463 }
[ 2830, 3393, 21778, 2016, 318, 19711, 1155, 353, 8840, 836, 8, 341, 13158, 1273, 51, 2036, 341, 197, 197, 2082, 1921, 2415, 14032, 30953, 198, 197, 17300, 517, 220, 1807, 198, 197, 630, 3244, 16708, 445, 287, 673, 497, 2915, 1155, 353,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestAtaString(t *testing.T) { const want = "Copyright 2019 the u" var ts = ataString{ 'o'<<8 | 'C', 'y'<<8 | 'p', 'i'<<8 | 'r', 'h'<<8 | 'g', ' '<<8 | 't', '0'<<8 | '2', '9'<<8 | '1', 't'<<8 | ' ', 'e'<<8 | 'h', 'u'<<8 | ' ', } got := ts.String() if got != want { t.Fatalf("Got %v, want %v", got, want) } }
explode_data.jsonl/39293
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 192 }
[ 2830, 3393, 1655, 64, 703, 1155, 353, 8840, 836, 8, 341, 4777, 1366, 284, 330, 13547, 220, 17, 15, 16, 24, 279, 575, 698, 2405, 10591, 284, 73476, 703, 515, 197, 197, 93472, 6, 2442, 23, 760, 364, 34, 751, 197, 197, 58758, 6, 24...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestImporter_NegativeVersion(t *testing.T) { tree, err := NewMutableTree(db.NewMemDB(), 0) require.NoError(t, err) _, err = tree.Import(-1) require.Error(t, err) }
explode_data.jsonl/25874
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 70 }
[ 2830, 3393, 77289, 1604, 15060, 5637, 1155, 353, 8840, 836, 8, 341, 51968, 11, 1848, 1669, 1532, 11217, 6533, 9791, 7121, 18816, 3506, 1507, 220, 15, 340, 17957, 35699, 1155, 11, 1848, 340, 197, 6878, 1848, 284, 4916, 67275, 4080, 16, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
1
// TestPipelinerunLog_follow_mode seeds a fake cluster with one completed
// PipelineRun (single task, one step plus the injected "nop" container),
// streams its logs in follow mode, and asserts the exact prefixed output.
func TestPipelinerunLog_follow_mode(t *testing.T) {
	var (
		pipelineName = "output-pipeline"
		prName       = "output-pipeline-1"
		prstart      = clockwork.NewFakeClock()
		ns           = "namespace"
		task1Name    = "output-task"
		tr1Name      = "output-task-1"
		tr1StartTime = prstart.Now().Add(20 * time.Second)
		tr1Pod       = "output-task-pod-123456"
		tr1Step1Name = "writefile-step"
	)
	nsList := []*corev1.Namespace{
		{
			ObjectMeta: metav1.ObjectMeta{
				Name: ns,
			},
		},
	}
	// One succeeded TaskRun with two terminated steps (real step + nop).
	trs := []*v1alpha1.TaskRun{
		tb.TaskRun(tr1Name, tb.TaskRunNamespace(ns),
			tb.TaskRunSpec(
				tb.TaskRunTaskRef(task1Name),
			),
			tb.TaskRunStatus(
				tb.PodName(tr1Pod),
				tb.TaskRunStartTime(tr1StartTime),
				tb.StatusCondition(apis.Condition{
					Type:   apis.ConditionSucceeded,
					Status: corev1.ConditionTrue,
				}),
				tb.StepState(
					cb.StepName(tr1Step1Name),
					tb.StateTerminated(0),
				),
				tb.StepState(
					cb.StepName("nop"),
					tb.StateTerminated(0),
				),
			),
			tb.TaskRunSpec(
				tb.TaskRunTaskRef(task1Name),
			),
		),
	}
	// PipelineRun still reporting "Running" so follow mode engages.
	prs := []*v1alpha1.PipelineRun{
		tb.PipelineRun(prName, tb.PipelineRunNamespace(ns),
			tb.PipelineRunLabel("tekton.dev/pipeline", prName),
			tb.PipelineRunSpec(pipelineName),
			tb.PipelineRunStatus(
				tb.PipelineRunStatusCondition(apis.Condition{
					Status: corev1.ConditionTrue,
					Reason: v1beta1.PipelineRunReasonRunning.String(),
				}),
				tb.PipelineRunTaskRunsStatus(tr1Name, &v1alpha1.PipelineRunTaskRunStatus{
					PipelineTaskName: task1Name,
					Status:           &trs[0].Status,
				}),
			),
		),
	}
	pps := []*v1alpha1.Pipeline{
		tb.Pipeline(pipelineName, tb.PipelineNamespace(ns),
			tb.PipelineSpec(
				tb.PipelineTask(task1Name, task1Name),
			),
		),
	}
	// Backing pod, already succeeded, with both containers.
	p := []*corev1.Pod{
		tb.Pod(tr1Pod, tb.PodNamespace(ns),
			tb.PodLabel("tekton.dev/task", pipelineName),
			tb.PodSpec(
				tb.PodContainer(tr1Step1Name, tr1Step1Name+":latest"),
				tb.PodContainer("nop", "override-with-nop:latest"),
			),
			cb.PodStatus(
				cb.PodPhase(corev1.PodSucceeded),
			),
		),
	}
	// Canned log lines the fake streamer will serve per container.
	fakeLogStream := fake.Logs(
		fake.Task(tr1Pod,
			fake.Step(tr1Step1Name,
				"wrote a file1",
				"wrote a file2",
				"wrote a file3",
				"wrote a file4",
			),
			fake.Step("nop", "Build successful"),
		),
	)
	cs, _ := test.SeedTestData(t, pipelinetest.Data{PipelineRuns: prs, Pipelines: pps, TaskRuns: trs, Pods: p, Namespaces: nsList})
	cs.Pipeline.Resources = cb.APIResourceList(versionA1, []string{"task", "taskrun", "pipeline", "pipelinerun"})
	tdc := testDynamic.Options{}
	dc, err := tdc.Client(
		cb.UnstructuredTR(trs[0], versionA1),
		cb.UnstructuredPR(prs[0], versionA1),
		cb.UnstructuredP(pps[0], versionA1),
	)
	if err != nil {
		t.Errorf("unable to create dynamic client: %v", err)
	}
	// allSteps=false, follow=true.
	prlo := logOptsv1aplha1(prName, ns, cs, dc, fake.Streamer(fakeLogStream), false, true)
	output, _ := fetchLogs(prlo)
	expectedLogs := []string{
		"[output-task : writefile-step] wrote a file1",
		"[output-task : writefile-step] wrote a file2",
		"[output-task : writefile-step] wrote a file3",
		"[output-task : writefile-step] wrote a file4\n",
		"[output-task : nop] Build successful\n",
	}
	expected := strings.Join(expectedLogs, "\n") + "\n"
	test.AssertOutput(t, expected, output)
}
explode_data.jsonl/14860
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 1580 }
[ 2830, 3393, 47, 81079, 10453, 359, 2201, 43490, 7302, 1155, 353, 8840, 836, 8, 341, 2405, 2399, 197, 3223, 8790, 675, 284, 330, 3006, 2268, 8790, 698, 197, 25653, 675, 981, 284, 330, 3006, 2268, 8790, 12, 16, 698, 197, 25653, 2468, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestAccKeycloakSamlIdentityProvider_extraConfigInvalid is an acceptance
// test asserting that extra_config rejects reserved keys: setting
// "syncMode" there must fail plan/apply with a validation error.
func TestAccKeycloakSamlIdentityProvider_extraConfigInvalid(t *testing.T) {
	t.Parallel()
	samlName := acctest.RandomWithPrefix("tf-acc")
	customConfigValue := acctest.RandomWithPrefix("tf-acc")

	resource.Test(t, resource.TestCase{
		ProviderFactories: testAccProviderFactories,
		PreCheck:          func() { testAccPreCheck(t) },
		CheckDestroy:      testAccCheckKeycloakSamlIdentityProviderDestroy(),
		Steps: []resource.TestStep{
			{
				Config:      testKeycloakSamlIdentityProvider_extra_config(samlName, "syncMode", customConfigValue),
				ExpectError: regexp.MustCompile("extra_config key \"syncMode\" is not allowed"),
			},
		},
	})
}
explode_data.jsonl/76730
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 241 }
[ 2830, 3393, 14603, 1592, 88751, 50, 9467, 18558, 5179, 31858, 2648, 7928, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 2822, 1903, 9467, 675, 1669, 1613, 67880, 26709, 2354, 14335, 445, 8935, 12, 4475, 1138, 1444, 1450, 2648, 1130, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestDivide(t *testing.T) { // Define test cases testCases := []struct { a float64 b float64 want float64 description string errExpected bool }{ { description: "dividing by zero", a: 2, b: 0, want: 123456789, errExpected: true, }, { description: "two positive numbers whos quotient is positive", a: 20, b: 2, want: 10, }, { description: " a positive and negative number whos quotient is negative", a: 10, b: -2, want: -5, }, { description: "a positive decimal and negative decimal whos quotient is a negative decimal", a: 8.4, b: -2.5, want: -3.3600000000000003, }, } t.Parallel() for _, c := range testCases { got, err := calculator.Divide(c.a, c.b) if err != nil && !c.errExpected { t.Errorf("error received while testing %s. The function call was: Divide(%v, %v), and the error was: %v", c.description, c.a, c.b, err) } // Only fail on want != got if an error was not expected if !c.errExpected && c.want != got { t.Errorf("want %v, got %v, while testing %s. The function call was: Divide(%v, %v)", c.want, got, c.description, c.a, c.b) } } }
explode_data.jsonl/12262
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 628 }
[ 2830, 3393, 12509, 577, 1155, 353, 8840, 836, 8, 341, 197, 322, 18614, 1273, 5048, 198, 18185, 37302, 1669, 3056, 1235, 341, 197, 11323, 1843, 2224, 21, 19, 198, 197, 2233, 1843, 2224, 21, 19, 198, 197, 50780, 286, 2224, 21, 19, 198...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
func TestWrapFactoryErrorNoWrap(t *testing.T) { o := DecryptionClientOptions{ CryptoRegistry: initCryptoRegistryFrom(map[string]WrapEntry{ KMSWrap: (kmsKeyHandler{ kms: kms.New(unit.Session), }).decryptHandler, }, map[string]CEKEntry{ AESGCMNoPadding: newAESGCMContentCipher, }, map[string]Padder{}), } env := Envelope{ WrapAlg: "none", MatDesc: `{"kms_cmk_id":""}`, } wrap, err := wrapFromEnvelope(o, env) if err == nil { t.Error("expected error, but received none") } if wrap != nil { t.Errorf("expected nil wrap value, received %v", wrap) } }
explode_data.jsonl/63022
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 252 }
[ 2830, 3393, 26787, 4153, 1454, 2753, 26787, 1155, 353, 8840, 836, 8, 341, 22229, 1669, 3714, 15597, 2959, 3798, 515, 197, 6258, 9444, 15603, 25, 2930, 58288, 15603, 3830, 9147, 14032, 60, 26787, 5874, 515, 298, 39340, 4826, 26787, 25, 3...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
// TestApp01myCustomerHndlrListIndex is generated scaffolding for the
// customer HndlrListIndex handler: it only sets up test data today; the
// request/response assertions are still TODO.
func TestApp01myCustomerHndlrListIndex(t *testing.T) {
	var err error
	var td *TestData_App01myCustomer
	//var r string

	t.Logf("TestCustomer.HndlrListIndex()...\n")
	td = &TestData_App01myCustomer{}
	td.Setup(t)
	// NOTE(review): err is never assigned after its declaration, so this
	// check can never fire — presumably Setup was meant to return it.
	if err != nil {
		t.Fatalf("Error: Cannot connect: %s\n", err.Error())
	}
	// Issue a request for ???.
	//TODO: Create a first() request followed by next()'s'.
	// Check response.
	/*TODO: Uncomment when requests are actually being performed. r = td.ResponseBody() if r != "" { t.Logf("\t%s\n", r) } */
	// Parse response to verify
	//TODO: Parse the response.
	t.Logf("TestCustomer.HndlrListIndex() - End of Test\n\n\n")
}
explode_data.jsonl/63210
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 329 }
[ 2830, 3393, 2164, 15, 16, 2408, 12792, 39, 303, 19018, 852, 1552, 1155, 353, 8840, 836, 8, 341, 262, 762, 1848, 260, 1465, 198, 262, 762, 17941, 688, 353, 83920, 36117, 15, 16, 2408, 12792, 198, 262, 442, 947, 435, 1843, 914, 271, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestGenerateCharacters(t *testing.T) { type testCase struct { start, length uint64 result string } testCases := []testCase{ {0, 5, " !\"#$"}, {33, 10, "ABCDEFGHIJ"}, {65, 4, "abcd"}, {90, 7, "z{|}~ !"}, } for _, tc := range testCases { s := generateCharacters(tc.start, tc.length) assert.Equal(t, tc.result, s) } }
explode_data.jsonl/46909
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 161 }
[ 2830, 3393, 31115, 37489, 1155, 353, 8840, 836, 8, 341, 13158, 54452, 2036, 341, 197, 21375, 11, 3084, 2622, 21, 19, 198, 197, 9559, 286, 914, 198, 197, 630, 18185, 37302, 1669, 3056, 66194, 515, 197, 197, 90, 15, 11, 220, 20, 11, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestState(t *testing.T) { s := NewState( "some_state", []func() error{}, []func() error{}, ) assert.False(t, s.GetActive()) s.forceActive() assert.Equal(t, "some_state", s.GetName()) assert.True(t, s.GetActive()) }
explode_data.jsonl/36218
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 101 }
[ 2830, 3393, 1397, 1155, 353, 8840, 836, 8, 341, 1903, 1669, 1532, 1397, 1006, 197, 197, 1, 14689, 4387, 756, 197, 197, 1294, 2830, 368, 1465, 38837, 197, 197, 1294, 2830, 368, 1465, 38837, 197, 692, 6948, 50757, 1155, 11, 274, 2234, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestConvertTaskToMetrics(t *testing.T) { typeurl.Register(&cgroups.Metrics{}, "io.containerd.cgroups.v1.Metrics") // Need to register the type to be used in UnmarshalAny later on. tests := []struct { name string typeUrl string values cgroups.Metrics error string expected *cgroups.Metrics }{ { "unregistered type", "io.containerd.cgroups.v1.Doge", cgroups.Metrics{}, "type with url io.containerd.cgroups.v1.Doge: not found", nil, }, { "missing values", "io.containerd.cgroups.v1.Metrics", cgroups.Metrics{}, "", &cgroups.Metrics{}, }, { "fully functional", "io.containerd.cgroups.v1.Metrics", cgroups.Metrics{Memory: &cgroups.MemoryStat{Cache: 100}}, "", &cgroups.Metrics{ Memory: &cgroups.MemoryStat{ Cache: 100, }, }, }, } for _, test := range tests { t.Run(test.name, func(t *testing.T) { te := &mockTaskStruct{ mockMectric: func(ctx context.Context) (*types.Metric, error) { typeUrl := test.typeUrl jsonValue, _ := json.Marshal(test.values) metric := &types.Metric{ Data: &prototypes.Any{ TypeUrl: typeUrl, Value: jsonValue, }, } return metric, nil }, } taskFaked := containerd.Task(te) m, e := convertTasktoMetrics(taskFaked, context.Background()) require.Equal(t, test.expected, m) if e != nil { require.Equal(t, e.Error(), test.error) } }) } }
explode_data.jsonl/1858
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 668 }
[ 2830, 3393, 12012, 6262, 1249, 27328, 1155, 353, 8840, 836, 8, 341, 13158, 1085, 19983, 2099, 66, 16753, 1321, 13468, 22655, 330, 815, 18357, 67, 520, 16753, 3133, 16, 1321, 13468, 899, 442, 14656, 311, 4161, 279, 943, 311, 387, 1483, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestTaskFromACSWithOverrides checks that TaskFromACS applies per-container
// command configuration from the ACS payload: the first container's command
// comes from its JSON Overrides blob, the second from its explicit Command
// field.
func TestTaskFromACSWithOverrides(t *testing.T) {
	taskFromACS := ecsacs.Task{
		Arn:           strptr("myArn"),
		DesiredStatus: strptr("RUNNING"),
		Family:        strptr("myFamily"),
		Version:       strptr("1"),
		Containers: []*ecsacs.Container{
			{
				Name: strptr("myName1"),
				MountPoints: []*ecsacs.MountPoint{
					{
						ContainerPath: strptr("volumeContainerPath1"),
						SourceVolume:  strptr("volumeName1"),
					},
				},
				// Overrides carries the command as a raw JSON document.
				Overrides: strptr(`{"command": ["foo", "bar"]}`),
			},
			{
				Name:    strptr("myName2"),
				Command: []*string{strptr("command")},
				MountPoints: []*ecsacs.MountPoint{
					{
						ContainerPath: strptr("volumeContainerPath2"),
						SourceVolume:  strptr("volumeName2"),
					},
				},
			},
		},
	}
	seqNum := int64(42)
	task, err := TaskFromACS(&taskFromACS, &ecsacs.PayloadMessage{SeqNum: &seqNum})
	assert.Nil(t, err, "Should be able to handle acs task")
	assert.Equal(t, 2, len(task.Containers)) // before PostUnmarshalTask
	// The Overrides JSON supplies container 0's command...
	assert.Equal(t, task.Containers[0].Command[0], "foo")
	assert.Equal(t, task.Containers[0].Command[1], "bar")
	// ...and the plain Command field supplies container 1's.
	assert.Equal(t, task.Containers[1].Command[0], "command")
}
explode_data.jsonl/37222
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 523 }
[ 2830, 3393, 6262, 3830, 62687, 2354, 80010, 1155, 353, 8840, 836, 8, 341, 49115, 3830, 62687, 1669, 78422, 19252, 28258, 515, 197, 197, 58331, 25, 1843, 607, 3505, 445, 2408, 58331, 4461, 197, 197, 4896, 2690, 2522, 25, 607, 3505, 445, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestRate_String(t *testing.T) { v := Rate{ Limit: 0, Remaining: 0, Reset: Timestamp{}, } want := `github.Rate{Limit:0, Remaining:0, Reset:github.Timestamp{0001-01-01 00:00:00 +0000 UTC}}` if got := v.String(); got != want { t.Errorf("Rate.String = %v, want %v", got, want) } }
explode_data.jsonl/33277
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 136 }
[ 2830, 3393, 11564, 31777, 1155, 353, 8840, 836, 8, 341, 5195, 1669, 19775, 515, 197, 15070, 2353, 25, 257, 220, 15, 345, 197, 197, 54745, 25, 220, 15, 345, 197, 197, 14828, 25, 257, 32758, 38837, 197, 532, 50780, 1669, 1565, 5204, 2...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
// TestActiveEndpoint_Reserve_WaitsForReady2Step verifies that ActiveEndpoint
// blocks until the revision becomes ready, where readiness arrives in two
// separate status updates so the inner loop of the `Watch` is exercised.
// NOTE(review): the test synchronizes via fixed sleeps, which is inherently
// flake-prone; the durations below were chosen empirically per the comments.
func TestActiveEndpoint_Reserve_WaitsForReady2Step(t *testing.T) {
	// This generates two updates, so that the inner loop of the `Watch` can be exercised.
	k8s, kna := fakeClients()
	kna.ServingV1alpha1().Revisions(testNamespace).Create(
		newRevisionBuilder(defaultRevisionLabels).
			withReady(false).
			build())
	k8s.CoreV1().Services(testNamespace).Create(newServiceBuilder().build())
	a := NewRevisionActivator(k8s, kna, TestLogger(t))

	// Run the activation in the background; its result arrives on ch.
	ch := make(chan ActivationResult)
	go func() {
		ch <- a.ActiveEndpoint(testNamespace, testRevision)
	}()

	select {
	case <-ch:
		t.Error("Unexpected result before revision is ready.")
	case <-time.After(1000 * time.Millisecond):
		// Wait long enough, so that `ActiveEndpoint()` Go routine above
		// sets up the `Watch` on the revisions. We need to sleep long enough
		// for this to happen reliably, otherwise the test will flake.
	}

	// Partially update the service, to trigger a watch,
	// which would not finish the loop.
	rev, _ := kna.ServingV1alpha1().Revisions(testNamespace).Get(testRevision, metav1.GetOptions{})
	rev.Status.MarkResourcesAvailable()
	kna.ServingV1alpha1().Revisions(testNamespace).Update(rev)

	// ... and then finally make revision ready after a timeout.
	go func() {
		time.Sleep(250 * time.Millisecond)
		rev, _ := kna.ServingV1alpha1().Revisions(testNamespace).Get(testRevision, metav1.GetOptions{})
		rev.Status.MarkActive()
		rev.Status.MarkContainerHealthy()
		kna.ServingV1alpha1().Revisions(testNamespace).Update(rev)
	}()

	select {
	case ar := <-ch:
		want := Endpoint{testServiceFQDN, v1alpha1.DefaultUserPort}
		if ar.Endpoint != want {
			t.Errorf("Unexpected endpoint. Want %+v. Got %+v.", want, ar.Endpoint)
		}
		// NOTE(review): the message says "Want 0" but the comparison is
		// against http.StatusOK (200) — confirm which is intended.
		if ar.Status != http.StatusOK {
			t.Errorf("Unexpected error state. Want 0. Got %v.", ar.Status)
		}
		if ar.ServiceName != "test-service" {
			t.Errorf("Unexpected service name. Want test-service. Got %v.", ar.ServiceName)
		}
		if ar.ConfigurationName != "test-config" {
			t.Errorf("Unexpected configuration name. Want test-config. Got %v.", ar.ConfigurationName)
		}
		if ar.Error != nil {
			t.Errorf("Unexpected error. Want nil. Got %v.", ar.Error)
		}
	case <-time.After(3 * time.Second):
		t.Error("Expected result after revision ready.")
	}
}
explode_data.jsonl/27256
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 799 }
[ 2830, 3393, 5728, 27380, 92815, 5852, 2763, 56479, 2461, 19202, 17, 8304, 1155, 353, 8840, 836, 8, 341, 197, 322, 1096, 26885, 1378, 8837, 11, 773, 429, 279, 9179, 6337, 315, 279, 1565, 14247, 63, 646, 387, 61013, 624, 16463, 23, 82, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
// TestStoreRemoveReplicaDestroy checks that destroying a replica via
// RemoveReplica marks it as destroyed: subsequent raft-group access is
// rejected with errRemoved, the replica records a destroy-status error, and
// checkExecutionCanProceed surfaces that same error.
func TestStoreRemoveReplicaDestroy(t *testing.T) {
	defer leaktest.AfterTest(t)()
	stopper := stop.NewStopper()
	defer stopper.Stop(context.Background())
	store, _ := createTestStore(t, testStoreOpts{createSystemRanges: true}, stopper)
	repl1, err := store.GetReplica(1)
	if err != nil {
		t.Fatal(err)
	}
	if err := store.RemoveReplica(context.Background(), repl1, repl1.Desc().NextReplicaID, RemoveOptions{
		DestroyData: true,
	}); err != nil {
		t.Fatal(err)
	}

	// Verify that removal of a replica marks it as destroyed so that future raft
	// commands on the Replica will silently be dropped.
	err = repl1.withRaftGroup(true, func(r *raft.RawNode) (bool, error) {
		return true, errors.Errorf("unexpectedly created a raft group")
	})
	require.Equal(t, errRemoved, err)

	// The destroy status must have been populated under the replica mutex.
	repl1.mu.RLock()
	expErr := repl1.mu.destroyStatus.err
	repl1.mu.RUnlock()
	if expErr == nil {
		t.Fatal("replica was not marked as destroyed")
	}

	// A request against the destroyed replica should fail with the same
	// destroy-status error recorded above.
	st := &kvserverpb.LeaseStatus{Timestamp: repl1.Clock().Now()}
	if err = repl1.checkExecutionCanProceed(&roachpb.BatchRequest{}, nil /* g */, st); !errors.Is(err, expErr) {
		t.Fatalf("expected error %s, but got %v", expErr, err)
	}
}
explode_data.jsonl/83
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 430 }
[ 2830, 3393, 6093, 13021, 18327, 15317, 14245, 1155, 353, 8840, 836, 8, 341, 16867, 23352, 1944, 36892, 2271, 1155, 8, 741, 62644, 712, 1669, 2936, 7121, 10674, 712, 741, 16867, 2936, 712, 30213, 5378, 19047, 2398, 57279, 11, 716, 1669, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestCompressStreamXzUnsupported(t *testing.T) { dest, err := os.Create(tmp + "dest") if err != nil { t.Fatalf("Fail to create the destination file") } defer dest.Close() _, err = CompressStream(dest, Xz) if err == nil { t.Fatalf("Should fail as xz is unsupported for compression format.") } }
explode_data.jsonl/79224
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 111 }
[ 2830, 3393, 1092, 1873, 3027, 55, 89, 41884, 1155, 353, 8840, 836, 8, 341, 49616, 11, 1848, 1669, 2643, 7251, 10368, 488, 330, 4979, 1138, 743, 1848, 961, 2092, 341, 197, 3244, 30762, 445, 19524, 311, 1855, 279, 9106, 1034, 1138, 197,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRejectDropsMatchingLinesFromInput(t *testing.T) { t.Parallel() input := "This is the first line in the file.\nHello, world.\nThis is another line in the file.\n" tcs := []struct { reject, want string }{ { reject: "line", want: "Hello, world.\n", }, { reject: "another", want: "This is the first line in the file.\nHello, world.\n", }, { reject: "definitely won't match any lines", want: "This is the first line in the file.\nHello, world.\nThis is another line in the file.\n", }, } for _, tc := range tcs { got, err := script.Echo(input).Reject(tc.reject).String() if err != nil { t.Fatal(err) } if tc.want != got { t.Error(cmp.Diff(tc.want, got)) } } }
explode_data.jsonl/51498
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 317 }
[ 2830, 3393, 78413, 35, 3702, 64430, 16794, 3830, 2505, 1155, 353, 8840, 836, 8, 341, 3244, 41288, 7957, 741, 22427, 1669, 330, 1986, 374, 279, 1156, 1555, 304, 279, 1034, 7110, 77, 9707, 11, 1879, 7110, 77, 1986, 374, 2441, 1555, 304,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
4
func TestIoBufferAllocAndFree(t *testing.T) { b := newIoBuffer(0) for i := 0; i < 1024; i++ { b.Alloc(i) if b.Cap() < i { t.Errorf("Expect alloc at least %d bytes but allocated %d", i, b.Cap()) } } b.Reset() for i := 0; i < 1024; i++ { b.Alloc(i) if b.Cap() < i { t.Errorf("Expect alloc at least %d bytes but allocated %d", i, b.Cap()) } b.Free() if b.Cap() != 0 { t.Errorf("Expect free to 0 bytes but got %d", b.Cap()) } } }
explode_data.jsonl/24060
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 223 }
[ 2830, 3393, 42799, 4095, 25154, 3036, 10940, 1155, 353, 8840, 836, 8, 341, 2233, 1669, 501, 42799, 4095, 7, 15, 340, 2023, 600, 1669, 220, 15, 26, 600, 366, 220, 16, 15, 17, 19, 26, 600, 1027, 341, 197, 2233, 9636, 1074, 1956, 340...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
6
// TestInstanceInfo type-checks small generic packages and verifies the
// instantiation information recorded in Info.Instances: the inferred type
// arguments, the instantiated type, and the invariant that re-instantiating
// the generic origin with those arguments yields an identical type.
func TestInstanceInfo(t *testing.T) {
	var tests = []struct {
		src   string   // package source to type-check
		name  string   // identifier whose instantiation is inspected
		targs []string // expected type arguments, as strings
		typ   string   // expected underlying type of the instance
	}{
		{`package p0; func f[T any](T) {}; func _() { f(42) }`,
			`f`,
			[]string{`int`},
			`func(int)`,
		},
		{`package p1; func f[T any](T) T { panic(0) }; func _() { f('@') }`,
			`f`,
			[]string{`rune`},
			`func(rune) rune`,
		},
		{`package p2; func f[T any](...T) T { panic(0) }; func _() { f(0i) }`,
			`f`,
			[]string{`complex128`},
			`func(...complex128) complex128`,
		},
		{`package p3; func f[A, B, C any](A, *B, []C) {}; func _() { f(1.2, new(string), []byte{}) }`,
			`f`,
			[]string{`float64`, `string`, `byte`},
			`func(float64, *string, []byte)`,
		},
		{`package p4; func f[A, B any](A, *B, ...[]B) {}; func _() { f(1.2, new(byte)) }`,
			`f`,
			[]string{`float64`, `byte`},
			`func(float64, *byte, ...[]byte)`,
		},
		{`package s1; func f[T any, P interface{~*T}](x T) {}; func _(x string) { f(x) }`,
			`f`,
			[]string{`string`, `*string`},
			`func(x string)`,
		},
		{`package s2; func f[T any, P interface{~*T}](x []T) {}; func _(x []int) { f(x) }`,
			`f`,
			[]string{`int`, `*int`},
			`func(x []int)`,
		},
		{`package s3; type C[T any] interface{~chan<- T}; func f[T any, P C[T]](x []T) {}; func _(x []int) { f(x) }`,
			`f`,
			[]string{`int`, `chan<- int`},
			`func(x []int)`,
		},
		{`package s4; type C[T any] interface{~chan<- T}; func f[T any, P C[T], Q C[[]*P]](x []T) {}; func _(x []int) { f(x) }`,
			`f`,
			[]string{`int`, `chan<- int`, `chan<- []*chan<- int`},
			`func(x []int)`,
		},
		{`package t1; func f[T any, P interface{~*T}]() T { panic(0) }; func _() { _ = f[string] }`,
			`f`,
			[]string{`string`, `*string`},
			`func() string`,
		},
		{`package t2; func f[T any, P interface{~*T}]() T { panic(0) }; func _() { _ = (f[string]) }`,
			`f`,
			[]string{`string`, `*string`},
			`func() string`,
		},
		{`package t3; type C[T any] interface{~chan<- T}; func f[T any, P C[T]]() []T { return nil }; func _() { _ = f[int] }`,
			`f`,
			[]string{`int`, `chan<- int`},
			`func() []int`,
		},
		{`package t4; type C[T any] interface{~chan<- T}; func f[T any, P C[T], Q C[[]*P]]() []T { return nil }; func _() { _ = f[int] }`,
			`f`,
			[]string{`int`, `chan<- int`, `chan<- []*chan<- int`},
			`func() []int`,
		},
		{`package i0; import "lib"; func _() { lib.F(42) }`,
			`F`,
			[]string{`int`},
			`func(int)`,
		},
		{`package type0; type T[P interface{~int}] struct{ x P }; var _ T[int]`,
			`T`,
			[]string{`int`},
			`struct{x int}`,
		},
		{`package type1; type T[P interface{~int}] struct{ x P }; var _ (T[int])`,
			`T`,
			[]string{`int`},
			`struct{x int}`,
		},
		{`package type2; type T[P interface{~int}] struct{ x P }; var _ T[(int)]`,
			`T`,
			[]string{`int`},
			`struct{x int}`,
		},
		{`package type3; type T[P1 interface{~[]P2}, P2 any] struct{ x P1; y P2 }; var _ T[[]int, int]`,
			`T`,
			[]string{`[]int`, `int`},
			`struct{x []int; y int}`,
		},
		{`package type4; import "lib"; var _ lib.T[int]`,
			`T`,
			[]string{`int`},
			`[]int`,
		},
	}

	for _, test := range tests {
		// Shared auxiliary package imported by some test sources.
		// NOTE(review): the raw-string line breaks below were reconstructed;
		// a newline after `package lib` is required for this to parse.
		const lib = `package lib

func F[P any](P) {}

type T[P any] []P
`

		imports := make(testImporter)
		conf := Config{Importer: imports}
		instances := make(map[*ast.Ident]Instance)
		uses := make(map[*ast.Ident]Object)
		// makePkg type-checks src, records Instances/Uses into the maps
		// above, and registers the package so later sources can import it.
		makePkg := func(src string) *Package {
			f, err := parser.ParseFile(fset, "p.go", src, 0)
			if err != nil {
				t.Fatal(err)
			}
			pkg, err := conf.Check("", fset, []*ast.File{f}, &Info{Instances: instances, Uses: uses})
			if err != nil {
				t.Fatal(err)
			}
			imports[pkg.Name()] = pkg
			return pkg
		}
		makePkg(lib)
		pkg := makePkg(test.src)

		// look for instance information
		var targs []Type
		var typ Type
		for ident, inst := range instances {
			if ExprString(ident) == test.name {
				for i := 0; i < inst.TypeArgs.Len(); i++ {
					targs = append(targs, inst.TypeArgs.At(i))
				}
				typ = inst.Type

				// Check that we can find the corresponding parameterized type.
				ptype := uses[ident].Type()
				lister, _ := ptype.(interface{ TypeParams() *TypeParamList })
				if lister == nil || lister.TypeParams().Len() == 0 {
					t.Errorf("package %s: info.Types[%v] = %v, want parameterized type", pkg.Name(), ident, ptype)
					continue
				}

				// Verify the invariant that re-instantiating the generic type with
				// TypeArgs results in an equivalent type.
				inst2, err := Instantiate(nil, ptype, targs, true)
				if err != nil {
					t.Errorf("Instantiate(%v, %v) failed: %v", ptype, targs, err)
				}
				if !Identical(inst.Type, inst2) {
					t.Errorf("%v and %v are not identical", inst.Type, inst2)
				}
				break
			}
		}
		if targs == nil {
			t.Errorf("package %s: no instance information found for %s", pkg.Name(), test.name)
			continue
		}

		// check that type arguments are correct
		if len(targs) != len(test.targs) {
			t.Errorf("package %s: got %d type arguments; want %d", pkg.Name(), len(targs), len(test.targs))
			continue
		}
		for i, targ := range targs {
			if got := targ.String(); got != test.targs[i] {
				t.Errorf("package %s, %d. type argument: got %s; want %s", pkg.Name(), i, got, test.targs[i])
				continue
			}
		}

		// check that the types match
		if got := typ.Underlying().String(); got != test.typ {
			t.Errorf("package %s: got %s; want %s", pkg.Name(), got, test.typ)
		}
	}
}
explode_data.jsonl/55537
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 2528 }
[ 2830, 3393, 2523, 1731, 1155, 353, 8840, 836, 8, 341, 2405, 7032, 284, 3056, 1235, 341, 197, 41144, 256, 914, 198, 197, 11609, 220, 914, 198, 197, 3244, 2116, 3056, 917, 198, 197, 25314, 256, 914, 198, 197, 59403, 197, 197, 90, 63, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
3
func TestRewriteWithNoLazyImage(t *testing.T) { description := `<img src="https://example.org/image.jpg" alt="Image"><noscript><p>Some text</p></noscript>` output := Rewriter("https://example.org/article", description, "add_dynamic_image") expected := description if expected != output { t.Errorf(`Not expected output: got "%s" instead of "%s"`, output, expected) } }
explode_data.jsonl/21479
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 122 }
[ 2830, 3393, 58465, 1247, 2354, 2753, 39766, 1906, 1155, 353, 8840, 836, 8, 341, 42407, 1669, 30586, 1892, 2286, 428, 2428, 1110, 8687, 2659, 23349, 4819, 1, 4797, 428, 1906, 3088, 36391, 1228, 1784, 79, 29, 8373, 1467, 522, 79, 1472, ...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
2
func TestCreateAndGet(t *testing.T) { cache, cleanup := withCache(t) defer cleanup() wasm, err := ioutil.ReadFile("./testdata/hackatom.wasm") require.NoError(t, err) checksum, err := Create(cache, wasm) require.NoError(t, err) code, err := GetCode(cache, checksum) require.NoError(t, err) require.Equal(t, wasm, code) }
explode_data.jsonl/62548
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 132 }
[ 2830, 3393, 4021, 97726, 1155, 353, 8840, 836, 8, 341, 52680, 11, 21290, 1669, 448, 8233, 1155, 340, 16867, 21290, 2822, 6692, 10530, 11, 1848, 1669, 43144, 78976, 13988, 92425, 7530, 473, 21855, 1418, 10530, 1138, 17957, 35699, 1155, 11,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1
func TestUnknowHintIgnore(t *testing.T) { store, clean := testkit.CreateMockStore(t) defer clean() tk := testkit.NewTestKit(t, store) tk.MustExec("USE test") tk.MustExec("create table t(a int)") tk.MustQuery("select /*+ unknown_hint(c1)*/ 1").Check(testkit.Rows("1")) tk.MustQuery("show warnings").Check(testkit.Rows("Warning 1064 Optimizer hint syntax error at line 1 column 23 near \"unknown_hint(c1)*/\" ")) _, err := tk.Exec("select 1 from /*+ test1() */ t") require.NoError(t, err) }
explode_data.jsonl/65470
{ "file_path": "/home/dung/Study/Code/Cross_test_gen/training_dataset/dedup_data/clean_data_go/data/explode_data.jsonl", "token_count": 189 }
[ 2830, 3393, 1806, 32034, 26987, 12497, 1155, 353, 8840, 836, 8, 341, 57279, 11, 4240, 1669, 1273, 8226, 7251, 11571, 6093, 1155, 340, 16867, 4240, 2822, 3244, 74, 1669, 1273, 8226, 7121, 2271, 7695, 1155, 11, 3553, 340, 3244, 74, 50463,...
[ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1...
1